xref: /dragonfly/contrib/gcc-8.0/gcc/fold-const.c (revision 8bf5b238)
1 /* Fold a constant sub-tree into a single node for C-compiler
2    Copyright (C) 1987-2018 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 /*@@ This file should be rewritten to use an arbitrary precision
21   @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22   @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23   @@ The routines that translate from the ap rep should
24   @@ warn if precision et al. is lost.
25   @@ This would also make life easier when this technology is used
26   @@ for cross-compilers.  */
27 
28 /* The entry points in this file are fold, size_int_wide and size_binop.
29 
30    fold takes a tree as argument and returns a simplified tree.
31 
32    size_binop takes a tree code for an arithmetic operation
33    and two operands that are trees, and produces a tree for the
34    result, assuming the type comes from `sizetype'.
35 
36    size_int takes an integer value, and creates a tree constant
37    with type from `sizetype'.
38 
39    Note: Since the folders get called on non-gimple code as well as
40    gimple code, we need to handle GIMPLE tuples as well as their
41    corresponding tree equivalents.  */
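/* Illustrative sketch (editorial addition, not part of the original
   source): how the entry points above might be used by a caller,
   assuming the usual GCC internal constructors; the variable names
   are placeholders.

     tree four  = size_int (4);
     tree eight = size_int (8);
     tree sum   = size_binop (PLUS_EXPR, four, eight);
     // sum is the sizetype constant 12.
     tree expr  = build2 (PLUS_EXPR, sizetype, four, eight);
     tree same  = fold (expr);
     // fold reduces the PLUS_EXPR of two constants to the constant 12.  */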
42 
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "backend.h"
47 #include "target.h"
48 #include "rtl.h"
49 #include "tree.h"
50 #include "gimple.h"
51 #include "predict.h"
52 #include "memmodel.h"
53 #include "tm_p.h"
54 #include "tree-ssa-operands.h"
55 #include "optabs-query.h"
56 #include "cgraph.h"
57 #include "diagnostic-core.h"
58 #include "flags.h"
59 #include "alias.h"
60 #include "fold-const.h"
61 #include "fold-const-call.h"
62 #include "stor-layout.h"
63 #include "calls.h"
64 #include "tree-iterator.h"
65 #include "expr.h"
66 #include "intl.h"
67 #include "langhooks.h"
68 #include "tree-eh.h"
69 #include "gimplify.h"
70 #include "tree-dfa.h"
71 #include "builtins.h"
72 #include "generic-match.h"
73 #include "gimple-fold.h"
74 #include "params.h"
75 #include "tree-into-ssa.h"
76 #include "md5.h"
77 #include "case-cfn-macros.h"
78 #include "stringpool.h"
79 #include "tree-vrp.h"
80 #include "tree-ssanames.h"
81 #include "selftest.h"
82 #include "stringpool.h"
83 #include "attribs.h"
84 #include "tree-vector-builder.h"
85 #include "vec-perm-indices.h"
86 
87 /* Nonzero if we are folding constants inside an initializer; zero
88    otherwise.  */
89 int folding_initializer = 0;
90 
91 /* The following constants represent a bit-based encoding of GCC's
92    comparison operators.  This encoding simplifies transformations
93    on relational comparison operators, such as AND and OR.  */
94 enum comparison_code {
95   COMPCODE_FALSE = 0,
96   COMPCODE_LT = 1,
97   COMPCODE_EQ = 2,
98   COMPCODE_LE = 3,
99   COMPCODE_GT = 4,
100   COMPCODE_LTGT = 5,
101   COMPCODE_GE = 6,
102   COMPCODE_ORD = 7,
103   COMPCODE_UNORD = 8,
104   COMPCODE_UNLT = 9,
105   COMPCODE_UNEQ = 10,
106   COMPCODE_UNLE = 11,
107   COMPCODE_UNGT = 12,
108   COMPCODE_NE = 13,
109   COMPCODE_UNGE = 14,
110   COMPCODE_TRUE = 15
111 };
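/* Worked example (editorial addition): the low three bits encode LT, EQ
   and GT, and bit 3 encodes "unordered", so combining comparisons is a
   matter of bitwise arithmetic on the codes above, e.g.

     COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE        (1 | 2 == 3)
     COMPCODE_LT | COMPCODE_GT == COMPCODE_LTGT      (1 | 4 == 5)
     COMPCODE_LT & COMPCODE_GE == COMPCODE_FALSE     (1 & 6 == 0)
     COMPCODE_ORD | COMPCODE_UNORD == COMPCODE_TRUE  (7 | 8 == 15)  */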
112 
113 static bool negate_expr_p (tree);
114 static tree negate_expr (tree);
115 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
116 static enum comparison_code comparison_to_compcode (enum tree_code);
117 static enum tree_code compcode_to_comparison (enum comparison_code);
118 static int twoval_comparison_p (tree, tree *, tree *);
119 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
120 static tree optimize_bit_field_compare (location_t, enum tree_code,
121 					tree, tree, tree);
122 static int simple_operand_p (const_tree);
123 static bool simple_operand_p_2 (tree);
124 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
125 static tree range_predecessor (tree);
126 static tree range_successor (tree);
127 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
128 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
129 static tree unextend (tree, int, int, tree);
130 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
131 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
132 static tree fold_binary_op_with_conditional_arg (location_t,
133 						 enum tree_code, tree,
134 						 tree, tree,
135 						 tree, tree, int);
136 static tree fold_negate_const (tree, tree);
137 static tree fold_not_const (const_tree, tree);
138 static tree fold_relational_const (enum tree_code, tree, tree, tree);
139 static tree fold_convert_const (enum tree_code, tree, tree);
140 static tree fold_view_convert_expr (tree, tree);
141 static tree fold_negate_expr (location_t, tree);
142 
143 
144 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
145    Otherwise, return LOC.  */
146 
147 static location_t
148 expr_location_or (tree t, location_t loc)
149 {
150   location_t tloc = EXPR_LOCATION (t);
151   return tloc == UNKNOWN_LOCATION ? loc : tloc;
152 }
153 
154 /* Similar to protected_set_expr_location, but never modify x in place;
155    if the location can and needs to be set, unshare it.  */
156 
157 static inline tree
158 protected_set_expr_location_unshare (tree x, location_t loc)
159 {
160   if (CAN_HAVE_LOCATION_P (x)
161       && EXPR_LOCATION (x) != loc
162       && !(TREE_CODE (x) == SAVE_EXPR
163 	   || TREE_CODE (x) == TARGET_EXPR
164 	   || TREE_CODE (x) == BIND_EXPR))
165     {
166       x = copy_node (x);
167       SET_EXPR_LOCATION (x, loc);
168     }
169   return x;
170 }
171 
172 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
173    division and returns the quotient.  Otherwise returns
174    NULL_TREE.  */
175 
176 tree
177 div_if_zero_remainder (const_tree arg1, const_tree arg2)
178 {
179   widest_int quo;
180 
181   if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
182 			 SIGNED, &quo))
183     return wide_int_to_tree (TREE_TYPE (arg1), quo);
184 
185   return NULL_TREE;
186 }
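/* Example (editorial addition): with INTEGER_CST operands, dividing 12
   by 4 yields the constant 3, while dividing 13 by 4 yields NULL_TREE
   because the remainder is nonzero.  */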
187 
188 /* This is nonzero if we should defer warnings about undefined
189    overflow.  This facility exists because these warnings are a
190    special case.  The code to estimate loop iterations does not want
191    to issue any warnings, since it works with expressions which do not
192    occur in user code.  Various bits of cleanup code call fold(), but
193    only use the result if it has certain characteristics (e.g., is a
194    constant); that code only wants to issue a warning if the result is
195    used.  */
196 
197 static int fold_deferring_overflow_warnings;
198 
199 /* If a warning about undefined overflow is deferred, this is the
200    warning.  Note that this may cause us to turn two warnings into
201    one, but that is fine since it is sufficient to only give one
202    warning per expression.  */
203 
204 static const char* fold_deferred_overflow_warning;
205 
206 /* If a warning about undefined overflow is deferred, this is the
207    level at which the warning should be emitted.  */
208 
209 static enum warn_strict_overflow_code fold_deferred_overflow_code;
210 
211 /* Start deferring overflow warnings.  We could use a stack here to
212    permit nested calls, but at present it is not necessary.  */
213 
214 void
215 fold_defer_overflow_warnings (void)
216 {
217   ++fold_deferring_overflow_warnings;
218 }
219 
220 /* Stop deferring overflow warnings.  If there is a pending warning,
221    and ISSUE is true, then issue the warning if appropriate.  STMT is
222    the statement with which the warning should be associated (used for
223    location information); STMT may be NULL.  CODE is the level of the
224    warning--a warn_strict_overflow_code value.  This function will use
225    the smaller of CODE and the deferred code when deciding whether to
226    issue the warning.  CODE may be zero to mean to always use the
227    deferred code.  */
228 
229 void
230 fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
231 {
232   const char *warnmsg;
233   location_t locus;
234 
235   gcc_assert (fold_deferring_overflow_warnings > 0);
236   --fold_deferring_overflow_warnings;
237   if (fold_deferring_overflow_warnings > 0)
238     {
239       if (fold_deferred_overflow_warning != NULL
240 	  && code != 0
241 	  && code < (int) fold_deferred_overflow_code)
242 	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
243       return;
244     }
245 
246   warnmsg = fold_deferred_overflow_warning;
247   fold_deferred_overflow_warning = NULL;
248 
249   if (!issue || warnmsg == NULL)
250     return;
251 
252   if (gimple_no_warning_p (stmt))
253     return;
254 
255   /* Use the smallest code level when deciding to issue the
256      warning.  */
257   if (code == 0 || code > (int) fold_deferred_overflow_code)
258     code = fold_deferred_overflow_code;
259 
260   if (!issue_strict_overflow_warning (code))
261     return;
262 
263   if (stmt == NULL)
264     locus = input_location;
265   else
266     locus = gimple_location (stmt);
267   warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
268 }
269 
270 /* Stop deferring overflow warnings, ignoring any deferred
271    warnings.  */
272 
273 void
274 fold_undefer_and_ignore_overflow_warnings (void)
275 {
276   fold_undefer_overflow_warnings (false, NULL, 0);
277 }
278 
279 /* Whether we are deferring overflow warnings.  */
280 
281 bool
282 fold_deferring_overflow_warnings_p (void)
283 {
284   return fold_deferring_overflow_warnings > 0;
285 }
286 
287 /* This is called when we fold something based on the fact that signed
288    overflow is undefined.  */
289 
290 void
291 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
292 {
293   if (fold_deferring_overflow_warnings > 0)
294     {
295       if (fold_deferred_overflow_warning == NULL
296 	  || wc < fold_deferred_overflow_code)
297 	{
298 	  fold_deferred_overflow_warning = gmsgid;
299 	  fold_deferred_overflow_code = wc;
300 	}
301     }
302   else if (issue_strict_overflow_warning (wc))
303     warning (OPT_Wstrict_overflow, gmsgid);
304 }
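/* Usage sketch (editorial addition): a caller typically brackets a
   folding attempt with the deferral functions above; code, type, op0,
   op1 and stmt are placeholders, and the warning level shown is one of
   the warn_strict_overflow_code values.

     fold_defer_overflow_warnings ();
     tree res = fold_binary (code, type, op0, op1);
     // Only emit a deferred warning if the folded result will be used.
     fold_undefer_overflow_warnings (res != NULL_TREE, stmt,
                                     WARN_STRICT_OVERFLOW_MISC);  */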
305 
306 /* Return true if the built-in mathematical function specified by FN
307    is odd, i.e. -f(x) == f(-x).  */
308 
309 bool
310 negate_mathfn_p (combined_fn fn)
311 {
312   switch (fn)
313     {
314     CASE_CFN_ASIN:
315     CASE_CFN_ASINH:
316     CASE_CFN_ATAN:
317     CASE_CFN_ATANH:
318     CASE_CFN_CASIN:
319     CASE_CFN_CASINH:
320     CASE_CFN_CATAN:
321     CASE_CFN_CATANH:
322     CASE_CFN_CBRT:
323     CASE_CFN_CPROJ:
324     CASE_CFN_CSIN:
325     CASE_CFN_CSINH:
326     CASE_CFN_CTAN:
327     CASE_CFN_CTANH:
328     CASE_CFN_ERF:
329     CASE_CFN_LLROUND:
330     CASE_CFN_LROUND:
331     CASE_CFN_ROUND:
332     CASE_CFN_SIN:
333     CASE_CFN_SINH:
334     CASE_CFN_TAN:
335     CASE_CFN_TANH:
336     CASE_CFN_TRUNC:
337       return true;
338 
339     CASE_CFN_LLRINT:
340     CASE_CFN_LRINT:
341     CASE_CFN_NEARBYINT:
342     CASE_CFN_RINT:
343       return !flag_rounding_math;
344 
345     default:
346       break;
347     }
348   return false;
349 }
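/* Example (editorial addition): sin is odd, so -sin(x) may be folded to
   sin(-x).  The rint family is treated as odd only when -frounding-math
   is off: under a directed rounding mode such as round-upward,
   rint (-0.5) is -0.0 but -rint (0.5) is -1.0.  */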
350 
351 /* Check whether we may negate an integer constant T without causing
352    overflow.  */
353 
354 bool
355 may_negate_without_overflow_p (const_tree t)
356 {
357   tree type;
358 
359   gcc_assert (TREE_CODE (t) == INTEGER_CST);
360 
361   type = TREE_TYPE (t);
362   if (TYPE_UNSIGNED (type))
363     return false;
364 
365   return !wi::only_sign_bit_p (wi::to_wide (t));
366 }
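/* Example (editorial addition): for a 32-bit signed type the only value
   rejected is INT_MIN (0x80000000), whose representation is just the
   sign bit and whose negation does not fit in the type.  */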
367 
368 /* Determine whether an expression T can be cheaply negated using
369    the function negate_expr without introducing undefined overflow.  */
370 
371 static bool
372 negate_expr_p (tree t)
373 {
374   tree type;
375 
376   if (t == 0)
377     return false;
378 
379   type = TREE_TYPE (t);
380 
381   STRIP_SIGN_NOPS (t);
382   switch (TREE_CODE (t))
383     {
384     case INTEGER_CST:
385       if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
386 	return true;
387 
388       /* Check that -CST will not overflow type.  */
389       return may_negate_without_overflow_p (t);
390     case BIT_NOT_EXPR:
391       return (INTEGRAL_TYPE_P (type)
392 	      && TYPE_OVERFLOW_WRAPS (type));
393 
394     case FIXED_CST:
395       return true;
396 
397     case NEGATE_EXPR:
398       return !TYPE_OVERFLOW_SANITIZED (type);
399 
400     case REAL_CST:
401       /* We want to canonicalize to positive real constants.  Pretend
402          that only negative ones can be easily negated.  */
403       return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
404 
405     case COMPLEX_CST:
406       return negate_expr_p (TREE_REALPART (t))
407 	     && negate_expr_p (TREE_IMAGPART (t));
408 
409     case VECTOR_CST:
410       {
411 	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
412 	  return true;
413 
414 	/* Steps don't prevent negation.  */
415 	unsigned int count = vector_cst_encoded_nelts (t);
416 	for (unsigned int i = 0; i < count; ++i)
417 	  if (!negate_expr_p (VECTOR_CST_ENCODED_ELT (t, i)))
418 	    return false;
419 
420 	return true;
421       }
422 
423     case COMPLEX_EXPR:
424       return negate_expr_p (TREE_OPERAND (t, 0))
425 	     && negate_expr_p (TREE_OPERAND (t, 1));
426 
427     case CONJ_EXPR:
428       return negate_expr_p (TREE_OPERAND (t, 0));
429 
430     case PLUS_EXPR:
431       if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
432 	  || HONOR_SIGNED_ZEROS (element_mode (type))
433 	  || (ANY_INTEGRAL_TYPE_P (type)
434 	      && ! TYPE_OVERFLOW_WRAPS (type)))
435 	return false;
436       /* -(A + B) -> (-B) - A.  */
437       if (negate_expr_p (TREE_OPERAND (t, 1)))
438 	return true;
439       /* -(A + B) -> (-A) - B.  */
440       return negate_expr_p (TREE_OPERAND (t, 0));
441 
442     case MINUS_EXPR:
443       /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
444       return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
445 	     && !HONOR_SIGNED_ZEROS (element_mode (type))
446 	     && (! ANY_INTEGRAL_TYPE_P (type)
447 		 || TYPE_OVERFLOW_WRAPS (type));
448 
449     case MULT_EXPR:
450       if (TYPE_UNSIGNED (type))
451 	break;
452       /* INT_MIN/n * n doesn't overflow, but negating one of its operands
453          makes it overflow if n is a (negative) power of two.  */
454       if (INTEGRAL_TYPE_P (TREE_TYPE (t))
455 	  && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
456 	  && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
457 		 && (wi::popcount
458 		     (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
459 		|| (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
460 		    && (wi::popcount
461 			(wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))
462 	break;
463 
464       /* Fall through.  */
465 
466     case RDIV_EXPR:
467       if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
468 	return negate_expr_p (TREE_OPERAND (t, 1))
469 	       || negate_expr_p (TREE_OPERAND (t, 0));
470       break;
471 
472     case TRUNC_DIV_EXPR:
473     case ROUND_DIV_EXPR:
474     case EXACT_DIV_EXPR:
475       if (TYPE_UNSIGNED (type))
476 	break;
477       if (negate_expr_p (TREE_OPERAND (t, 0)))
478 	return true;
479       /* In general we can't negate B in A / B, because if A is INT_MIN and
480 	 B is 1, we may turn this into INT_MIN / -1 which is undefined
481 	 and actually traps on some architectures.  */
482       if (! INTEGRAL_TYPE_P (TREE_TYPE (t))
483 	  || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
484 	  || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
485 	      && ! integer_onep (TREE_OPERAND (t, 1))))
486 	return negate_expr_p (TREE_OPERAND (t, 1));
487       break;
488 
489     case NOP_EXPR:
490       /* Negate -((double)float) as (double)(-float).  */
491       if (TREE_CODE (type) == REAL_TYPE)
492 	{
493 	  tree tem = strip_float_extensions (t);
494 	  if (tem != t)
495 	    return negate_expr_p (tem);
496 	}
497       break;
498 
499     case CALL_EXPR:
500       /* Negate -f(x) as f(-x).  */
501       if (negate_mathfn_p (get_call_combined_fn (t)))
502 	return negate_expr_p (CALL_EXPR_ARG (t, 0));
503       break;
504 
505     case RSHIFT_EXPR:
506       /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
507       if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
508 	{
509 	  tree op1 = TREE_OPERAND (t, 1);
510 	  if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
511 	    return true;
512 	}
513       break;
514 
515     default:
516       break;
517     }
518   return false;
519 }
520 
521 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
522    simplification is possible.
523    If negate_expr_p would return true for T, NULL_TREE will never be
524    returned.  */
525 
526 static tree
527 fold_negate_expr_1 (location_t loc, tree t)
528 {
529   tree type = TREE_TYPE (t);
530   tree tem;
531 
532   switch (TREE_CODE (t))
533     {
534     /* Convert - (~A) to A + 1.  */
535     case BIT_NOT_EXPR:
536       if (INTEGRAL_TYPE_P (type))
537         return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
538 				build_one_cst (type));
539       break;
540 
541     case INTEGER_CST:
542       tem = fold_negate_const (t, type);
543       if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
544 	  || (ANY_INTEGRAL_TYPE_P (type)
545 	      && !TYPE_OVERFLOW_TRAPS (type)
546 	      && TYPE_OVERFLOW_WRAPS (type))
547 	  || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
548 	return tem;
549       break;
550 
551     case POLY_INT_CST:
552     case REAL_CST:
553     case FIXED_CST:
554       tem = fold_negate_const (t, type);
555       return tem;
556 
557     case COMPLEX_CST:
558       {
559 	tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
560 	tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
561 	if (rpart && ipart)
562 	  return build_complex (type, rpart, ipart);
563       }
564       break;
565 
566     case VECTOR_CST:
567       {
568 	tree_vector_builder elts;
569 	elts.new_unary_operation (type, t, true);
570 	unsigned int count = elts.encoded_nelts ();
571 	for (unsigned int i = 0; i < count; ++i)
572 	  {
573 	    tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
574 	    if (elt == NULL_TREE)
575 	      return NULL_TREE;
576 	    elts.quick_push (elt);
577 	  }
578 
579 	return elts.build ();
580       }
581 
582     case COMPLEX_EXPR:
583       if (negate_expr_p (t))
584 	return fold_build2_loc (loc, COMPLEX_EXPR, type,
585 				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
586 				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
587       break;
588 
589     case CONJ_EXPR:
590       if (negate_expr_p (t))
591 	return fold_build1_loc (loc, CONJ_EXPR, type,
592 				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
593       break;
594 
595     case NEGATE_EXPR:
596       if (!TYPE_OVERFLOW_SANITIZED (type))
597 	return TREE_OPERAND (t, 0);
598       break;
599 
600     case PLUS_EXPR:
601       if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
602 	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
603 	{
604 	  /* -(A + B) -> (-B) - A.  */
605 	  if (negate_expr_p (TREE_OPERAND (t, 1)))
606 	    {
607 	      tem = negate_expr (TREE_OPERAND (t, 1));
608 	      return fold_build2_loc (loc, MINUS_EXPR, type,
609 				      tem, TREE_OPERAND (t, 0));
610 	    }
611 
612 	  /* -(A + B) -> (-A) - B.  */
613 	  if (negate_expr_p (TREE_OPERAND (t, 0)))
614 	    {
615 	      tem = negate_expr (TREE_OPERAND (t, 0));
616 	      return fold_build2_loc (loc, MINUS_EXPR, type,
617 				      tem, TREE_OPERAND (t, 1));
618 	    }
619 	}
620       break;
621 
622     case MINUS_EXPR:
623       /* - (A - B) -> B - A  */
624       if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
625 	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
626 	return fold_build2_loc (loc, MINUS_EXPR, type,
627 				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
628       break;
629 
630     case MULT_EXPR:
631       if (TYPE_UNSIGNED (type))
632         break;
633 
634       /* Fall through.  */
635 
636     case RDIV_EXPR:
637       if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
638 	{
639 	  tem = TREE_OPERAND (t, 1);
640 	  if (negate_expr_p (tem))
641 	    return fold_build2_loc (loc, TREE_CODE (t), type,
642 				    TREE_OPERAND (t, 0), negate_expr (tem));
643 	  tem = TREE_OPERAND (t, 0);
644 	  if (negate_expr_p (tem))
645 	    return fold_build2_loc (loc, TREE_CODE (t), type,
646 				    negate_expr (tem), TREE_OPERAND (t, 1));
647 	}
648       break;
649 
650     case TRUNC_DIV_EXPR:
651     case ROUND_DIV_EXPR:
652     case EXACT_DIV_EXPR:
653       if (TYPE_UNSIGNED (type))
654 	break;
655       if (negate_expr_p (TREE_OPERAND (t, 0)))
656 	return fold_build2_loc (loc, TREE_CODE (t), type,
657 				negate_expr (TREE_OPERAND (t, 0)),
658 				TREE_OPERAND (t, 1));
659       /* In general we can't negate B in A / B, because if A is INT_MIN and
660 	 B is 1, we may turn this into INT_MIN / -1 which is undefined
661 	 and actually traps on some architectures.  */
662       if ((! INTEGRAL_TYPE_P (TREE_TYPE (t))
663 	   || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
664 	   || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
665 	       && ! integer_onep (TREE_OPERAND (t, 1))))
666 	  && negate_expr_p (TREE_OPERAND (t, 1)))
667 	return fold_build2_loc (loc, TREE_CODE (t), type,
668 				TREE_OPERAND (t, 0),
669 				negate_expr (TREE_OPERAND (t, 1)));
670       break;
671 
672     case NOP_EXPR:
673       /* Convert -((double)float) into (double)(-float).  */
674       if (TREE_CODE (type) == REAL_TYPE)
675 	{
676 	  tem = strip_float_extensions (t);
677 	  if (tem != t && negate_expr_p (tem))
678 	    return fold_convert_loc (loc, type, negate_expr (tem));
679 	}
680       break;
681 
682     case CALL_EXPR:
683       /* Negate -f(x) as f(-x).  */
684       if (negate_mathfn_p (get_call_combined_fn (t))
685 	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
686 	{
687 	  tree fndecl, arg;
688 
689 	  fndecl = get_callee_fndecl (t);
690 	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
691 	  return build_call_expr_loc (loc, fndecl, 1, arg);
692 	}
693       break;
694 
695     case RSHIFT_EXPR:
696       /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
697       if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
698 	{
699 	  tree op1 = TREE_OPERAND (t, 1);
700 	  if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
701 	    {
702 	      tree ntype = TYPE_UNSIGNED (type)
703 			   ? signed_type_for (type)
704 			   : unsigned_type_for (type);
705 	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
706 	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
707 	      return fold_convert_loc (loc, type, temp);
708 	    }
709 	}
710       break;
711 
712     default:
713       break;
714     }
715 
716   return NULL_TREE;
717 }
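/* Examples (editorial addition) of the simplifications performed above,
   assuming the relevant overflow and rounding flags permit them:

     -(~a)       ->  a + 1                  (BIT_NOT_EXPR case)
     -(a - b)    ->  b - a                  (MINUS_EXPR case)
     -(-a)       ->  a                      (NEGATE_EXPR case)
     -(a >> 31)  ->  (unsigned) a >> 31     (RSHIFT_EXPR case, 32-bit int)  */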
718 
719 /* A wrapper for fold_negate_expr_1.  */
720 
721 static tree
722 fold_negate_expr (location_t loc, tree t)
723 {
724   tree type = TREE_TYPE (t);
725   STRIP_SIGN_NOPS (t);
726   tree tem = fold_negate_expr_1 (loc, t);
727   if (tem == NULL_TREE)
728     return NULL_TREE;
729   return fold_convert_loc (loc, type, tem);
730 }
731 
732 /* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
733    negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
734    return NULL_TREE. */
735 
736 static tree
737 negate_expr (tree t)
738 {
739   tree type, tem;
740   location_t loc;
741 
742   if (t == NULL_TREE)
743     return NULL_TREE;
744 
745   loc = EXPR_LOCATION (t);
746   type = TREE_TYPE (t);
747   STRIP_SIGN_NOPS (t);
748 
749   tem = fold_negate_expr (loc, t);
750   if (!tem)
751     tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
752   return fold_convert_loc (loc, type, tem);
753 }
754 
755 /* Split a tree IN into constant, literal and variable parts that could be
756    combined with CODE to make IN.  "constant" means an expression with
757    TREE_CONSTANT but that isn't an actual constant.  CODE must be a
758    commutative arithmetic operation.  Store the constant part into *CONP,
759    the literal in *LITP and return the variable part.  If a part isn't
760    present, set it to null.  If the tree does not decompose in this way,
761    return the entire tree as the variable part and the other parts as null.
762 
763    If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
764    case, we negate an operand that was subtracted.  Except if it is a
765    literal for which we use *MINUS_LITP instead.
766 
767    If NEGATE_P is true, we are negating all of IN, again except a literal
768    for which we use *MINUS_LITP instead.  If a variable part is of pointer
769    type, it is negated after converting to TYPE.  This prevents us from
770    generating an illegal MINUS pointer expression.  LOC is the location of
771    the converted variable part.
772 
773    If IN is itself a literal or constant, return it as appropriate.
774 
775    Note that we do not guarantee that any of the three values will be the
776    same type as IN, but they will have the same signedness and mode.  */
777 
778 static tree
779 split_tree (tree in, tree type, enum tree_code code,
780 	    tree *minus_varp, tree *conp, tree *minus_conp,
781 	    tree *litp, tree *minus_litp, int negate_p)
782 {
783   tree var = 0;
784   *minus_varp = 0;
785   *conp = 0;
786   *minus_conp = 0;
787   *litp = 0;
788   *minus_litp = 0;
789 
790   /* Strip any conversions that don't change the machine mode or signedness.  */
791   STRIP_SIGN_NOPS (in);
792 
793   if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
794       || TREE_CODE (in) == FIXED_CST)
795     *litp = in;
796   else if (TREE_CODE (in) == code
797 	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
798 	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
799 	       /* We can associate addition and subtraction together (even
800 		  though the C standard doesn't say so) for integers because
801 		  the value is not affected.  For reals, the value might be
802 		  affected, so we can't.  */
803 	       && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
804 		   || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
805 		   || (code == MINUS_EXPR
806 		       && (TREE_CODE (in) == PLUS_EXPR
807 			   || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
808     {
809       tree op0 = TREE_OPERAND (in, 0);
810       tree op1 = TREE_OPERAND (in, 1);
811       int neg1_p = TREE_CODE (in) == MINUS_EXPR;
812       int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
813 
814       /* First see if either of the operands is a literal, then a constant.  */
815       if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
816 	  || TREE_CODE (op0) == FIXED_CST)
817 	*litp = op0, op0 = 0;
818       else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
819 	       || TREE_CODE (op1) == FIXED_CST)
820 	*litp = op1, neg_litp_p = neg1_p, op1 = 0;
821 
822       if (op0 != 0 && TREE_CONSTANT (op0))
823 	*conp = op0, op0 = 0;
824       else if (op1 != 0 && TREE_CONSTANT (op1))
825 	*conp = op1, neg_conp_p = neg1_p, op1 = 0;
826 
827       /* If we haven't dealt with either operand, this is not a case we can
828 	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
829       if (op0 != 0 && op1 != 0)
830 	var = in;
831       else if (op0 != 0)
832 	var = op0;
833       else
834 	var = op1, neg_var_p = neg1_p;
835 
836       /* Now do any needed negations.  */
837       if (neg_litp_p)
838 	*minus_litp = *litp, *litp = 0;
839       if (neg_conp_p && *conp)
840 	*minus_conp = *conp, *conp = 0;
841       if (neg_var_p && var)
842 	*minus_varp = var, var = 0;
843     }
844   else if (TREE_CONSTANT (in))
845     *conp = in;
846   else if (TREE_CODE (in) == BIT_NOT_EXPR
847 	   && code == PLUS_EXPR)
848     {
849       /* -1 - X is folded to ~X, undo that here.  Do _not_ do this
850          when IN is constant.  */
851       *litp = build_minus_one_cst (type);
852       *minus_varp = TREE_OPERAND (in, 0);
853     }
854   else
855     var = in;
856 
857   if (negate_p)
858     {
859       if (*litp)
860 	*minus_litp = *litp, *litp = 0;
861       else if (*minus_litp)
862 	*litp = *minus_litp, *minus_litp = 0;
863       if (*conp)
864 	*minus_conp = *conp, *conp = 0;
865       else if (*minus_conp)
866 	*conp = *minus_conp, *minus_conp = 0;
867       if (var)
868 	*minus_varp = var, var = 0;
869       else if (*minus_varp)
870 	var = *minus_varp, *minus_varp = 0;
871     }
872 
873   if (*litp
874       && TREE_OVERFLOW_P (*litp))
875     *litp = drop_tree_overflow (*litp);
876   if (*minus_litp
877       && TREE_OVERFLOW_P (*minus_litp))
878     *minus_litp = drop_tree_overflow (*minus_litp);
879 
880   return var;
881 }
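/* Worked examples (editorial addition) for CODE == PLUS_EXPR and
   NEGATE_P == 0:

     IN = a + 5   ->  *LITP = 5,        variable part = a
     IN = a - 5   ->  *MINUS_LITP = 5,  variable part = a
     IN = 5 - a   ->  *LITP = 5,        *MINUS_VARP = a
     IN = ~a      ->  *LITP = -1,       *MINUS_VARP = a  (since ~a == -1 - a)  */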
882 
883 /* Re-associate trees split by the above function.  T1 and T2 are
884    either expressions to associate or null.  Return the new
885    expression, if any.  LOC is the location of the new expression.  If
886    we build an operation, do it in TYPE and with CODE.  */
887 
888 static tree
889 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
890 {
891   if (t1 == 0)
892     {
893       gcc_assert (t2 == 0 || code != MINUS_EXPR);
894       return t2;
895     }
896   else if (t2 == 0)
897     return t1;
898 
899   /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
900      try to fold this since we will have infinite recursion.  But do
901      deal with any NEGATE_EXPRs.  */
902   if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
903       || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
904       || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
905     {
906       if (code == PLUS_EXPR)
907 	{
908 	  if (TREE_CODE (t1) == NEGATE_EXPR)
909 	    return build2_loc (loc, MINUS_EXPR, type,
910 			       fold_convert_loc (loc, type, t2),
911 			       fold_convert_loc (loc, type,
912 						 TREE_OPERAND (t1, 0)));
913 	  else if (TREE_CODE (t2) == NEGATE_EXPR)
914 	    return build2_loc (loc, MINUS_EXPR, type,
915 			       fold_convert_loc (loc, type, t1),
916 			       fold_convert_loc (loc, type,
917 						 TREE_OPERAND (t2, 0)));
918 	  else if (integer_zerop (t2))
919 	    return fold_convert_loc (loc, type, t1);
920 	}
921       else if (code == MINUS_EXPR)
922 	{
923 	  if (integer_zerop (t2))
924 	    return fold_convert_loc (loc, type, t1);
925 	}
926 
927       return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
928 			 fold_convert_loc (loc, type, t2));
929     }
930 
931   return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
932 			  fold_convert_loc (loc, type, t2));
933 }
934 
935 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
936    for use in int_const_binop, size_binop and size_diffop.  */
937 
938 static bool
939 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
940 {
941   if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
942     return false;
943   if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
944     return false;
945 
946   switch (code)
947     {
948     case LSHIFT_EXPR:
949     case RSHIFT_EXPR:
950     case LROTATE_EXPR:
951     case RROTATE_EXPR:
952       return true;
953 
954     default:
955       break;
956     }
957 
958   return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
959 	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
960 	 && TYPE_MODE (type1) == TYPE_MODE (type2);
961 }
962 
963 /* Subroutine of int_const_binop_1 that handles two INTEGER_CSTs.  */
964 
965 static tree
966 int_const_binop_2 (enum tree_code code, const_tree parg1, const_tree parg2,
967 		   int overflowable)
968 {
969   wide_int res;
970   tree t;
971   tree type = TREE_TYPE (parg1);
972   signop sign = TYPE_SIGN (type);
973   bool overflow = false;
974 
975   wi::tree_to_wide_ref arg1 = wi::to_wide (parg1);
976   wide_int arg2 = wi::to_wide (parg2, TYPE_PRECISION (type));
977 
978   switch (code)
979     {
980     case BIT_IOR_EXPR:
981       res = wi::bit_or (arg1, arg2);
982       break;
983 
984     case BIT_XOR_EXPR:
985       res = wi::bit_xor (arg1, arg2);
986       break;
987 
988     case BIT_AND_EXPR:
989       res = wi::bit_and (arg1, arg2);
990       break;
991 
992     case RSHIFT_EXPR:
993     case LSHIFT_EXPR:
994       if (wi::neg_p (arg2))
995 	{
996 	  arg2 = -arg2;
997 	  if (code == RSHIFT_EXPR)
998 	    code = LSHIFT_EXPR;
999 	  else
1000 	    code = RSHIFT_EXPR;
1001 	}
1002 
1003       if (code == RSHIFT_EXPR)
1004 	/* It's unclear from the C standard whether shifts can overflow.
1005 	   The following code ignores overflow; perhaps a C standard
1006 	   interpretation ruling is needed.  */
1007 	res = wi::rshift (arg1, arg2, sign);
1008       else
1009 	res = wi::lshift (arg1, arg2);
1010       break;
1011 
1012     case RROTATE_EXPR:
1013     case LROTATE_EXPR:
1014       if (wi::neg_p (arg2))
1015 	{
1016 	  arg2 = -arg2;
1017 	  if (code == RROTATE_EXPR)
1018 	    code = LROTATE_EXPR;
1019 	  else
1020 	    code = RROTATE_EXPR;
1021 	}
1022 
1023       if (code == RROTATE_EXPR)
1024 	res = wi::rrotate (arg1, arg2);
1025       else
1026 	res = wi::lrotate (arg1, arg2);
1027       break;
1028 
1029     case PLUS_EXPR:
1030       res = wi::add (arg1, arg2, sign, &overflow);
1031       break;
1032 
1033     case MINUS_EXPR:
1034       res = wi::sub (arg1, arg2, sign, &overflow);
1035       break;
1036 
1037     case MULT_EXPR:
1038       res = wi::mul (arg1, arg2, sign, &overflow);
1039       break;
1040 
1041     case MULT_HIGHPART_EXPR:
1042       res = wi::mul_high (arg1, arg2, sign);
1043       break;
1044 
1045     case TRUNC_DIV_EXPR:
1046     case EXACT_DIV_EXPR:
1047       if (arg2 == 0)
1048 	return NULL_TREE;
1049       res = wi::div_trunc (arg1, arg2, sign, &overflow);
1050       break;
1051 
1052     case FLOOR_DIV_EXPR:
1053       if (arg2 == 0)
1054 	return NULL_TREE;
1055       res = wi::div_floor (arg1, arg2, sign, &overflow);
1056       break;
1057 
1058     case CEIL_DIV_EXPR:
1059       if (arg2 == 0)
1060 	return NULL_TREE;
1061       res = wi::div_ceil (arg1, arg2, sign, &overflow);
1062       break;
1063 
1064     case ROUND_DIV_EXPR:
1065       if (arg2 == 0)
1066 	return NULL_TREE;
1067       res = wi::div_round (arg1, arg2, sign, &overflow);
1068       break;
1069 
1070     case TRUNC_MOD_EXPR:
1071       if (arg2 == 0)
1072 	return NULL_TREE;
1073       res = wi::mod_trunc (arg1, arg2, sign, &overflow);
1074       break;
1075 
1076     case FLOOR_MOD_EXPR:
1077       if (arg2 == 0)
1078 	return NULL_TREE;
1079       res = wi::mod_floor (arg1, arg2, sign, &overflow);
1080       break;
1081 
1082     case CEIL_MOD_EXPR:
1083       if (arg2 == 0)
1084 	return NULL_TREE;
1085       res = wi::mod_ceil (arg1, arg2, sign, &overflow);
1086       break;
1087 
1088     case ROUND_MOD_EXPR:
1089       if (arg2 == 0)
1090 	return NULL_TREE;
1091       res = wi::mod_round (arg1, arg2, sign, &overflow);
1092       break;
1093 
1094     case MIN_EXPR:
1095       res = wi::min (arg1, arg2, sign);
1096       break;
1097 
1098     case MAX_EXPR:
1099       res = wi::max (arg1, arg2, sign);
1100       break;
1101 
1102     default:
1103       return NULL_TREE;
1104     }
1105 
1106   t = force_fit_type (type, res, overflowable,
1107 		      (((sign == SIGNED || overflowable == -1)
1108 			&& overflow)
1109 		       | TREE_OVERFLOW (parg1) | TREE_OVERFLOW (parg2)));
1110 
1111   return t;
1112 }
1113 
1114 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1115    to produce a new constant.  Return NULL_TREE if we don't know how
1116    to evaluate CODE at compile-time.  */
1117 
1118 static tree
1119 int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
1120 		   int overflowable)
1121 {
1122   if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
1123     return int_const_binop_2 (code, arg1, arg2, overflowable);
1124 
1125   gcc_assert (NUM_POLY_INT_COEFFS != 1);
1126 
1127   if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
1128     {
1129       poly_wide_int res;
1130       bool overflow;
1131       tree type = TREE_TYPE (arg1);
1132       signop sign = TYPE_SIGN (type);
1133       switch (code)
1134 	{
1135 	case PLUS_EXPR:
1136 	  res = wi::add (wi::to_poly_wide (arg1),
1137 			 wi::to_poly_wide (arg2), sign, &overflow);
1138 	  break;
1139 
1140 	case MINUS_EXPR:
1141 	  res = wi::sub (wi::to_poly_wide (arg1),
1142 			 wi::to_poly_wide (arg2), sign, &overflow);
1143 	  break;
1144 
1145 	case MULT_EXPR:
1146 	  if (TREE_CODE (arg2) == INTEGER_CST)
1147 	    res = wi::mul (wi::to_poly_wide (arg1),
1148 			   wi::to_wide (arg2), sign, &overflow);
1149 	  else if (TREE_CODE (arg1) == INTEGER_CST)
1150 	    res = wi::mul (wi::to_poly_wide (arg2),
1151 			   wi::to_wide (arg1), sign, &overflow);
1152 	  else
1153 	    return NULL_TREE;
1154 	  break;
1155 
1156 	case LSHIFT_EXPR:
1157 	  if (TREE_CODE (arg2) == INTEGER_CST)
1158 	    res = wi::to_poly_wide (arg1) << wi::to_wide (arg2);
1159 	  else
1160 	    return NULL_TREE;
1161 	  break;
1162 
1163 	case BIT_IOR_EXPR:
1164 	  if (TREE_CODE (arg2) != INTEGER_CST
1165 	      || !can_ior_p (wi::to_poly_wide (arg1), wi::to_wide (arg2),
1166 			     &res))
1167 	    return NULL_TREE;
1168 	  break;
1169 
1170 	default:
1171 	  return NULL_TREE;
1172 	}
1173       return force_fit_type (type, res, overflowable,
1174 			     (((sign == SIGNED || overflowable == -1)
1175 			       && overflow)
1176 			      | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
1177     }
1178 
1179   return NULL_TREE;
1180 }
1181 
1182 tree
1183 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
1184 {
1185   return int_const_binop_1 (code, arg1, arg2, 1);
1186 }
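/* Usage sketch (editorial addition), assuming the usual constant
   constructors; the variable names are placeholders:

     tree two   = build_int_cst (integer_type_node, 2);
     tree three = build_int_cst (integer_type_node, 3);
     tree six   = int_const_binop (MULT_EXPR, two, three);
     // six is the INTEGER_CST 6.
     tree bad   = int_const_binop (TRUNC_DIV_EXPR, two,
                                   build_int_cst (integer_type_node, 0));
     // bad is NULL_TREE: division by zero is not evaluated.  */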
1187 
1188 /* Return true if binary operation OP distributes over addition in operand
1189    OPNO, with the other operand being held constant.  OPNO counts from 1.  */
1190 
1191 static bool
1192 distributes_over_addition_p (tree_code op, int opno)
1193 {
1194   switch (op)
1195     {
1196     case PLUS_EXPR:
1197     case MINUS_EXPR:
1198     case MULT_EXPR:
1199       return true;
1200 
1201     case LSHIFT_EXPR:
1202       return opno == 1;
1203 
1204     default:
1205       return false;
1206     }
1207 }
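/* Reasoning example (editorial addition): LSHIFT_EXPR distributes over
   addition in operand 1 because, in the modular arithmetic of the
   element encoding, (a + b) << c == (a << c) + (b << c); it does not
   distribute in the shift-count operand, since in general
   a << (b + c) != (a << b) + (a << c).  */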
1208 
1209 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1210    constant.  We assume ARG1 and ARG2 have the same data type, or at least
1211    are the same kind of constant and the same machine mode.  Return zero if
1212    combining the constants is not allowed in the current operating mode.  */
1213 
1214 static tree
1215 const_binop (enum tree_code code, tree arg1, tree arg2)
1216 {
1217   /* Sanity check for the recursive cases.  */
1218   if (!arg1 || !arg2)
1219     return NULL_TREE;
1220 
1221   STRIP_NOPS (arg1);
1222   STRIP_NOPS (arg2);
1223 
1224   if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
1225     {
1226       if (code == POINTER_PLUS_EXPR)
1227 	return int_const_binop (PLUS_EXPR,
1228 				arg1, fold_convert (TREE_TYPE (arg1), arg2));
1229 
1230       return int_const_binop (code, arg1, arg2);
1231     }
1232 
1233   if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
1234     {
1235       machine_mode mode;
1236       REAL_VALUE_TYPE d1;
1237       REAL_VALUE_TYPE d2;
1238       REAL_VALUE_TYPE value;
1239       REAL_VALUE_TYPE result;
1240       bool inexact;
1241       tree t, type;
1242 
1243       /* The following codes are handled by real_arithmetic.  */
1244       switch (code)
1245 	{
1246 	case PLUS_EXPR:
1247 	case MINUS_EXPR:
1248 	case MULT_EXPR:
1249 	case RDIV_EXPR:
1250 	case MIN_EXPR:
1251 	case MAX_EXPR:
1252 	  break;
1253 
1254 	default:
1255 	  return NULL_TREE;
1256 	}
1257 
1258       d1 = TREE_REAL_CST (arg1);
1259       d2 = TREE_REAL_CST (arg2);
1260 
1261       type = TREE_TYPE (arg1);
1262       mode = TYPE_MODE (type);
1263 
1264       /* Don't perform operation if we honor signaling NaNs and
1265 	 either operand is a signaling NaN.  */
1266       if (HONOR_SNANS (mode)
1267 	  && (REAL_VALUE_ISSIGNALING_NAN (d1)
1268 	      || REAL_VALUE_ISSIGNALING_NAN (d2)))
1269 	return NULL_TREE;
1270 
1271       /* Don't perform operation if it would raise a division
1272 	 by zero exception.  */
1273       if (code == RDIV_EXPR
1274 	  && real_equal (&d2, &dconst0)
1275 	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1276 	return NULL_TREE;
1277 
1278       /* If either operand is a NaN, just return it.  Otherwise, set up
1279 	 for floating-point trap; we return an overflow.  */
1280       if (REAL_VALUE_ISNAN (d1))
1281       {
1282 	/* Make the resulting NaN value a qNaN when flag_signaling_nans
1283 	   is off.  */
1284 	d1.signalling = 0;
1285 	t = build_real (type, d1);
1286 	return t;
1287       }
1288       else if (REAL_VALUE_ISNAN (d2))
1289       {
1290 	/* Make the resulting NaN value a qNaN when flag_signaling_nans
1291 	   is off.  */
1292 	d2.signalling = 0;
1293 	t = build_real (type, d2);
1294 	return t;
1295       }
1296 
1297       inexact = real_arithmetic (&value, code, &d1, &d2);
1298       real_convert (&result, mode, &value);
1299 
1300       /* Don't constant fold this floating point operation if
1301 	 the result has overflowed and flag_trapping_math is set.  */
1302       if (flag_trapping_math
1303 	  && MODE_HAS_INFINITIES (mode)
1304 	  && REAL_VALUE_ISINF (result)
1305 	  && !REAL_VALUE_ISINF (d1)
1306 	  && !REAL_VALUE_ISINF (d2))
1307 	return NULL_TREE;
1308 
1309       /* Don't constant fold this floating point operation if the
1310 	 result may depend upon the run-time rounding mode and
1311 	 flag_rounding_math is set, or if GCC's software emulation
1312 	 is unable to accurately represent the result.  */
1313       if ((flag_rounding_math
1314 	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1315 	  && (inexact || !real_identical (&result, &value)))
1316 	return NULL_TREE;
1317 
1318       t = build_real (type, result);
1319 
1320       TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1321       return t;
1322     }
1323 
1324   if (TREE_CODE (arg1) == FIXED_CST)
1325     {
1326       FIXED_VALUE_TYPE f1;
1327       FIXED_VALUE_TYPE f2;
1328       FIXED_VALUE_TYPE result;
1329       tree t, type;
1330       int sat_p;
1331       bool overflow_p;
1332 
1333       /* The following codes are handled by fixed_arithmetic.  */
1334       switch (code)
1335         {
1336 	case PLUS_EXPR:
1337 	case MINUS_EXPR:
1338 	case MULT_EXPR:
1339 	case TRUNC_DIV_EXPR:
1340 	  if (TREE_CODE (arg2) != FIXED_CST)
1341 	    return NULL_TREE;
1342 	  f2 = TREE_FIXED_CST (arg2);
1343 	  break;
1344 
1345 	case LSHIFT_EXPR:
1346 	case RSHIFT_EXPR:
1347 	  {
1348 	    if (TREE_CODE (arg2) != INTEGER_CST)
1349 	      return NULL_TREE;
1350 	    wi::tree_to_wide_ref w2 = wi::to_wide (arg2);
1351 	    f2.data.high = w2.elt (1);
1352 	    f2.data.low = w2.ulow ();
1353 	    f2.mode = SImode;
1354 	  }
1355 	  break;
1356 
1357         default:
1358 	  return NULL_TREE;
1359         }
1360 
1361       f1 = TREE_FIXED_CST (arg1);
1362       type = TREE_TYPE (arg1);
1363       sat_p = TYPE_SATURATING (type);
1364       overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1365       t = build_fixed (type, result);
1366       /* Propagate overflow flags.  */
1367       if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1368 	TREE_OVERFLOW (t) = 1;
1369       return t;
1370     }
1371 
1372   if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
1373     {
1374       tree type = TREE_TYPE (arg1);
1375       tree r1 = TREE_REALPART (arg1);
1376       tree i1 = TREE_IMAGPART (arg1);
1377       tree r2 = TREE_REALPART (arg2);
1378       tree i2 = TREE_IMAGPART (arg2);
1379       tree real, imag;
1380 
1381       switch (code)
1382 	{
1383 	case PLUS_EXPR:
1384 	case MINUS_EXPR:
1385 	  real = const_binop (code, r1, r2);
1386 	  imag = const_binop (code, i1, i2);
1387 	  break;
1388 
1389 	case MULT_EXPR:
1390 	  if (COMPLEX_FLOAT_TYPE_P (type))
1391 	    return do_mpc_arg2 (arg1, arg2, type,
1392 				/* do_nonfinite= */ folding_initializer,
1393 				mpc_mul);
1394 
1395 	  real = const_binop (MINUS_EXPR,
1396 			      const_binop (MULT_EXPR, r1, r2),
1397 			      const_binop (MULT_EXPR, i1, i2));
1398 	  imag = const_binop (PLUS_EXPR,
1399 			      const_binop (MULT_EXPR, r1, i2),
1400 			      const_binop (MULT_EXPR, i1, r2));
1401 	  break;
1402 
1403 	case RDIV_EXPR:
1404 	  if (COMPLEX_FLOAT_TYPE_P (type))
1405 	    return do_mpc_arg2 (arg1, arg2, type,
1406                                 /* do_nonfinite= */ folding_initializer,
1407 				mpc_div);
1408 	  /* Fallthru. */
1409 	case TRUNC_DIV_EXPR:
1410 	case CEIL_DIV_EXPR:
1411 	case FLOOR_DIV_EXPR:
1412 	case ROUND_DIV_EXPR:
1413 	  if (flag_complex_method == 0)
1414 	  {
1415 	    /* Keep this algorithm in sync with
1416 	       tree-complex.c:expand_complex_div_straight().
1417 
1418 	       Expand complex division to scalars, straightforward algorithm.
1419 	       a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1420 	       t = br*br + bi*bi
1421 	    */
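	    /* Worked example (editorial addition):
	       (3 + 4i) / (1 + 2i): t = 1*1 + 2*2 = 5,
	       real part = (3*1 + 4*2) / 5 = 11/5,
	       imag part = (4*1 - 3*2) / 5 = -2/5.  */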
1422 	    tree magsquared
1423 	      = const_binop (PLUS_EXPR,
1424 			     const_binop (MULT_EXPR, r2, r2),
1425 			     const_binop (MULT_EXPR, i2, i2));
1426 	    tree t1
1427 	      = const_binop (PLUS_EXPR,
1428 			     const_binop (MULT_EXPR, r1, r2),
1429 			     const_binop (MULT_EXPR, i1, i2));
1430 	    tree t2
1431 	      = const_binop (MINUS_EXPR,
1432 			     const_binop (MULT_EXPR, i1, r2),
1433 			     const_binop (MULT_EXPR, r1, i2));
1434 
1435 	    real = const_binop (code, t1, magsquared);
1436 	    imag = const_binop (code, t2, magsquared);
1437 	  }
1438 	  else
1439 	  {
1440 	    /* Keep this algorithm in sync with
1441                tree-complex.c:expand_complex_div_wide().
1442 
1443 	       Expand complex division to scalars, modified algorithm to minimize
1444 	       overflow with wide input ranges.  */
1445 	    tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1446 					fold_abs_const (r2, TREE_TYPE (type)),
1447 					fold_abs_const (i2, TREE_TYPE (type)));
1448 
1449 	    if (integer_nonzerop (compare))
1450 	      {
1451 		/* In the TRUE branch, we compute
1452 		   ratio = br/bi;
1453 		   div = (br * ratio) + bi;
1454 		   tr = (ar * ratio) + ai;
1455 		   ti = (ai * ratio) - ar;
1456 		   tr = tr / div;
1457 		   ti = ti / div;  */
1458 		tree ratio = const_binop (code, r2, i2);
1459 		tree div = const_binop (PLUS_EXPR, i2,
1460 					const_binop (MULT_EXPR, r2, ratio));
1461 		real = const_binop (MULT_EXPR, r1, ratio);
1462 		real = const_binop (PLUS_EXPR, real, i1);
1463 		real = const_binop (code, real, div);
1464 
1465 		imag = const_binop (MULT_EXPR, i1, ratio);
1466 		imag = const_binop (MINUS_EXPR, imag, r1);
1467 		imag = const_binop (code, imag, div);
1468 	      }
1469 	    else
1470 	      {
1471 		/* In the FALSE branch, we compute
1472 		   ratio = d/c;
1473 		   divisor = (d * ratio) + c;
1474 		   tr = (b * ratio) + a;
1475 		   ti = b - (a * ratio);
1476 		   tr = tr / div;
1477 		   ti = ti / div;  */
1478 		tree ratio = const_binop (code, i2, r2);
1479 		tree div = const_binop (PLUS_EXPR, r2,
1480                                         const_binop (MULT_EXPR, i2, ratio));
1481 
1482 		real = const_binop (MULT_EXPR, i1, ratio);
1483 		real = const_binop (PLUS_EXPR, real, r1);
1484 		real = const_binop (code, real, div);
1485 
1486 		imag = const_binop (MULT_EXPR, r1, ratio);
1487 		imag = const_binop (MINUS_EXPR, i1, imag);
1488 		imag = const_binop (code, imag, div);
1489 	      }
1490 	  }
1491 	  break;
1492 
1493 	default:
1494 	  return NULL_TREE;
1495 	}
1496 
1497       if (real && imag)
1498 	return build_complex (type, real, imag);
1499     }
1500 
1501   if (TREE_CODE (arg1) == VECTOR_CST
1502       && TREE_CODE (arg2) == VECTOR_CST
1503       && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)),
1504 		   TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2))))
1505     {
1506       tree type = TREE_TYPE (arg1);
1507       bool step_ok_p;
1508       if (VECTOR_CST_STEPPED_P (arg1)
1509 	  && VECTOR_CST_STEPPED_P (arg2))
1510 	/* We can operate directly on the encoding if:
1511 
1512 	      a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
1513 	    implies
1514 	      (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)
1515 
1516 	   Addition and subtraction are the supported operators
1517 	   for which this is true.  */
1518 	step_ok_p = (code == PLUS_EXPR || code == MINUS_EXPR);
1519       else if (VECTOR_CST_STEPPED_P (arg1))
1520 	/* We can operate directly on stepped encodings if:
1521 
1522 	     a3 - a2 == a2 - a1
1523 	   implies:
1524 	     (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)
1525 
1526 	   which is true if (x -> x op c) distributes over addition.  */
1527 	step_ok_p = distributes_over_addition_p (code, 1);
1528       else
1529 	/* Similarly in reverse.  */
1530 	step_ok_p = distributes_over_addition_p (code, 2);
1531       tree_vector_builder elts;
1532       if (!elts.new_binary_operation (type, arg1, arg2, step_ok_p))
1533 	return NULL_TREE;
1534       unsigned int count = elts.encoded_nelts ();
1535       for (unsigned int i = 0; i < count; ++i)
1536 	{
1537 	  tree elem1 = VECTOR_CST_ELT (arg1, i);
1538 	  tree elem2 = VECTOR_CST_ELT (arg2, i);
1539 
1540 	  tree elt = const_binop (code, elem1, elem2);
1541 
1542 	  /* It is possible that const_binop cannot handle the given
1543 	     code and returns NULL_TREE.  */
1544 	  if (elt == NULL_TREE)
1545 	    return NULL_TREE;
1546 	  elts.quick_push (elt);
1547 	}
1548 
1549       return elts.build ();
1550     }
1551 
1552   /* Shifts allow a scalar offset for a vector.  */
1553   if (TREE_CODE (arg1) == VECTOR_CST
1554       && TREE_CODE (arg2) == INTEGER_CST)
1555     {
1556       tree type = TREE_TYPE (arg1);
1557       bool step_ok_p = distributes_over_addition_p (code, 1);
1558       tree_vector_builder elts;
1559       if (!elts.new_unary_operation (type, arg1, step_ok_p))
1560 	return NULL_TREE;
1561       unsigned int count = elts.encoded_nelts ();
1562       for (unsigned int i = 0; i < count; ++i)
1563 	{
1564 	  tree elem1 = VECTOR_CST_ELT (arg1, i);
1565 
1566 	  tree elt = const_binop (code, elem1, arg2);
1567 
1568 	  /* It is possible that const_binop cannot handle the given
1569 	     code and returns NULL_TREE.  */
1570 	  if (elt == NULL_TREE)
1571 	    return NULL_TREE;
1572 	  elts.quick_push (elt);
1573 	}
1574 
1575       return elts.build ();
1576     }
1577   return NULL_TREE;
1578 }
1579 
1580 /* Overload that adds a TYPE parameter to be able to dispatch
1581    to fold_relational_const.  */
1582 
1583 tree
1584 const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
1585 {
1586   if (TREE_CODE_CLASS (code) == tcc_comparison)
1587     return fold_relational_const (code, type, arg1, arg2);
1588 
1589   /* ???  Until we make the const_binop worker take the type of the
1590      result as argument, put those cases that need it here.  */
1591   switch (code)
1592     {
1593     case VEC_SERIES_EXPR:
1594       if (CONSTANT_CLASS_P (arg1)
1595 	  && CONSTANT_CLASS_P (arg2))
1596 	return build_vec_series (type, arg1, arg2);
1597       return NULL_TREE;
1598 
1599     case COMPLEX_EXPR:
1600       if ((TREE_CODE (arg1) == REAL_CST
1601 	   && TREE_CODE (arg2) == REAL_CST)
1602 	  || (TREE_CODE (arg1) == INTEGER_CST
1603 	      && TREE_CODE (arg2) == INTEGER_CST))
1604 	return build_complex (type, arg1, arg2);
1605       return NULL_TREE;
1606 
1607     case POINTER_DIFF_EXPR:
1608       if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
1609 	{
1610 	  offset_int res = wi::sub (wi::to_offset (arg1),
1611 				    wi::to_offset (arg2));
1612 	  return force_fit_type (type, res, 1,
1613 				 TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1614 	}
1615       return NULL_TREE;
1616 
1617     case VEC_PACK_TRUNC_EXPR:
1618     case VEC_PACK_FIX_TRUNC_EXPR:
1619       {
1620 	unsigned int HOST_WIDE_INT out_nelts, in_nelts, i;
1621 
1622 	if (TREE_CODE (arg1) != VECTOR_CST
1623 	    || TREE_CODE (arg2) != VECTOR_CST)
1624 	  return NULL_TREE;
1625 
1626 	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
1627 	  return NULL_TREE;
1628 
1629 	out_nelts = in_nelts * 2;
1630 	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
1631 		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1632 
1633 	tree_vector_builder elts (type, out_nelts, 1);
1634 	for (i = 0; i < out_nelts; i++)
1635 	  {
1636 	    tree elt = (i < in_nelts
1637 			? VECTOR_CST_ELT (arg1, i)
1638 			: VECTOR_CST_ELT (arg2, i - in_nelts));
1639 	    elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
1640 				      ? NOP_EXPR : FIX_TRUNC_EXPR,
1641 				      TREE_TYPE (type), elt);
1642 	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1643 	      return NULL_TREE;
1644 	    elts.quick_push (elt);
1645 	  }
1646 
1647 	return elts.build ();
1648       }
1649 
1650     case VEC_WIDEN_MULT_LO_EXPR:
1651     case VEC_WIDEN_MULT_HI_EXPR:
1652     case VEC_WIDEN_MULT_EVEN_EXPR:
1653     case VEC_WIDEN_MULT_ODD_EXPR:
1654       {
1655 	unsigned HOST_WIDE_INT out_nelts, in_nelts, out, ofs, scale;
1656 
1657 	if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
1658 	  return NULL_TREE;
1659 
1660 	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
1661 	  return NULL_TREE;
1662 	out_nelts = in_nelts / 2;
1663 	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
1664 		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1665 
1666 	if (code == VEC_WIDEN_MULT_LO_EXPR)
1667 	  scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
1668 	else if (code == VEC_WIDEN_MULT_HI_EXPR)
1669 	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
1670 	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
1671 	  scale = 1, ofs = 0;
1672 	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1673 	  scale = 1, ofs = 1;
1674 
1675 	tree_vector_builder elts (type, out_nelts, 1);
1676 	for (out = 0; out < out_nelts; out++)
1677 	  {
1678 	    unsigned int in = (out << scale) + ofs;
1679 	    tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1680 					  VECTOR_CST_ELT (arg1, in));
1681 	    tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1682 					  VECTOR_CST_ELT (arg2, in));
1683 
1684 	    if (t1 == NULL_TREE || t2 == NULL_TREE)
1685 	      return NULL_TREE;
1686 	    tree elt = const_binop (MULT_EXPR, t1, t2);
1687 	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1688 	      return NULL_TREE;
1689 	    elts.quick_push (elt);
1690 	  }
1691 
1692 	return elts.build ();
1693       }
1694 
1695     default:;
1696     }
1697 
1698   if (TREE_CODE_CLASS (code) != tcc_binary)
1699     return NULL_TREE;
1700 
1701   /* Make sure type and arg0 have the same saturating flag.  */
1702   gcc_checking_assert (TYPE_SATURATING (type)
1703 		       == TYPE_SATURATING (TREE_TYPE (arg1)));
1704 
1705   return const_binop (code, arg1, arg2);
1706 }
1707 
1708 /* Compute CODE ARG0 with resulting type TYPE with ARG0 being constant.
1709    Return zero if computing the constant is not possible.  */
1710 
1711 tree
1712 const_unop (enum tree_code code, tree type, tree arg0)
1713 {
1714   /* Don't perform the operation, other than NEGATE and ABS, if
1715      flag_signaling_nans is on and the operand is a signaling NaN.  */
1716   if (TREE_CODE (arg0) == REAL_CST
1717       && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
1718       && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
1719       && code != NEGATE_EXPR
1720       && code != ABS_EXPR)
1721     return NULL_TREE;
1722 
1723   switch (code)
1724     {
1725     CASE_CONVERT:
1726     case FLOAT_EXPR:
1727     case FIX_TRUNC_EXPR:
1728     case FIXED_CONVERT_EXPR:
1729       return fold_convert_const (code, type, arg0);
1730 
1731     case ADDR_SPACE_CONVERT_EXPR:
1732       /* If the source address is 0, and the source address space
1733 	 cannot have a valid object at 0, fold to dest type null.  */
1734       if (integer_zerop (arg0)
1735 	  && !(targetm.addr_space.zero_address_valid
1736 	       (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
1737 	return fold_convert_const (code, type, arg0);
1738       break;
1739 
1740     case VIEW_CONVERT_EXPR:
1741       return fold_view_convert_expr (type, arg0);
1742 
1743     case NEGATE_EXPR:
1744       {
1745 	/* Can't call fold_negate_const directly here as that doesn't
1746 	   handle all cases and we might not be able to negate some
1747 	   constants.  */
1748 	tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
1749 	if (tem && CONSTANT_CLASS_P (tem))
1750 	  return tem;
1751 	break;
1752       }
1753 
1754     case ABS_EXPR:
1755       if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
1756 	return fold_abs_const (arg0, type);
1757       break;
1758 
1759     case CONJ_EXPR:
1760       if (TREE_CODE (arg0) == COMPLEX_CST)
1761 	{
1762 	  tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
1763 					  TREE_TYPE (type));
1764 	  return build_complex (type, TREE_REALPART (arg0), ipart);
1765 	}
1766       break;
1767 
1768     case BIT_NOT_EXPR:
1769       if (TREE_CODE (arg0) == INTEGER_CST)
1770 	return fold_not_const (arg0, type);
1771       else if (POLY_INT_CST_P (arg0))
1772 	return wide_int_to_tree (type, -poly_int_cst_value (arg0));
1773       /* Perform BIT_NOT_EXPR on each element individually.  */
1774       else if (TREE_CODE (arg0) == VECTOR_CST)
1775 	{
1776 	  tree elem;
1777 
1778 	  /* This can cope with stepped encodings because ~x == -1 - x.  */
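	  /* For example, the stepped series { 0, 1, 2, ... } maps to
	     { -1, -2, -3, ... }, which again has a constant step, so folding
	     just the encoded elements is sufficient.  */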
1779 	  tree_vector_builder elements;
1780 	  elements.new_unary_operation (type, arg0, true);
1781 	  unsigned int i, count = elements.encoded_nelts ();
1782 	  for (i = 0; i < count; ++i)
1783 	    {
1784 	      elem = VECTOR_CST_ELT (arg0, i);
1785 	      elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
1786 	      if (elem == NULL_TREE)
1787 		break;
1788 	      elements.quick_push (elem);
1789 	    }
1790 	  if (i == count)
1791 	    return elements.build ();
1792 	}
1793       break;
1794 
1795     case TRUTH_NOT_EXPR:
1796       if (TREE_CODE (arg0) == INTEGER_CST)
1797 	return constant_boolean_node (integer_zerop (arg0), type);
1798       break;
1799 
1800     case REALPART_EXPR:
1801       if (TREE_CODE (arg0) == COMPLEX_CST)
1802 	return fold_convert (type, TREE_REALPART (arg0));
1803       break;
1804 
1805     case IMAGPART_EXPR:
1806       if (TREE_CODE (arg0) == COMPLEX_CST)
1807 	return fold_convert (type, TREE_IMAGPART (arg0));
1808       break;
1809 
1810     case VEC_UNPACK_LO_EXPR:
1811     case VEC_UNPACK_HI_EXPR:
1812     case VEC_UNPACK_FLOAT_LO_EXPR:
1813     case VEC_UNPACK_FLOAT_HI_EXPR:
1814       {
1815 	unsigned HOST_WIDE_INT out_nelts, in_nelts, i;
1816 	enum tree_code subcode;
1817 
1818 	if (TREE_CODE (arg0) != VECTOR_CST)
1819 	  return NULL_TREE;
1820 
1821 	if (!VECTOR_CST_NELTS (arg0).is_constant (&in_nelts))
1822 	  return NULL_TREE;
1823 	out_nelts = in_nelts / 2;
1824 	gcc_assert (known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1825 
1826 	unsigned int offset = 0;
1827 	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
1828 				   || code == VEC_UNPACK_FLOAT_LO_EXPR))
1829 	  offset = out_nelts;
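
	/* For example (illustrative only), unpacking an 8-element vector into
	   a 4-element result on a little-endian target: the LO variants read
	   elements 0..3 and the HI variants elements 4..7; on a big-endian
	   target the two halves are swapped.  */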
1830 
1831 	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
1832 	  subcode = NOP_EXPR;
1833 	else
1834 	  subcode = FLOAT_EXPR;
1835 
1836 	tree_vector_builder elts (type, out_nelts, 1);
1837 	for (i = 0; i < out_nelts; i++)
1838 	  {
1839 	    tree elt = fold_convert_const (subcode, TREE_TYPE (type),
1840 					   VECTOR_CST_ELT (arg0, i + offset));
1841 	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1842 	      return NULL_TREE;
1843 	    elts.quick_push (elt);
1844 	  }
1845 
1846 	return elts.build ();
1847       }
1848 
1849     case VEC_DUPLICATE_EXPR:
1850       if (CONSTANT_CLASS_P (arg0))
1851 	return build_vector_from_val (type, arg0);
1852       return NULL_TREE;
1853 
1854     default:
1855       break;
1856     }
1857 
1858   return NULL_TREE;
1859 }
1860 
1861 /* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
1862    indicates which particular sizetype to create.  */
1863 
1864 tree
1865 size_int_kind (poly_int64 number, enum size_type_kind kind)
1866 {
1867   return build_int_cst (sizetype_tab[(int) kind], number);
1868 }
1869 
1870 /* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
1871    is a tree code.  The type of the result is taken from the operands.
1872    Both must be equivalent integer types, as per int_binop_types_match_p.
1873    If the operands are constant, so is the result.  */
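
/* A hedged sketch of the constant folding this performs (the values are
   illustrative, not taken from a particular target, and N stands for any
   tree of the matching sizetype):

     size_binop (PLUS_EXPR, size_int (8), size_int (4))  -> sizetype 12
     size_binop (PLUS_EXPR, size_zero_node, N)           -> N unchanged
     size_binop (MULT_EXPR, size_one_node, N)            -> N unchanged

   Non-constant operands simply fall through to fold_build2_loc below.  */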
1874 
1875 tree
1876 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1877 {
1878   tree type = TREE_TYPE (arg0);
1879 
1880   if (arg0 == error_mark_node || arg1 == error_mark_node)
1881     return error_mark_node;
1882 
1883   gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1884                                        TREE_TYPE (arg1)));
1885 
1886   /* Handle the special case of two poly_int constants faster.  */
1887   if (poly_int_tree_p (arg0) && poly_int_tree_p (arg1))
1888     {
1889       /* And some specific cases even faster than that.  */
1890       if (code == PLUS_EXPR)
1891 	{
1892 	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1893 	    return arg1;
1894 	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1895 	    return arg0;
1896 	}
1897       else if (code == MINUS_EXPR)
1898 	{
1899 	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1900 	    return arg0;
1901 	}
1902       else if (code == MULT_EXPR)
1903 	{
1904 	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1905 	    return arg1;
1906 	}
1907 
1908       /* Handle general case of two integer constants.  For sizetype
1909          constant calculations we always want to know about overflow,
1910 	 even in the unsigned case.  */
1911       tree res = int_const_binop_1 (code, arg0, arg1, -1);
1912       if (res != NULL_TREE)
1913 	return res;
1914     }
1915 
1916   return fold_build2_loc (loc, code, type, arg0, arg1);
1917 }
1918 
1919 /* Given two values, either both of sizetype or both of bitsizetype,
1920    compute the difference between the two values.  Return the value
1921    in a signed type corresponding to the type of the operands.  */
1922 
1923 tree
1924 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1925 {
1926   tree type = TREE_TYPE (arg0);
1927   tree ctype;
1928 
1929   gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1930 				       TREE_TYPE (arg1)));
1931 
1932   /* If the type is already signed, just do the simple thing.  */
1933   if (!TYPE_UNSIGNED (type))
1934     return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1935 
1936   if (type == sizetype)
1937     ctype = ssizetype;
1938   else if (type == bitsizetype)
1939     ctype = sbitsizetype;
1940   else
1941     ctype = signed_type_for (type);
1942 
1943   /* If either operand is not a constant, do the conversions to the signed
1944      type and subtract.  The hardware will do the right thing with any
1945      overflow in the subtraction.  */
1946   if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1947     return size_binop_loc (loc, MINUS_EXPR,
1948 			   fold_convert_loc (loc, ctype, arg0),
1949 			   fold_convert_loc (loc, ctype, arg1));
1950 
1951   /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1952      Otherwise, subtract the other way, convert to CTYPE (we know that can't
1953      overflow) and negate (which can't either).  Special-case a result
1954      of zero while we're here.  */
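  /* Worked example (illustrative): with ARG0 = 4 and ARG1 = 12, both of
     sizetype, ARG1 is the larger value, so the result is built as
     0 - (ssizetype) (12 - 4), i.e. the ssizetype constant -8.  */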
1955   if (tree_int_cst_equal (arg0, arg1))
1956     return build_int_cst (ctype, 0);
1957   else if (tree_int_cst_lt (arg1, arg0))
1958     return fold_convert_loc (loc, ctype,
1959 			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1960   else
1961     return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1962 			   fold_convert_loc (loc, ctype,
1963 					     size_binop_loc (loc,
1964 							     MINUS_EXPR,
1965 							     arg1, arg0)));
1966 }
1967 
1968 /* A subroutine of fold_convert_const handling conversions of an
1969    INTEGER_CST to another integer type.  */
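
/* For instance (hedged, illustrative values): truncating the INTEGER_CST
   0x12345 to an 8-bit type keeps the low-order bits and yields 0x45, while
   converting the signed 8-bit constant -1 to a 32-bit unsigned type first
   sign-extends according to the source type and then truncates, yielding
   0xffffffff.  */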
1970 
1971 static tree
1972 fold_convert_const_int_from_int (tree type, const_tree arg1)
1973 {
1974   /* Given an integer constant, make new constant with new type,
1975      appropriately sign-extended or truncated.  Use widest_int
1976      so that any extension is done according to ARG1's type.  */
1977   return force_fit_type (type, wi::to_widest (arg1),
1978 			 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1979 			 TREE_OVERFLOW (arg1));
1980 }
1981 
1982 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1983    to an integer type.  */
1984 
1985 static tree
1986 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1987 {
1988   bool overflow = false;
1989   tree t;
1990 
1991   /* The following code implements the floating point to integer
1992      conversion rules required by the Java Language Specification,
1993      that IEEE NaNs are mapped to zero and values that overflow
1994      the target precision saturate, i.e. values greater than
1995      INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1996      are mapped to INT_MIN.  These semantics are allowed by the
1997      C and C++ standards that simply state that the behavior of
1998      FP-to-integer conversion is unspecified upon overflow.  */
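
  /* Concretely (illustrative): converting the REAL_CST 1.0e30 to a 32-bit
     signed type yields INT_MAX (2147483647) with TREE_OVERFLOW set, and
     converting a NaN yields 0, also with TREE_OVERFLOW set.  */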
1999 
2000   wide_int val;
2001   REAL_VALUE_TYPE r;
2002   REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2003 
2004   switch (code)
2005     {
2006     case FIX_TRUNC_EXPR:
2007       real_trunc (&r, VOIDmode, &x);
2008       break;
2009 
2010     default:
2011       gcc_unreachable ();
2012     }
2013 
2014   /* If R is NaN, return zero and show we have an overflow.  */
2015   if (REAL_VALUE_ISNAN (r))
2016     {
2017       overflow = true;
2018       val = wi::zero (TYPE_PRECISION (type));
2019     }
2020 
2021   /* See if R is less than the lower bound or greater than the
2022      upper bound.  */
2023 
2024   if (! overflow)
2025     {
2026       tree lt = TYPE_MIN_VALUE (type);
2027       REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2028       if (real_less (&r, &l))
2029 	{
2030 	  overflow = true;
2031 	  val = wi::to_wide (lt);
2032 	}
2033     }
2034 
2035   if (! overflow)
2036     {
2037       tree ut = TYPE_MAX_VALUE (type);
2038       if (ut)
2039 	{
2040 	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2041 	  if (real_less (&u, &r))
2042 	    {
2043 	      overflow = true;
2044 	      val = wi::to_wide (ut);
2045 	    }
2046 	}
2047     }
2048 
2049   if (! overflow)
2050     val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
2051 
2052   t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
2053   return t;
2054 }
2055 
2056 /* A subroutine of fold_convert_const handling conversions of a
2057    FIXED_CST to an integer type.  */
2058 
2059 static tree
2060 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2061 {
2062   tree t;
2063   double_int temp, temp_trunc;
2064   scalar_mode mode;
2065 
2066   /* Right shift FIXED_CST to temp by fbit.  */
2067   temp = TREE_FIXED_CST (arg1).data;
2068   mode = TREE_FIXED_CST (arg1).mode;
2069   if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
2070     {
2071       temp = temp.rshift (GET_MODE_FBIT (mode),
2072 			  HOST_BITS_PER_DOUBLE_INT,
2073 			  SIGNED_FIXED_POINT_MODE_P (mode));
2074 
2075       /* Left shift temp to temp_trunc by fbit.  */
2076       temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
2077 				HOST_BITS_PER_DOUBLE_INT,
2078 				SIGNED_FIXED_POINT_MODE_P (mode));
2079     }
2080   else
2081     {
2082       temp = double_int_zero;
2083       temp_trunc = double_int_zero;
2084     }
2085 
2086   /* If FIXED_CST is negative, we need to round the value toward 0:
2087      if the discarded fractional bits are nonzero, add 1 to TEMP.  */
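  /* Worked example (illustrative): for a signed fixed-point value of -2.25
     the arithmetic right shift alone gives -3 (rounding toward negative
     infinity); the discarded fraction is nonzero, so 1 is added back to
     give the truncated-toward-zero result -2.  */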
2088   if (SIGNED_FIXED_POINT_MODE_P (mode)
2089       && temp_trunc.is_negative ()
2090       && TREE_FIXED_CST (arg1).data != temp_trunc)
2091     temp += double_int_one;
2092 
2093   /* Given a fixed-point constant, make new constant with new type,
2094      appropriately sign-extended or truncated.  */
2095   t = force_fit_type (type, temp, -1,
2096 		      (temp.is_negative ()
2097 		       && (TYPE_UNSIGNED (type)
2098 			   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2099 		      | TREE_OVERFLOW (arg1));
2100 
2101   return t;
2102 }
2103 
2104 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2105    to another floating point type.  */
2106 
2107 static tree
2108 fold_convert_const_real_from_real (tree type, const_tree arg1)
2109 {
2110   REAL_VALUE_TYPE value;
2111   tree t;
2112 
2113   /* Don't perform the operation if flag_signaling_nans is on
2114      and the operand is a signaling NaN.  */
2115   if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
2116       && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
2117     return NULL_TREE;
2118 
2119   real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2120   t = build_real (type, value);
2121 
2122   /* If converting an infinity or NAN to a representation that doesn't
2123      have one, set the overflow bit so that we can produce some kind of
2124      error message at the appropriate point if necessary.  It's not the
2125      most user-friendly message, but it's better than nothing.  */
2126   if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2127       && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2128     TREE_OVERFLOW (t) = 1;
2129   else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2130 	   && !MODE_HAS_NANS (TYPE_MODE (type)))
2131     TREE_OVERFLOW (t) = 1;
2132   /* Regular overflow, conversion produced an infinity in a mode that
2133      can't represent them.  */
2134   else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2135 	   && REAL_VALUE_ISINF (value)
2136 	   && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2137     TREE_OVERFLOW (t) = 1;
2138   else
2139     TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2140   return t;
2141 }
2142 
2143 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2144    to a floating point type.  */
2145 
2146 static tree
2147 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2148 {
2149   REAL_VALUE_TYPE value;
2150   tree t;
2151 
2152   real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
2153 			   &TREE_FIXED_CST (arg1));
2154   t = build_real (type, value);
2155 
2156   TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2157   return t;
2158 }
2159 
2160 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2161    to another fixed-point type.  */
2162 
2163 static tree
2164 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2165 {
2166   FIXED_VALUE_TYPE value;
2167   tree t;
2168   bool overflow_p;
2169 
2170   overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
2171 			      &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
2172   t = build_fixed (type, value);
2173 
2174   /* Propagate overflow flags.  */
2175   if (overflow_p | TREE_OVERFLOW (arg1))
2176     TREE_OVERFLOW (t) = 1;
2177   return t;
2178 }
2179 
2180 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2181    to a fixed-point type.  */
2182 
2183 static tree
2184 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2185 {
2186   FIXED_VALUE_TYPE value;
2187   tree t;
2188   bool overflow_p;
2189   double_int di;
2190 
2191   gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2192 
2193   di.low = TREE_INT_CST_ELT (arg1, 0);
2194   if (TREE_INT_CST_NUNITS (arg1) == 1)
2195     di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2196   else
2197     di.high = TREE_INT_CST_ELT (arg1, 1);
2198 
2199   overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
2200 				       TYPE_UNSIGNED (TREE_TYPE (arg1)),
2201 				       TYPE_SATURATING (type));
2202   t = build_fixed (type, value);
2203 
2204   /* Propagate overflow flags.  */
2205   if (overflow_p | TREE_OVERFLOW (arg1))
2206     TREE_OVERFLOW (t) = 1;
2207   return t;
2208 }
2209 
2210 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2211    to a fixed-point type.  */
2212 
2213 static tree
2214 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2215 {
2216   FIXED_VALUE_TYPE value;
2217   tree t;
2218   bool overflow_p;
2219 
2220   overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
2221 					&TREE_REAL_CST (arg1),
2222 					TYPE_SATURATING (type));
2223   t = build_fixed (type, value);
2224 
2225   /* Propagate overflow flags.  */
2226   if (overflow_p | TREE_OVERFLOW (arg1))
2227     TREE_OVERFLOW (t) = 1;
2228   return t;
2229 }
2230 
2231 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2232    type TYPE.  If no simplification can be done return NULL_TREE.  */
2233 
2234 static tree
2235 fold_convert_const (enum tree_code code, tree type, tree arg1)
2236 {
2237   tree arg_type = TREE_TYPE (arg1);
2238   if (arg_type == type)
2239     return arg1;
2240 
2241   /* We can't widen types, since the runtime value could overflow the
2242      original type before being extended to the new type.  */
2243   if (POLY_INT_CST_P (arg1)
2244       && (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2245       && TYPE_PRECISION (type) <= TYPE_PRECISION (arg_type))
2246     return build_poly_int_cst (type,
2247 			       poly_wide_int::from (poly_int_cst_value (arg1),
2248 						    TYPE_PRECISION (type),
2249 						    TYPE_SIGN (arg_type)));
2250 
2251   if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2252       || TREE_CODE (type) == OFFSET_TYPE)
2253     {
2254       if (TREE_CODE (arg1) == INTEGER_CST)
2255 	return fold_convert_const_int_from_int (type, arg1);
2256       else if (TREE_CODE (arg1) == REAL_CST)
2257 	return fold_convert_const_int_from_real (code, type, arg1);
2258       else if (TREE_CODE (arg1) == FIXED_CST)
2259 	return fold_convert_const_int_from_fixed (type, arg1);
2260     }
2261   else if (TREE_CODE (type) == REAL_TYPE)
2262     {
2263       if (TREE_CODE (arg1) == INTEGER_CST)
2264 	return build_real_from_int_cst (type, arg1);
2265       else if (TREE_CODE (arg1) == REAL_CST)
2266 	return fold_convert_const_real_from_real (type, arg1);
2267       else if (TREE_CODE (arg1) == FIXED_CST)
2268 	return fold_convert_const_real_from_fixed (type, arg1);
2269     }
2270   else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2271     {
2272       if (TREE_CODE (arg1) == FIXED_CST)
2273 	return fold_convert_const_fixed_from_fixed (type, arg1);
2274       else if (TREE_CODE (arg1) == INTEGER_CST)
2275 	return fold_convert_const_fixed_from_int (type, arg1);
2276       else if (TREE_CODE (arg1) == REAL_CST)
2277 	return fold_convert_const_fixed_from_real (type, arg1);
2278     }
2279   else if (TREE_CODE (type) == VECTOR_TYPE)
2280     {
2281       if (TREE_CODE (arg1) == VECTOR_CST
2282 	  && known_eq (TYPE_VECTOR_SUBPARTS (type), VECTOR_CST_NELTS (arg1)))
2283 	{
2284 	  tree elttype = TREE_TYPE (type);
2285 	  tree arg1_elttype = TREE_TYPE (TREE_TYPE (arg1));
2286 	  /* We can't handle steps directly when extending, since the
2287 	     values need to wrap at the original precision first.  */
2288 	  bool step_ok_p
2289 	    = (INTEGRAL_TYPE_P (elttype)
2290 	       && INTEGRAL_TYPE_P (arg1_elttype)
2291 	       && TYPE_PRECISION (elttype) <= TYPE_PRECISION (arg1_elttype));
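	  /* Illustrative example: truncating a stepped series such as
	     { 0x100, 0x101, 0x102, ... } to 8-bit elements gives
	     { 0, 1, 2, ... }, still a series with a constant step, whereas
	     extending would require each element to wrap at the narrower
	     precision first, so stepped encodings are rejected there.  */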
2292 	  tree_vector_builder v;
2293 	  if (!v.new_unary_operation (type, arg1, step_ok_p))
2294 	    return NULL_TREE;
2295 	  unsigned int len = v.encoded_nelts ();
2296 	  for (unsigned int i = 0; i < len; ++i)
2297 	    {
2298 	      tree elt = VECTOR_CST_ELT (arg1, i);
2299 	      tree cvt = fold_convert_const (code, elttype, elt);
2300 	      if (cvt == NULL_TREE)
2301 		return NULL_TREE;
2302 	      v.quick_push (cvt);
2303 	    }
2304 	  return v.build ();
2305 	}
2306     }
2307   return NULL_TREE;
2308 }
2309 
2310 /* Construct a vector of zero elements of vector type TYPE.  */
2311 
2312 static tree
2313 build_zero_vector (tree type)
2314 {
2315   tree t;
2316 
2317   t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2318   return build_vector_from_val (type, t);
2319 }
2320 
2321 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR.  */
2322 
2323 bool
2324 fold_convertible_p (const_tree type, const_tree arg)
2325 {
2326   tree orig = TREE_TYPE (arg);
2327 
2328   if (type == orig)
2329     return true;
2330 
2331   if (TREE_CODE (arg) == ERROR_MARK
2332       || TREE_CODE (type) == ERROR_MARK
2333       || TREE_CODE (orig) == ERROR_MARK)
2334     return false;
2335 
2336   if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2337     return true;
2338 
2339   switch (TREE_CODE (type))
2340     {
2341     case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2342     case POINTER_TYPE: case REFERENCE_TYPE:
2343     case OFFSET_TYPE:
2344       return (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2345 	      || TREE_CODE (orig) == OFFSET_TYPE);
2346 
2347     case REAL_TYPE:
2348     case FIXED_POINT_TYPE:
2349     case VECTOR_TYPE:
2350     case VOID_TYPE:
2351       return TREE_CODE (type) == TREE_CODE (orig);
2352 
2353     default:
2354       return false;
2355     }
2356 }
2357 
2358 /* Convert expression ARG to type TYPE.  Used by the middle-end for
2359    simple conversions in preference to calling the front-end's convert.  */
2360 
2361 tree
2362 fold_convert_loc (location_t loc, tree type, tree arg)
2363 {
2364   tree orig = TREE_TYPE (arg);
2365   tree tem;
2366 
2367   if (type == orig)
2368     return arg;
2369 
2370   if (TREE_CODE (arg) == ERROR_MARK
2371       || TREE_CODE (type) == ERROR_MARK
2372       || TREE_CODE (orig) == ERROR_MARK)
2373     return error_mark_node;
2374 
2375   switch (TREE_CODE (type))
2376     {
2377     case POINTER_TYPE:
2378     case REFERENCE_TYPE:
2379       /* Handle conversions between pointers to different address spaces.  */
2380       if (POINTER_TYPE_P (orig)
2381 	  && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2382 	      != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2383 	return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2384       /* fall through */
2385 
2386     case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2387     case OFFSET_TYPE:
2388       if (TREE_CODE (arg) == INTEGER_CST)
2389 	{
2390 	  tem = fold_convert_const (NOP_EXPR, type, arg);
2391 	  if (tem != NULL_TREE)
2392 	    return tem;
2393 	}
2394       if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2395 	  || TREE_CODE (orig) == OFFSET_TYPE)
2396 	return fold_build1_loc (loc, NOP_EXPR, type, arg);
2397       if (TREE_CODE (orig) == COMPLEX_TYPE)
2398 	return fold_convert_loc (loc, type,
2399 				 fold_build1_loc (loc, REALPART_EXPR,
2400 						  TREE_TYPE (orig), arg));
2401       gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2402 		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2403       return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2404 
2405     case REAL_TYPE:
2406       if (TREE_CODE (arg) == INTEGER_CST)
2407 	{
2408 	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
2409 	  if (tem != NULL_TREE)
2410 	    return tem;
2411 	}
2412       else if (TREE_CODE (arg) == REAL_CST)
2413 	{
2414 	  tem = fold_convert_const (NOP_EXPR, type, arg);
2415 	  if (tem != NULL_TREE)
2416 	    return tem;
2417 	}
2418       else if (TREE_CODE (arg) == FIXED_CST)
2419 	{
2420 	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2421 	  if (tem != NULL_TREE)
2422 	    return tem;
2423 	}
2424 
2425       switch (TREE_CODE (orig))
2426 	{
2427 	case INTEGER_TYPE:
2428 	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2429 	case POINTER_TYPE: case REFERENCE_TYPE:
2430 	  return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2431 
2432 	case REAL_TYPE:
2433 	  return fold_build1_loc (loc, NOP_EXPR, type, arg);
2434 
2435 	case FIXED_POINT_TYPE:
2436 	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2437 
2438 	case COMPLEX_TYPE:
2439 	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2440 	  return fold_convert_loc (loc, type, tem);
2441 
2442 	default:
2443 	  gcc_unreachable ();
2444 	}
2445 
2446     case FIXED_POINT_TYPE:
2447       if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2448 	  || TREE_CODE (arg) == REAL_CST)
2449 	{
2450 	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2451 	  if (tem != NULL_TREE)
2452 	    goto fold_convert_exit;
2453 	}
2454 
2455       switch (TREE_CODE (orig))
2456 	{
2457 	case FIXED_POINT_TYPE:
2458 	case INTEGER_TYPE:
2459 	case ENUMERAL_TYPE:
2460 	case BOOLEAN_TYPE:
2461 	case REAL_TYPE:
2462 	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2463 
2464 	case COMPLEX_TYPE:
2465 	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2466 	  return fold_convert_loc (loc, type, tem);
2467 
2468 	default:
2469 	  gcc_unreachable ();
2470 	}
2471 
2472     case COMPLEX_TYPE:
2473       switch (TREE_CODE (orig))
2474 	{
2475 	case INTEGER_TYPE:
2476 	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2477 	case POINTER_TYPE: case REFERENCE_TYPE:
2478 	case REAL_TYPE:
2479 	case FIXED_POINT_TYPE:
2480 	  return fold_build2_loc (loc, COMPLEX_EXPR, type,
2481 			      fold_convert_loc (loc, TREE_TYPE (type), arg),
2482 			      fold_convert_loc (loc, TREE_TYPE (type),
2483 					    integer_zero_node));
2484 	case COMPLEX_TYPE:
2485 	  {
2486 	    tree rpart, ipart;
2487 
2488 	    if (TREE_CODE (arg) == COMPLEX_EXPR)
2489 	      {
2490 		rpart = fold_convert_loc (loc, TREE_TYPE (type),
2491 				      TREE_OPERAND (arg, 0));
2492 		ipart = fold_convert_loc (loc, TREE_TYPE (type),
2493 				      TREE_OPERAND (arg, 1));
2494 		return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2495 	      }
2496 
2497 	    arg = save_expr (arg);
2498 	    rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2499 	    ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2500 	    rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2501 	    ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2502 	    return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2503 	  }
2504 
2505 	default:
2506 	  gcc_unreachable ();
2507 	}
2508 
2509     case VECTOR_TYPE:
2510       if (integer_zerop (arg))
2511 	return build_zero_vector (type);
2512       gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2513       gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2514 		  || TREE_CODE (orig) == VECTOR_TYPE);
2515       return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2516 
2517     case VOID_TYPE:
2518       tem = fold_ignored_result (arg);
2519       return fold_build1_loc (loc, NOP_EXPR, type, tem);
2520 
2521     default:
2522       if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2523 	return fold_build1_loc (loc, NOP_EXPR, type, arg);
2524       gcc_unreachable ();
2525     }
2526  fold_convert_exit:
2527   protected_set_expr_location_unshare (tem, loc);
2528   return tem;
2529 }
2530 
2531 /* Return false if expr can be assumed not to be an lvalue, true
2532    otherwise.  */
2533 
2534 static bool
2535 maybe_lvalue_p (const_tree x)
2536 {
2537   /* We only need to wrap lvalue tree codes.  */
2538   switch (TREE_CODE (x))
2539   {
2540   case VAR_DECL:
2541   case PARM_DECL:
2542   case RESULT_DECL:
2543   case LABEL_DECL:
2544   case FUNCTION_DECL:
2545   case SSA_NAME:
2546 
2547   case COMPONENT_REF:
2548   case MEM_REF:
2549   case INDIRECT_REF:
2550   case ARRAY_REF:
2551   case ARRAY_RANGE_REF:
2552   case BIT_FIELD_REF:
2553   case OBJ_TYPE_REF:
2554 
2555   case REALPART_EXPR:
2556   case IMAGPART_EXPR:
2557   case PREINCREMENT_EXPR:
2558   case PREDECREMENT_EXPR:
2559   case SAVE_EXPR:
2560   case TRY_CATCH_EXPR:
2561   case WITH_CLEANUP_EXPR:
2562   case COMPOUND_EXPR:
2563   case MODIFY_EXPR:
2564   case TARGET_EXPR:
2565   case COND_EXPR:
2566   case BIND_EXPR:
2567     break;
2568 
2569   default:
2570     /* Assume the worst for front-end tree codes.  */
2571     if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2572       break;
2573     return false;
2574   }
2575 
2576   return true;
2577 }
2578 
2579 /* Return an expr equal to X but certainly not valid as an lvalue.  */
2580 
2581 tree
2582 non_lvalue_loc (location_t loc, tree x)
2583 {
2584   /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2585      us.  */
2586   if (in_gimple_form)
2587     return x;
2588 
2589   if (! maybe_lvalue_p (x))
2590     return x;
2591   return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2592 }
2593 
2594 /* When pedantic, return an expr equal to X but certainly not valid as a
2595    pedantic lvalue.  Otherwise, return X.  */
2596 
2597 static tree
2598 pedantic_non_lvalue_loc (location_t loc, tree x)
2599 {
2600   return protected_set_expr_location_unshare (x, loc);
2601 }
2602 
2603 /* Given a tree comparison code, return the code that is the logical inverse.
2604    It is generally not safe to do this for floating-point comparisons, except
2605    for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2606    ERROR_MARK in this case.  */
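
/* For example (with the usual IEEE semantics): when NaNs are honored the
   logical inverse of LT_EXPR is UNGE_EXPR, not GE_EXPR, since both a < b
   and a >= b are false when either operand is a NaN; and if -ftrapping-math
   is also in effect, LT_EXPR cannot be inverted at all, because the
   unordered form would no longer trap on a NaN operand.  */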
2607 
2608 enum tree_code
2609 invert_tree_comparison (enum tree_code code, bool honor_nans)
2610 {
2611   if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2612       && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2613     return ERROR_MARK;
2614 
2615   switch (code)
2616     {
2617     case EQ_EXPR:
2618       return NE_EXPR;
2619     case NE_EXPR:
2620       return EQ_EXPR;
2621     case GT_EXPR:
2622       return honor_nans ? UNLE_EXPR : LE_EXPR;
2623     case GE_EXPR:
2624       return honor_nans ? UNLT_EXPR : LT_EXPR;
2625     case LT_EXPR:
2626       return honor_nans ? UNGE_EXPR : GE_EXPR;
2627     case LE_EXPR:
2628       return honor_nans ? UNGT_EXPR : GT_EXPR;
2629     case LTGT_EXPR:
2630       return UNEQ_EXPR;
2631     case UNEQ_EXPR:
2632       return LTGT_EXPR;
2633     case UNGT_EXPR:
2634       return LE_EXPR;
2635     case UNGE_EXPR:
2636       return LT_EXPR;
2637     case UNLT_EXPR:
2638       return GE_EXPR;
2639     case UNLE_EXPR:
2640       return GT_EXPR;
2641     case ORDERED_EXPR:
2642       return UNORDERED_EXPR;
2643     case UNORDERED_EXPR:
2644       return ORDERED_EXPR;
2645     default:
2646       gcc_unreachable ();
2647     }
2648 }
2649 
2650 /* Similar, but return the comparison that results if the operands are
2651    swapped.  This is safe for floating-point.  */
2652 
2653 enum tree_code
2654 swap_tree_comparison (enum tree_code code)
2655 {
2656   switch (code)
2657     {
2658     case EQ_EXPR:
2659     case NE_EXPR:
2660     case ORDERED_EXPR:
2661     case UNORDERED_EXPR:
2662     case LTGT_EXPR:
2663     case UNEQ_EXPR:
2664       return code;
2665     case GT_EXPR:
2666       return LT_EXPR;
2667     case GE_EXPR:
2668       return LE_EXPR;
2669     case LT_EXPR:
2670       return GT_EXPR;
2671     case LE_EXPR:
2672       return GE_EXPR;
2673     case UNGT_EXPR:
2674       return UNLT_EXPR;
2675     case UNGE_EXPR:
2676       return UNLE_EXPR;
2677     case UNLT_EXPR:
2678       return UNGT_EXPR;
2679     case UNLE_EXPR:
2680       return UNGE_EXPR;
2681     default:
2682       gcc_unreachable ();
2683     }
2684 }
2685 
2686 
2687 /* Convert a comparison tree code from an enum tree_code representation
2688    into a compcode bit-based encoding.  This function is the inverse of
2689    compcode_to_comparison.  */
2690 
2691 static enum comparison_code
2692 comparison_to_compcode (enum tree_code code)
2693 {
2694   switch (code)
2695     {
2696     case LT_EXPR:
2697       return COMPCODE_LT;
2698     case EQ_EXPR:
2699       return COMPCODE_EQ;
2700     case LE_EXPR:
2701       return COMPCODE_LE;
2702     case GT_EXPR:
2703       return COMPCODE_GT;
2704     case NE_EXPR:
2705       return COMPCODE_NE;
2706     case GE_EXPR:
2707       return COMPCODE_GE;
2708     case ORDERED_EXPR:
2709       return COMPCODE_ORD;
2710     case UNORDERED_EXPR:
2711       return COMPCODE_UNORD;
2712     case UNLT_EXPR:
2713       return COMPCODE_UNLT;
2714     case UNEQ_EXPR:
2715       return COMPCODE_UNEQ;
2716     case UNLE_EXPR:
2717       return COMPCODE_UNLE;
2718     case UNGT_EXPR:
2719       return COMPCODE_UNGT;
2720     case LTGT_EXPR:
2721       return COMPCODE_LTGT;
2722     case UNGE_EXPR:
2723       return COMPCODE_UNGE;
2724     default:
2725       gcc_unreachable ();
2726     }
2727 }
2728 
2729 /* Convert a compcode bit-based encoding of a comparison operator back
2730    to GCC's enum tree_code representation.  This function is the
2731    inverse of comparison_to_compcode.  */
2732 
2733 static enum tree_code
2734 compcode_to_comparison (enum comparison_code code)
2735 {
2736   switch (code)
2737     {
2738     case COMPCODE_LT:
2739       return LT_EXPR;
2740     case COMPCODE_EQ:
2741       return EQ_EXPR;
2742     case COMPCODE_LE:
2743       return LE_EXPR;
2744     case COMPCODE_GT:
2745       return GT_EXPR;
2746     case COMPCODE_NE:
2747       return NE_EXPR;
2748     case COMPCODE_GE:
2749       return GE_EXPR;
2750     case COMPCODE_ORD:
2751       return ORDERED_EXPR;
2752     case COMPCODE_UNORD:
2753       return UNORDERED_EXPR;
2754     case COMPCODE_UNLT:
2755       return UNLT_EXPR;
2756     case COMPCODE_UNEQ:
2757       return UNEQ_EXPR;
2758     case COMPCODE_UNLE:
2759       return UNLE_EXPR;
2760     case COMPCODE_UNGT:
2761       return UNGT_EXPR;
2762     case COMPCODE_LTGT:
2763       return LTGT_EXPR;
2764     case COMPCODE_UNGE:
2765       return UNGE_EXPR;
2766     default:
2767       gcc_unreachable ();
2768     }
2769 }
2770 
2771 /* Return a tree for the comparison which is the combination of
2772    doing the AND or OR (depending on CODE) of the two operations LCODE
2773    and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
2774    the possibility of trapping if the mode has NaNs, and return NULL_TREE
2775    if this makes the transformation invalid.  */
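
/* A small illustration (assuming NaNs need not be honored for the operands):
   for (x < y) || (x == y), OR-ing the bit encodings of LT and EQ yields the
   encoding of LE, so the whole expression folds to x <= y; for
   (x < y) && (x == y), AND-ing the encodings yields the FALSE encoding and
   the result folds to a constant false node.  */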
2776 
2777 tree
2778 combine_comparisons (location_t loc,
2779 		     enum tree_code code, enum tree_code lcode,
2780 		     enum tree_code rcode, tree truth_type,
2781 		     tree ll_arg, tree lr_arg)
2782 {
2783   bool honor_nans = HONOR_NANS (ll_arg);
2784   enum comparison_code lcompcode = comparison_to_compcode (lcode);
2785   enum comparison_code rcompcode = comparison_to_compcode (rcode);
2786   int compcode;
2787 
2788   switch (code)
2789     {
2790     case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2791       compcode = lcompcode & rcompcode;
2792       break;
2793 
2794     case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2795       compcode = lcompcode | rcompcode;
2796       break;
2797 
2798     default:
2799       return NULL_TREE;
2800     }
2801 
2802   if (!honor_nans)
2803     {
2804       /* Eliminate unordered comparisons, as well as LTGT and ORD
2805 	 which are not used unless the mode has NaNs.  */
2806       compcode &= ~COMPCODE_UNORD;
2807       if (compcode == COMPCODE_LTGT)
2808 	compcode = COMPCODE_NE;
2809       else if (compcode == COMPCODE_ORD)
2810 	compcode = COMPCODE_TRUE;
2811     }
2812    else if (flag_trapping_math)
2813      {
2814 	/* Check that the original operation and the optimized ones will trap
2815 	   under the same condition.  */
2816 	bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2817 		     && (lcompcode != COMPCODE_EQ)
2818 		     && (lcompcode != COMPCODE_ORD);
2819 	bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2820 		     && (rcompcode != COMPCODE_EQ)
2821 		     && (rcompcode != COMPCODE_ORD);
2822 	bool trap = (compcode & COMPCODE_UNORD) == 0
2823 		    && (compcode != COMPCODE_EQ)
2824 		    && (compcode != COMPCODE_ORD);
2825 
2826         /* In a short-circuited boolean expression the LHS might be
2827 	   such that the RHS, if evaluated, will never trap.  For
2828 	   example, in ORD (x, y) && (x < y), we evaluate the RHS only
2829 	   if neither x nor y is NaN.  (This is a mixed blessing: for
2830 	   example, the expression above will never trap, hence
2831 	   optimizing it to x < y would be invalid).  */
2832         if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2833             || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2834           rtrap = false;
2835 
2836         /* If the comparison was short-circuited, and only the RHS
2837 	   trapped, we may now generate a spurious trap.  */
2838 	if (rtrap && !ltrap
2839 	    && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2840 	  return NULL_TREE;
2841 
2842 	/* If we changed the conditions that cause a trap, we lose.  */
2843 	if ((ltrap || rtrap) != trap)
2844 	  return NULL_TREE;
2845       }
2846 
2847   if (compcode == COMPCODE_TRUE)
2848     return constant_boolean_node (true, truth_type);
2849   else if (compcode == COMPCODE_FALSE)
2850     return constant_boolean_node (false, truth_type);
2851   else
2852     {
2853       enum tree_code tcode;
2854 
2855       tcode = compcode_to_comparison ((enum comparison_code) compcode);
2856       return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2857     }
2858 }
2859 
2860 /* Return nonzero if two operands (typically of the same tree node)
2861    are necessarily equal. FLAGS modifies behavior as follows:
2862 
2863    If OEP_ONLY_CONST is set, only return nonzero for constants.
2864    This function tests whether the operands are indistinguishable;
2865    it does not test whether they are equal using C's == operation.
2866    The distinction is important for IEEE floating point, because
2867    (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2868    (2) two NaNs may be indistinguishable, but NaN!=NaN.
2869 
2870    If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2871    even though it may hold multiple values during a function.
2872    This is because a GCC tree node guarantees that nothing else is
2873    executed between the evaluation of its "operands" (which may often
2874    be evaluated in arbitrary order).  Hence if the operands themselves
2875    don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2876    same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
2877    unset means assuming isochronic (or instantaneous) tree equivalence.
2878    Unless comparing arbitrary expression trees, such as from different
2879    statements, this flag can usually be left unset.
2880 
2881    If OEP_PURE_SAME is set, then pure functions with identical arguments
2882    are considered the same.  It is used when the caller has other ways
2883    to ensure that global memory is unchanged in between.
2884 
2885    If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2886    not values of expressions.
2887 
2888    If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
2889    such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
2890 
2891    Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2892    any operand with side effects.  This is unnecessarily conservative in the
2893    case we know that arg0 and arg1 are in disjoint code paths (such as in
2894    ?: operator).  In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
2895    addresses with TREE_CONSTANT flag set so we know that &var == &var
2896    even if var is volatile.  */
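
/* Two illustrative consequences of the rules above: the REAL_CSTs -0.0 and
   0.0 do not compare equal when signed zeros are honored, even though
   -0.0 == 0.0 in C; and with OEP_ONLY_CONST set, a VAR_DECL does not even
   compare equal to itself, since only constants may return nonzero.  */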
2897 
2898 int
2899 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2900 {
2901   /* When checking, verify at the outermost operand_equal_p call that
2902      if operand_equal_p returns non-zero then ARG0 and ARG1 have the same
2903      hash value.  */
2904   if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
2905     {
2906       if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
2907 	{
2908 	  if (arg0 != arg1)
2909 	    {
2910 	      inchash::hash hstate0 (0), hstate1 (0);
2911 	      inchash::add_expr (arg0, hstate0, flags | OEP_HASH_CHECK);
2912 	      inchash::add_expr (arg1, hstate1, flags | OEP_HASH_CHECK);
2913 	      hashval_t h0 = hstate0.end ();
2914 	      hashval_t h1 = hstate1.end ();
2915 	      gcc_assert (h0 == h1);
2916 	    }
2917 	  return 1;
2918 	}
2919       else
2920 	return 0;
2921     }
2922 
2923   /* If either is ERROR_MARK, they aren't equal.  */
2924   if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2925       || TREE_TYPE (arg0) == error_mark_node
2926       || TREE_TYPE (arg1) == error_mark_node)
2927     return 0;
2928 
2929   /* Similarly, if either does not have a type (like a released SSA name),
2930      they aren't equal.  */
2931   if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2932     return 0;
2933 
2934   /* We cannot consider pointers to different address space equal.  */
2935   if (POINTER_TYPE_P (TREE_TYPE (arg0))
2936       && POINTER_TYPE_P (TREE_TYPE (arg1))
2937       && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2938 	  != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2939     return 0;
2940 
2941   /* Check equality of integer constants before bailing out due to
2942      precision differences.  */
2943   if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2944     {
2945       /* Address of INTEGER_CST is not defined; check that we did not forget
2946 	 to drop the OEP_ADDRESS_OF flags.  */
2947       gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2948       return tree_int_cst_equal (arg0, arg1);
2949     }
2950 
2951   if (!(flags & OEP_ADDRESS_OF))
2952     {
2953       /* If both types don't have the same signedness, then we can't consider
2954 	 them equal.  We must check this before the STRIP_NOPS calls
2955 	 because they may change the signedness of the arguments.  As pointers
2956 	 strictly don't have a signedness, require either two pointers or
2957 	 two non-pointers as well.  */
2958       if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2959 	  || POINTER_TYPE_P (TREE_TYPE (arg0))
2960 			     != POINTER_TYPE_P (TREE_TYPE (arg1)))
2961 	return 0;
2962 
2963       /* If both types don't have the same precision, then it is not safe
2964 	 to strip NOPs.  */
2965       if (element_precision (TREE_TYPE (arg0))
2966 	  != element_precision (TREE_TYPE (arg1)))
2967 	return 0;
2968 
2969       STRIP_NOPS (arg0);
2970       STRIP_NOPS (arg1);
2971     }
2972 #if 0
2973   /* FIXME: Fortran FE currently produce ADDR_EXPR of NOP_EXPR. Enable the
2974      sanity check once the issue is solved.  */
2975   else
2976     /* Addresses of conversions and SSA_NAMEs (and many other things)
2977        are not defined.  Check that we did not forget to drop the
2978        OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags.  */
2979     gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
2980 			 && TREE_CODE (arg0) != SSA_NAME);
2981 #endif
2982 
2983   /* In case both args are comparisons but with different comparison
2984      code, try to swap the comparison operands of one arg to produce
2985      a match and compare that variant.  */
2986   if (TREE_CODE (arg0) != TREE_CODE (arg1)
2987       && COMPARISON_CLASS_P (arg0)
2988       && COMPARISON_CLASS_P (arg1))
2989     {
2990       enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2991 
2992       if (TREE_CODE (arg0) == swap_code)
2993 	return operand_equal_p (TREE_OPERAND (arg0, 0),
2994 			        TREE_OPERAND (arg1, 1), flags)
2995 	       && operand_equal_p (TREE_OPERAND (arg0, 1),
2996 				   TREE_OPERAND (arg1, 0), flags);
2997     }
2998 
2999   if (TREE_CODE (arg0) != TREE_CODE (arg1))
3000     {
3001       /* NOP_EXPR and CONVERT_EXPR are considered equal.  */
3002       if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
3003 	;
3004       else if (flags & OEP_ADDRESS_OF)
3005 	{
3006 	  /* If we are interested in comparing addresses ignore
3007 	     MEM_REF wrappings of the base that can appear just for
3008 	     TBAA reasons.  */
3009 	  if (TREE_CODE (arg0) == MEM_REF
3010 	      && DECL_P (arg1)
3011 	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
3012 	      && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
3013 	      && integer_zerop (TREE_OPERAND (arg0, 1)))
3014 	    return 1;
3015 	  else if (TREE_CODE (arg1) == MEM_REF
3016 		   && DECL_P (arg0)
3017 		   && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
3018 		   && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
3019 		   && integer_zerop (TREE_OPERAND (arg1, 1)))
3020 	    return 1;
3021 	  return 0;
3022 	}
3023       else
3024 	return 0;
3025     }
3026 
3027   /* When not checking addresses, this is needed for conversions and for
3028      COMPONENT_REF.  Might as well play it safe and always test this.  */
3029   if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3030       || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3031       || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
3032 	  && !(flags & OEP_ADDRESS_OF)))
3033     return 0;
3034 
3035   /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3036      We don't care about side effects in that case because the SAVE_EXPR
3037      takes care of that for us. In all other cases, two expressions are
3038      equal if they have no side effects.  If we have two identical
3039      expressions with side effects that should be treated the same due
3040      to the only side effects being identical SAVE_EXPR's, that will
3041      be detected in the recursive calls below.
3042      If we are taking an invariant address of two identical objects
3043      they are necessarily equal as well.  */
3044   if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3045       && (TREE_CODE (arg0) == SAVE_EXPR
3046 	  || (flags & OEP_MATCH_SIDE_EFFECTS)
3047 	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3048     return 1;
3049 
3050   /* Next handle constant cases, those for which we can return 1 even
3051      if ONLY_CONST is set.  */
3052   if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3053     switch (TREE_CODE (arg0))
3054       {
3055       case INTEGER_CST:
3056 	return tree_int_cst_equal (arg0, arg1);
3057 
3058       case FIXED_CST:
3059 	return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3060 				       TREE_FIXED_CST (arg1));
3061 
3062       case REAL_CST:
3063 	if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
3064 	  return 1;
3065 
3066 
3067 	if (!HONOR_SIGNED_ZEROS (arg0))
3068 	  {
3069 	    /* If we do not distinguish between signed and unsigned zero,
3070 	       consider them equal.  */
3071 	    if (real_zerop (arg0) && real_zerop (arg1))
3072 	      return 1;
3073 	  }
3074 	return 0;
3075 
3076       case VECTOR_CST:
3077 	{
3078 	  if (VECTOR_CST_LOG2_NPATTERNS (arg0)
3079 	      != VECTOR_CST_LOG2_NPATTERNS (arg1))
3080 	    return 0;
3081 
3082 	  if (VECTOR_CST_NELTS_PER_PATTERN (arg0)
3083 	      != VECTOR_CST_NELTS_PER_PATTERN (arg1))
3084 	    return 0;
3085 
3086 	  unsigned int count = vector_cst_encoded_nelts (arg0);
3087 	  for (unsigned int i = 0; i < count; ++i)
3088 	    if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0, i),
3089 				  VECTOR_CST_ENCODED_ELT (arg1, i), flags))
3090 	      return 0;
3091 	  return 1;
3092 	}
3093 
3094       case COMPLEX_CST:
3095 	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3096 				 flags)
3097 		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3098 				    flags));
3099 
3100       case STRING_CST:
3101 	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3102 		&& ! memcmp (TREE_STRING_POINTER (arg0),
3103 			      TREE_STRING_POINTER (arg1),
3104 			      TREE_STRING_LENGTH (arg0)));
3105 
3106       case ADDR_EXPR:
3107 	gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3108 	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3109 				flags | OEP_ADDRESS_OF
3110 				| OEP_MATCH_SIDE_EFFECTS);
3111       case CONSTRUCTOR:
3112 	/* In GIMPLE empty constructors are allowed in initializers of
3113 	   aggregates.  */
3114 	return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
3115       default:
3116 	break;
3117       }
3118 
3119   if (flags & OEP_ONLY_CONST)
3120     return 0;
3121 
3122 /* Define macros to test an operand from arg0 and arg1 for equality and a
3123    variant that allows null and views null as being different from any
3124    non-null value.  In the latter case, if either is null, then both
3125    must be; otherwise, do the normal comparison.  */
3126 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N),	\
3127 				    TREE_OPERAND (arg1, N), flags)
3128 
3129 #define OP_SAME_WITH_NULL(N)				\
3130   ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N))	\
3131    ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3132 
3133   switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3134     {
3135     case tcc_unary:
3136       /* Two conversions are equal only if signedness and modes match.  */
3137       switch (TREE_CODE (arg0))
3138         {
3139 	CASE_CONVERT:
3140         case FIX_TRUNC_EXPR:
3141 	  if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3142 	      != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3143 	    return 0;
3144 	  break;
3145 	default:
3146 	  break;
3147 	}
3148 
3149       return OP_SAME (0);
3150 
3151 
3152     case tcc_comparison:
3153     case tcc_binary:
3154       if (OP_SAME (0) && OP_SAME (1))
3155 	return 1;
3156 
3157       /* For commutative ops, allow the other order.  */
3158       return (commutative_tree_code (TREE_CODE (arg0))
3159 	      && operand_equal_p (TREE_OPERAND (arg0, 0),
3160 				  TREE_OPERAND (arg1, 1), flags)
3161 	      && operand_equal_p (TREE_OPERAND (arg0, 1),
3162 				  TREE_OPERAND (arg1, 0), flags));
3163 
3164     case tcc_reference:
3165       /* If either of the pointer (or reference) expressions we are
3166 	 dereferencing contain a side effect, these cannot be equal,
3167 	 but their addresses can be.  */
3168       if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3169 	  && (TREE_SIDE_EFFECTS (arg0)
3170 	      || TREE_SIDE_EFFECTS (arg1)))
3171 	return 0;
3172 
3173       switch (TREE_CODE (arg0))
3174 	{
3175 	case INDIRECT_REF:
3176 	  if (!(flags & OEP_ADDRESS_OF)
3177 	      && (TYPE_ALIGN (TREE_TYPE (arg0))
3178 		  != TYPE_ALIGN (TREE_TYPE (arg1))))
3179 	    return 0;
3180 	  flags &= ~OEP_ADDRESS_OF;
3181 	  return OP_SAME (0);
3182 
3183 	case IMAGPART_EXPR:
3184 	  /* Require the same offset.  */
3185 	  if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3186 				TYPE_SIZE (TREE_TYPE (arg1)),
3187 				flags & ~OEP_ADDRESS_OF))
3188 	    return 0;
3189 
3190 	/* Fallthru.  */
3191 	case REALPART_EXPR:
3192 	case VIEW_CONVERT_EXPR:
3193 	  return OP_SAME (0);
3194 
3195 	case TARGET_MEM_REF:
3196 	case MEM_REF:
3197 	  if (!(flags & OEP_ADDRESS_OF))
3198 	    {
3199 	      /* Require equal access sizes */
3200 	      if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3201 		  && (!TYPE_SIZE (TREE_TYPE (arg0))
3202 		      || !TYPE_SIZE (TREE_TYPE (arg1))
3203 		      || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3204 					   TYPE_SIZE (TREE_TYPE (arg1)),
3205 					   flags)))
3206 		return 0;
3207 	      /* Verify that access happens in similar types.  */
3208 	      if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3209 		return 0;
3210 	      /* Verify that accesses are TBAA compatible.  */
3211 	      if (!alias_ptr_types_compatible_p
3212 		    (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3213 		     TREE_TYPE (TREE_OPERAND (arg1, 1)))
3214 		  || (MR_DEPENDENCE_CLIQUE (arg0)
3215 		      != MR_DEPENDENCE_CLIQUE (arg1))
3216 		  || (MR_DEPENDENCE_BASE (arg0)
3217 		      != MR_DEPENDENCE_BASE (arg1)))
3218 		return 0;
3219 	     /* Verify that alignment is compatible.  */
3220 	     if (TYPE_ALIGN (TREE_TYPE (arg0))
3221 		 != TYPE_ALIGN (TREE_TYPE (arg1)))
3222 		return 0;
3223 	    }
3224 	  flags &= ~OEP_ADDRESS_OF;
3225 	  return (OP_SAME (0) && OP_SAME (1)
3226 		  /* TARGET_MEM_REFs require equal extra operands.  */
3227 		  && (TREE_CODE (arg0) != TARGET_MEM_REF
3228 		      || (OP_SAME_WITH_NULL (2)
3229 			  && OP_SAME_WITH_NULL (3)
3230 			  && OP_SAME_WITH_NULL (4))));
3231 
3232 	case ARRAY_REF:
3233 	case ARRAY_RANGE_REF:
3234 	  if (!OP_SAME (0))
3235 	    return 0;
3236 	  flags &= ~OEP_ADDRESS_OF;
3237 	  /* Compare the array index by value first if it is constant, as we
3238 	     may have the same value in different types here.  */
3239 	  return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3240 				       TREE_OPERAND (arg1, 1))
3241 		   || OP_SAME (1))
3242 		  && OP_SAME_WITH_NULL (2)
3243 		  && OP_SAME_WITH_NULL (3)
3244 		  /* Compare low bound and element size as with OEP_ADDRESS_OF
3245 		     we have to account for the offset of the ref.  */
3246 		  && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3247 		      == TREE_TYPE (TREE_OPERAND (arg1, 0))
3248 		      || (operand_equal_p (array_ref_low_bound
3249 					     (CONST_CAST_TREE (arg0)),
3250 					   array_ref_low_bound
3251 					     (CONST_CAST_TREE (arg1)), flags)
3252 			  && operand_equal_p (array_ref_element_size
3253 					        (CONST_CAST_TREE (arg0)),
3254 					      array_ref_element_size
3255 					        (CONST_CAST_TREE (arg1)),
3256 					      flags))));
3257 
3258 	case COMPONENT_REF:
3259 	  /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
3260 	     may be NULL when we're called to compare MEM_EXPRs.  */
3261 	  if (!OP_SAME_WITH_NULL (0)
3262 	      || !OP_SAME (1))
3263 	    return 0;
3264 	  flags &= ~OEP_ADDRESS_OF;
3265 	  return OP_SAME_WITH_NULL (2);
3266 
3267 	case BIT_FIELD_REF:
3268 	  if (!OP_SAME (0))
3269 	    return 0;
3270 	  flags &= ~OEP_ADDRESS_OF;
3271 	  return OP_SAME (1) && OP_SAME (2);
3272 
3273 	default:
3274 	  return 0;
3275 	}
3276 
3277     case tcc_expression:
3278       switch (TREE_CODE (arg0))
3279 	{
3280 	case ADDR_EXPR:
3281 	  /* Be sure we pass the right ADDRESS_OF flag.  */
3282 	  gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3283 	  return operand_equal_p (TREE_OPERAND (arg0, 0),
3284 				  TREE_OPERAND (arg1, 0),
3285 				  flags | OEP_ADDRESS_OF);
3286 
3287 	case TRUTH_NOT_EXPR:
3288 	  return OP_SAME (0);
3289 
3290 	case TRUTH_ANDIF_EXPR:
3291 	case TRUTH_ORIF_EXPR:
3292 	  return OP_SAME (0) && OP_SAME (1);
3293 
3294 	case FMA_EXPR:
3295 	case WIDEN_MULT_PLUS_EXPR:
3296 	case WIDEN_MULT_MINUS_EXPR:
3297 	  if (!OP_SAME (2))
3298 	    return 0;
3299 	  /* The multiplication operands are commutative.  */
3300 	  /* FALLTHRU */
3301 
3302 	case TRUTH_AND_EXPR:
3303 	case TRUTH_OR_EXPR:
3304 	case TRUTH_XOR_EXPR:
3305 	  if (OP_SAME (0) && OP_SAME (1))
3306 	    return 1;
3307 
3308 	  /* Otherwise take into account this is a commutative operation.  */
3309 	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
3310 				   TREE_OPERAND (arg1, 1), flags)
3311 		  && operand_equal_p (TREE_OPERAND (arg0, 1),
3312 				      TREE_OPERAND (arg1, 0), flags));
3313 
3314 	case COND_EXPR:
3315 	  if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3316 	    return 0;
3317 	  flags &= ~OEP_ADDRESS_OF;
3318 	  return OP_SAME (0);
3319 
3320 	case BIT_INSERT_EXPR:
3321 	  /* BIT_INSERT_EXPR has an implicit operand as the type precision
3322 	     of op1.  Need to check to make sure they are the same.  */
3323 	  if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3324 	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3325 	      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
3326 		 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
3327 	    return false;
3328 	  /* FALLTHRU */
3329 
3330 	case VEC_COND_EXPR:
3331 	case DOT_PROD_EXPR:
3332 	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3333 
3334 	case MODIFY_EXPR:
3335 	case INIT_EXPR:
3336 	case COMPOUND_EXPR:
3337 	case PREDECREMENT_EXPR:
3338 	case PREINCREMENT_EXPR:
3339 	case POSTDECREMENT_EXPR:
3340 	case POSTINCREMENT_EXPR:
3341 	  if (flags & OEP_LEXICOGRAPHIC)
3342 	    return OP_SAME (0) && OP_SAME (1);
3343 	  return 0;
3344 
3345 	case CLEANUP_POINT_EXPR:
3346 	case EXPR_STMT:
3347 	  if (flags & OEP_LEXICOGRAPHIC)
3348 	    return OP_SAME (0);
3349 	  return 0;
3350 
3351 	default:
3352 	  return 0;
3353 	}
3354 
3355     case tcc_vl_exp:
3356       switch (TREE_CODE (arg0))
3357 	{
3358 	case CALL_EXPR:
3359 	  if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3360 	      != (CALL_EXPR_FN (arg1) == NULL_TREE))
3361 	    /* If one CALL_EXPR is an internal function call and the other
3362 	       is a normal function call, then they are not equal.  */
3363 	    return 0;
3364 	  else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3365 	    {
3366 	      /* If the CALL_EXPRs call different internal functions, then they
3367 		 are not equal.  */
3368 	      if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3369 		return 0;
3370 	    }
3371 	  else
3372 	    {
3373 	      /* If the CALL_EXPRs call different functions, then they are not
3374 		 equal.  */
3375 	      if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3376 				     flags))
3377 		return 0;
3378 	    }
3379 
3380 	  /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS.  */
3381 	  {
3382 	    unsigned int cef = call_expr_flags (arg0);
3383 	    if (flags & OEP_PURE_SAME)
3384 	      cef &= ECF_CONST | ECF_PURE;
3385 	    else
3386 	      cef &= ECF_CONST;
3387 	    if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3388 	      return 0;
3389 	  }
3390 
3391 	  /* Now see if all the arguments are the same.  */
3392 	  {
3393 	    const_call_expr_arg_iterator iter0, iter1;
3394 	    const_tree a0, a1;
3395 	    for (a0 = first_const_call_expr_arg (arg0, &iter0),
3396 		   a1 = first_const_call_expr_arg (arg1, &iter1);
3397 		 a0 && a1;
3398 		 a0 = next_const_call_expr_arg (&iter0),
3399 		   a1 = next_const_call_expr_arg (&iter1))
3400 	      if (! operand_equal_p (a0, a1, flags))
3401 		return 0;
3402 
3403 	    /* If we get here and both argument lists are exhausted
3404 	       then the CALL_EXPRs are equal.  */
3405 	    return ! (a0 || a1);
3406 	  }
3407 	default:
3408 	  return 0;
3409 	}
3410 
3411     case tcc_declaration:
3412       /* Consider __builtin_sqrt equal to sqrt.  */
3413       return (TREE_CODE (arg0) == FUNCTION_DECL
3414 	      && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3415 	      && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3416 	      && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3417 
3418     case tcc_exceptional:
3419       if (TREE_CODE (arg0) == CONSTRUCTOR)
3420 	{
3421 	  /* In GIMPLE constructors are used only to build vectors from
3422 	     elements.  Individual elements in the constructor must be
3423 	     indexed in increasing order and form an initial sequence.
3424 
3425 	     We make no effort to compare constructors in GENERIC.
3426 	     (see sem_variable::equals in ipa-icf which can do so for
3427 	      constants).  */
3428 	  if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3429 	      || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3430 	    return 0;
3431 
3432 	  /* Be sure that vectors constructed have the same representation.
3433 	     So far only the element precision and modes have been checked
3434 	     to match.  Vectors may be BLKmode, so also check that the
3435 	     number of parts matches.  */
3436 	  if (maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
3437 			TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))))
3438 	    return 0;
3439 
3440 	  vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3441 	  vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3442 	  unsigned int len = vec_safe_length (v0);
3443 
3444 	  if (len != vec_safe_length (v1))
3445 	    return 0;
3446 
3447 	  for (unsigned int i = 0; i < len; i++)
3448 	    {
3449 	      constructor_elt *c0 = &(*v0)[i];
3450 	      constructor_elt *c1 = &(*v1)[i];
3451 
3452 	      if (!operand_equal_p (c0->value, c1->value, flags)
3453 		  /* In GIMPLE the indexes can be either NULL or matching i.
3454 		     Double check this so we won't get false
3455 		     positives for GENERIC.  */
3456 		  || (c0->index
3457 		      && (TREE_CODE (c0->index) != INTEGER_CST
3458 			  || !compare_tree_int (c0->index, i)))
3459 		  || (c1->index
3460 		      && (TREE_CODE (c1->index) != INTEGER_CST
3461 			  || !compare_tree_int (c1->index, i))))
3462 		return 0;
3463 	    }
3464 	  return 1;
3465 	}
3466       else if (TREE_CODE (arg0) == STATEMENT_LIST
3467 	       && (flags & OEP_LEXICOGRAPHIC))
3468 	{
3469 	  /* Compare the STATEMENT_LISTs.  */
3470 	  tree_stmt_iterator tsi1, tsi2;
3471 	  tree body1 = CONST_CAST_TREE (arg0);
3472 	  tree body2 = CONST_CAST_TREE (arg1);
3473 	  for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3474 	       tsi_next (&tsi1), tsi_next (&tsi2))
3475 	    {
3476 	      /* The lists don't have the same number of statements.  */
3477 	      if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3478 		return 0;
3479 	      if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3480 		return 1;
3481 	      if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3482 				    flags & (OEP_LEXICOGRAPHIC
3483 					     | OEP_NO_HASH_CHECK)))
3484 		return 0;
3485 	    }
3486 	}
3487       return 0;
3488 
3489     case tcc_statement:
3490       switch (TREE_CODE (arg0))
3491 	{
3492 	case RETURN_EXPR:
3493 	  if (flags & OEP_LEXICOGRAPHIC)
3494 	    return OP_SAME_WITH_NULL (0);
3495 	  return 0;
3496 	case DEBUG_BEGIN_STMT:
3497 	  if (flags & OEP_LEXICOGRAPHIC)
3498 	    return 1;
3499 	  return 0;
3500 	default:
3501 	  return 0;
3502 	 }
3503 
3504     default:
3505       return 0;
3506     }
3507 
3508 #undef OP_SAME
3509 #undef OP_SAME_WITH_NULL
3510 }
3511 
3512 /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
3513    with a different signedness or a narrower precision.  */
3514 
3515 static bool
3516 operand_equal_for_comparison_p (tree arg0, tree arg1)
3517 {
3518   if (operand_equal_p (arg0, arg1, 0))
3519     return true;
3520 
3521   if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3522       || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3523     return false;
3524 
3525   /* Discard any conversions that don't change the modes of ARG0 and ARG1
3526      and see if the inner values are the same.  This removes any
3527      signedness comparison, which doesn't matter here.  */
3528   tree op0 = arg0;
3529   tree op1 = arg1;
3530   STRIP_NOPS (op0);
3531   STRIP_NOPS (op1);
3532   if (operand_equal_p (op0, op1, 0))
3533     return true;
3534 
3535   /* Discard a single widening conversion from ARG1 and see if the inner
3536      value is the same as ARG0.  */
3537   if (CONVERT_EXPR_P (arg1)
3538       && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0)))
3539       && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))
3540          < TYPE_PRECISION (TREE_TYPE (arg1))
3541       && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
3542     return true;
3543 
3544   return false;
3545 }
3546 
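/* Editorial illustration, not part of the original source: for a
   signed char C, operand_equal_for_comparison_p considers C and
   (int) C to be variants of one another, since discarding the single
   widening conversion from the second argument exposes the same
   inner value.  */
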
3547 /* See if ARG is an expression that is either a comparison or is performing
3548    arithmetic on comparisons.  The comparisons must only be comparing
3549    two different values, which will be stored in *CVAL1 and *CVAL2; if
3550    they are nonzero it means that some operands have already been found.
3551    No variables may be used anywhere else in the expression except in the
3552    comparisons.
3553 
3554    If this is true, return 1.  Otherwise, return zero.  */
3555 
3556 static int
3557 twoval_comparison_p (tree arg, tree *cval1, tree *cval2)
3558 {
3559   enum tree_code code = TREE_CODE (arg);
3560   enum tree_code_class tclass = TREE_CODE_CLASS (code);
3561 
3562   /* We can handle some of the tcc_expression cases here.  */
3563   if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3564     tclass = tcc_unary;
3565   else if (tclass == tcc_expression
3566 	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3567 	       || code == COMPOUND_EXPR))
3568     tclass = tcc_binary;
3569 
3570   switch (tclass)
3571     {
3572     case tcc_unary:
3573       return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2);
3574 
3575     case tcc_binary:
3576       return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
3577 	      && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2));
3578 
3579     case tcc_constant:
3580       return 1;
3581 
3582     case tcc_expression:
3583       if (code == COND_EXPR)
3584 	return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
3585 		&& twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2)
3586 		&& twoval_comparison_p (TREE_OPERAND (arg, 2), cval1, cval2));
3587       return 0;
3588 
3589     case tcc_comparison:
3590       /* First see if we can handle the first operand, then the second.  For
3591 	 the second operand, we know *CVAL1 can't be zero.  It must be that
3592 	 one side of the comparison is each of the values; test for the
3593 	 case where this isn't true by failing if the two operands
3594 	 are the same.  */
3595 
3596       if (operand_equal_p (TREE_OPERAND (arg, 0),
3597 			   TREE_OPERAND (arg, 1), 0))
3598 	return 0;
3599 
3600       if (*cval1 == 0)
3601 	*cval1 = TREE_OPERAND (arg, 0);
3602       else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3603 	;
3604       else if (*cval2 == 0)
3605 	*cval2 = TREE_OPERAND (arg, 0);
3606       else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3607 	;
3608       else
3609 	return 0;
3610 
3611       if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3612 	;
3613       else if (*cval2 == 0)
3614 	*cval2 = TREE_OPERAND (arg, 1);
3615       else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3616 	;
3617       else
3618 	return 0;
3619 
3620       return 1;
3621 
3622     default:
3623       return 0;
3624     }
3625 }
3626 
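/* Editorial illustration, not part of the original source: for the
   expression

     a < b && a == b

   twoval_comparison_p returns 1 with *CVAL1 == a and *CVAL2 == b,
   since every comparison mentions only those two values.  It returns
   0 for a < a, whose comparison operands are identical, and for
   a < b && b < c, which involves a third value.  */
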
3627 /* ARG is a tree that is known to contain just arithmetic operations and
3628    comparisons.  Evaluate the operations in the tree substituting NEW0 for
3629    any occurrence of OLD0 as an operand of a comparison and likewise for
3630    NEW1 and OLD1.  */
3631 
3632 static tree
3633 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3634 	    tree old1, tree new1)
3635 {
3636   tree type = TREE_TYPE (arg);
3637   enum tree_code code = TREE_CODE (arg);
3638   enum tree_code_class tclass = TREE_CODE_CLASS (code);
3639 
3640   /* We can handle some of the tcc_expression cases here.  */
3641   if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3642     tclass = tcc_unary;
3643   else if (tclass == tcc_expression
3644 	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3645     tclass = tcc_binary;
3646 
3647   switch (tclass)
3648     {
3649     case tcc_unary:
3650       return fold_build1_loc (loc, code, type,
3651 			  eval_subst (loc, TREE_OPERAND (arg, 0),
3652 				      old0, new0, old1, new1));
3653 
3654     case tcc_binary:
3655       return fold_build2_loc (loc, code, type,
3656 			  eval_subst (loc, TREE_OPERAND (arg, 0),
3657 				      old0, new0, old1, new1),
3658 			  eval_subst (loc, TREE_OPERAND (arg, 1),
3659 				      old0, new0, old1, new1));
3660 
3661     case tcc_expression:
3662       switch (code)
3663 	{
3664 	case SAVE_EXPR:
3665 	  return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3666 			     old1, new1);
3667 
3668 	case COMPOUND_EXPR:
3669 	  return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3670 			     old1, new1);
3671 
3672 	case COND_EXPR:
3673 	  return fold_build3_loc (loc, code, type,
3674 			      eval_subst (loc, TREE_OPERAND (arg, 0),
3675 					  old0, new0, old1, new1),
3676 			      eval_subst (loc, TREE_OPERAND (arg, 1),
3677 					  old0, new0, old1, new1),
3678 			      eval_subst (loc, TREE_OPERAND (arg, 2),
3679 					  old0, new0, old1, new1));
3680 	default:
3681 	  break;
3682 	}
3683       /* Fall through - ???  */
3684 
3685     case tcc_comparison:
3686       {
3687 	tree arg0 = TREE_OPERAND (arg, 0);
3688 	tree arg1 = TREE_OPERAND (arg, 1);
3689 
3690 	/* We need to check both for exact equality and tree equality.  The
3691 	   former will be true if the operand has a side-effect.  In that
3692 	   case, we know the operand occurred exactly once.  */
3693 
3694 	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3695 	  arg0 = new0;
3696 	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3697 	  arg0 = new1;
3698 
3699 	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3700 	  arg1 = new0;
3701 	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3702 	  arg1 = new1;
3703 
3704 	return fold_build2_loc (loc, code, type, arg0, arg1);
3705       }
3706 
3707     default:
3708       return arg;
3709     }
3710 }
3711 
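/* Editorial illustration, not part of the original source: calling
   eval_subst on the tree for

     x < y && x == 5

   with OLD0 = x, NEW0 = a, OLD1 = y and NEW1 = b rebuilds it as

     a < b && a == 5

   substituting only inside the comparisons, as the precondition
   above requires.  */
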
3712 /* Return a tree for the case when the result of an expression is RESULT
3713    converted to TYPE and OMITTED was previously an operand of the expression
3714    but is now not needed (e.g., we folded OMITTED * 0).
3715 
3716    If OMITTED has side effects, we must evaluate it.  Otherwise, just do
3717    the conversion of RESULT to TYPE.  */
3718 
3719 tree
3720 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3721 {
3722   tree t = fold_convert_loc (loc, type, result);
3723 
3724   /* If the resulting operand is an empty statement, just return the omitted
3725      statement cast to void.  */
3726   if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3727     return build1_loc (loc, NOP_EXPR, void_type_node,
3728 		       fold_ignored_result (omitted));
3729 
3730   if (TREE_SIDE_EFFECTS (omitted))
3731     return build2_loc (loc, COMPOUND_EXPR, type,
3732 		       fold_ignored_result (omitted), t);
3733 
3734   return non_lvalue_loc (loc, t);
3735 }
3736 
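/* Editorial illustration, not part of the original source: when
   folding f () * 0 to 0, the call cannot simply be discarded.  With
   CALL denoting the tree for f (),

     omit_one_operand_loc (loc, type, integer_zero_node, CALL)

   yields the COMPOUND_EXPR (f (), 0), which still evaluates the call
   for its side effects.  */
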
3737 /* Return a tree for the case when the result of an expression is RESULT
3738    converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3739    of the expression but are now not needed.
3740 
3741    If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3742    If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3743    evaluated before OMITTED2.  Otherwise, if neither has side effects,
3744    just do the conversion of RESULT to TYPE.  */
3745 
3746 tree
3747 omit_two_operands_loc (location_t loc, tree type, tree result,
3748 		       tree omitted1, tree omitted2)
3749 {
3750   tree t = fold_convert_loc (loc, type, result);
3751 
3752   if (TREE_SIDE_EFFECTS (omitted2))
3753     t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3754   if (TREE_SIDE_EFFECTS (omitted1))
3755     t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3756 
3757   return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3758 }
3759 
3760 
3761 /* Return a simplified tree node for the truth-negation of ARG.  This
3762    never alters ARG itself.  We assume that ARG is an operation that
3763    returns a truth value (0 or 1).
3764 
3765    FIXME: one would think we would fold the result, but it causes
3766    problems with the dominator optimizer.  */
3767 
3768 static tree
3769 fold_truth_not_expr (location_t loc, tree arg)
3770 {
3771   tree type = TREE_TYPE (arg);
3772   enum tree_code code = TREE_CODE (arg);
3773   location_t loc1, loc2;
3774 
3775   /* If this is a comparison, we can simply invert it, except for
3776      floating-point non-equality comparisons, in which case we just
3777      enclose a TRUTH_NOT_EXPR around what we have.  */
3778 
3779   if (TREE_CODE_CLASS (code) == tcc_comparison)
3780     {
3781       tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3782       if (FLOAT_TYPE_P (op_type)
3783 	  && flag_trapping_math
3784 	  && code != ORDERED_EXPR && code != UNORDERED_EXPR
3785 	  && code != NE_EXPR && code != EQ_EXPR)
3786 	return NULL_TREE;
3787 
3788       code = invert_tree_comparison (code, HONOR_NANS (op_type));
3789       if (code == ERROR_MARK)
3790 	return NULL_TREE;
3791 
3792       tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3793 			     TREE_OPERAND (arg, 1));
3794       if (TREE_NO_WARNING (arg))
3795 	TREE_NO_WARNING (ret) = 1;
3796       return ret;
3797     }
3798 
3799   switch (code)
3800     {
3801     case INTEGER_CST:
3802       return constant_boolean_node (integer_zerop (arg), type);
3803 
3804     case TRUTH_AND_EXPR:
3805       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3806       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3807       return build2_loc (loc, TRUTH_OR_EXPR, type,
3808 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3809 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3810 
3811     case TRUTH_OR_EXPR:
3812       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3813       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3814       return build2_loc (loc, TRUTH_AND_EXPR, type,
3815 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3816 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3817 
3818     case TRUTH_XOR_EXPR:
3819       /* Here we can invert either operand.  We invert the first operand
3820 	 unless the second operand is a TRUTH_NOT_EXPR in which case our
3821 	 result is the XOR of the first operand with the inside of the
3822 	 negation of the second operand.  */
3823 
3824       if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3825 	return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3826 			   TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3827       else
3828 	return build2_loc (loc, TRUTH_XOR_EXPR, type,
3829 			   invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3830 			   TREE_OPERAND (arg, 1));
3831 
3832     case TRUTH_ANDIF_EXPR:
3833       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3834       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3835       return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3836 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3837 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3838 
3839     case TRUTH_ORIF_EXPR:
3840       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3841       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3842       return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3843 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3844 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3845 
3846     case TRUTH_NOT_EXPR:
3847       return TREE_OPERAND (arg, 0);
3848 
3849     case COND_EXPR:
3850       {
3851 	tree arg1 = TREE_OPERAND (arg, 1);
3852 	tree arg2 = TREE_OPERAND (arg, 2);
3853 
3854 	loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3855 	loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3856 
3857 	/* A COND_EXPR may have a throw as one operand, which
3858 	   then has void type.  Just leave void operands
3859 	   as they are.  */
3860 	return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3861 			   VOID_TYPE_P (TREE_TYPE (arg1))
3862 			   ? arg1 : invert_truthvalue_loc (loc1, arg1),
3863 			   VOID_TYPE_P (TREE_TYPE (arg2))
3864 			   ? arg2 : invert_truthvalue_loc (loc2, arg2));
3865       }
3866 
3867     case COMPOUND_EXPR:
3868       loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3869       return build2_loc (loc, COMPOUND_EXPR, type,
3870 			 TREE_OPERAND (arg, 0),
3871 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3872 
3873     case NON_LVALUE_EXPR:
3874       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3875       return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3876 
3877     CASE_CONVERT:
3878       if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3879 	return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3880 
3881       /* fall through */
3882 
3883     case FLOAT_EXPR:
3884       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3885       return build1_loc (loc, TREE_CODE (arg), type,
3886 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3887 
3888     case BIT_AND_EXPR:
3889       if (!integer_onep (TREE_OPERAND (arg, 1)))
3890 	return NULL_TREE;
3891       return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3892 
3893     case SAVE_EXPR:
3894       return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3895 
3896     case CLEANUP_POINT_EXPR:
3897       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3898       return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3899 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3900 
3901     default:
3902       return NULL_TREE;
3903     }
3904 }
3905 
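/* Editorial illustration, not part of the original source: negating
   a TRUTH_ANDIF_EXPR applies De Morgan's law, so

     ! (a != 0 && b != 0)   becomes   a == 0 || b == 0

   whereas a trapping floating-point x < y makes the function return
   NULL_TREE, since no simple inversion preserves behavior on
   unordered operands.  */
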
3906 /* Fold the truth-negation of ARG.  This never alters ARG itself.  We
3907    assume that ARG is an operation that returns a truth value (0 or 1
3908    for scalars, 0 or -1 for vectors).  Return the folded expression if
3909    folding is successful.  Otherwise, return NULL_TREE.  */
3910 
3911 static tree
3912 fold_invert_truthvalue (location_t loc, tree arg)
3913 {
3914   tree type = TREE_TYPE (arg);
3915   return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3916 			      ? BIT_NOT_EXPR
3917 			      : TRUTH_NOT_EXPR,
3918 			 type, arg);
3919 }
3920 
3921 /* Return a simplified tree node for the truth-negation of ARG.  This
3922    never alters ARG itself.  We assume that ARG is an operation that
3923    returns a truth value (0 or 1 for scalars, 0 or -1 for vectors).  */
3924 
3925 tree
3926 invert_truthvalue_loc (location_t loc, tree arg)
3927 {
3928   if (TREE_CODE (arg) == ERROR_MARK)
3929     return arg;
3930 
3931   tree type = TREE_TYPE (arg);
3932   return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3933 			       ? BIT_NOT_EXPR
3934 			       : TRUTH_NOT_EXPR,
3935 			  type, arg);
3936 }
3937 
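/* Editorial note, not part of the original source: scalar truth
   values are negated with TRUTH_NOT_EXPR, but vector truth values
   with BIT_NOT_EXPR, because vector comparisons produce element-wise
   0 / -1 masks and ~0 == -1 while ~-1 == 0.  */
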
3938 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3939    starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero
3940    and uses reverse storage order if REVERSEP is nonzero.  ORIG_INNER
3941    is the original memory reference used to preserve the alias set of
3942    the access.  */
3943 
3944 static tree
3945 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
3946 		    HOST_WIDE_INT bitsize, poly_int64 bitpos,
3947 		    int unsignedp, int reversep)
3948 {
3949   tree result, bftype;
3950 
3951   /* Attempt not to lose the access path if possible.  */
3952   if (TREE_CODE (orig_inner) == COMPONENT_REF)
3953     {
3954       tree ninner = TREE_OPERAND (orig_inner, 0);
3955       machine_mode nmode;
3956       poly_int64 nbitsize, nbitpos;
3957       tree noffset;
3958       int nunsignedp, nreversep, nvolatilep = 0;
3959       tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
3960 				       &noffset, &nmode, &nunsignedp,
3961 				       &nreversep, &nvolatilep);
3962       if (base == inner
3963 	  && noffset == NULL_TREE
3964 	  && known_subrange_p (bitpos, bitsize, nbitpos, nbitsize)
3965 	  && !reversep
3966 	  && !nreversep
3967 	  && !nvolatilep)
3968 	{
3969 	  inner = ninner;
3970 	  bitpos -= nbitpos;
3971 	}
3972     }
3973 
3974   alias_set_type iset = get_alias_set (orig_inner);
3975   if (iset == 0 && get_alias_set (inner) != iset)
3976     inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
3977 			 build_fold_addr_expr (inner),
3978 			 build_int_cst (ptr_type_node, 0));
3979 
3980   if (known_eq (bitpos, 0) && !reversep)
3981     {
3982       tree size = TYPE_SIZE (TREE_TYPE (inner));
3983       if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3984 	   || POINTER_TYPE_P (TREE_TYPE (inner)))
3985 	  && tree_fits_shwi_p (size)
3986 	  && tree_to_shwi (size) == bitsize)
3987 	return fold_convert_loc (loc, type, inner);
3988     }
3989 
3990   bftype = type;
3991   if (TYPE_PRECISION (bftype) != bitsize
3992       || TYPE_UNSIGNED (bftype) == !unsignedp)
3993     bftype = build_nonstandard_integer_type (bitsize, 0);
3994 
3995   result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3996 		       bitsize_int (bitsize), bitsize_int (bitpos));
3997   REF_REVERSE_STORAGE_ORDER (result) = reversep;
3998 
3999   if (bftype != type)
4000     result = fold_convert_loc (loc, type, result);
4001 
4002   return result;
4003 }
4004 
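/* Editorial illustration, not part of the original source: a request
   for all 32 bits at position 0 of a 32-bit integer INNER folds to
   the plain conversion (TYPE) INNER, while 8 bits at position 16
   become

     BIT_FIELD_REF <INNER, 8, 16>

   converted to TYPE if the 8-bit field type differs from it.  */
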
4005 /* Optimize a bit-field compare.
4006 
4007    There are two cases:  First is a compare against a constant and the
4008    second is a comparison of two items where the fields are at the same
4009    bit position relative to the start of a chunk (byte, halfword, word)
4010    large enough to contain it.  In these cases we can avoid the shift
4011    implicit in bitfield extractions.
4012 
4013    For constants, we emit a compare of the shifted constant with the
4014    BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4015    compared.  For two fields at the same position, we do the ANDs with the
4016    similar mask and compare the result of the ANDs.
4017 
4018    CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4019    COMPARE_TYPE is the type of the comparison, and LHS and RHS
4020    are the left and right operands of the comparison, respectively.
4021 
4022    If the optimization described above can be done, we return the resulting
4023    tree.  Otherwise we return zero.  */
4024 
4025 static tree
4026 optimize_bit_field_compare (location_t loc, enum tree_code code,
4027 			    tree compare_type, tree lhs, tree rhs)
4028 {
4029   poly_int64 plbitpos, plbitsize, rbitpos, rbitsize;
4030   HOST_WIDE_INT lbitpos, lbitsize, nbitpos, nbitsize;
4031   tree type = TREE_TYPE (lhs);
4032   tree unsigned_type;
4033   int const_p = TREE_CODE (rhs) == INTEGER_CST;
4034   machine_mode lmode, rmode;
4035   scalar_int_mode nmode;
4036   int lunsignedp, runsignedp;
4037   int lreversep, rreversep;
4038   int lvolatilep = 0, rvolatilep = 0;
4039   tree linner, rinner = NULL_TREE;
4040   tree mask;
4041   tree offset;
4042 
4043   /* Get all the information about the extractions being done.  If the bit size
4044      is the same as the size of the underlying object, we aren't doing an
4045      extraction at all and so can do nothing.  We also don't want to
4046      do anything if the inner expression is a PLACEHOLDER_EXPR since we
4047      then will no longer be able to replace it.  */
4048   linner = get_inner_reference (lhs, &plbitsize, &plbitpos, &offset, &lmode,
4049 				&lunsignedp, &lreversep, &lvolatilep);
4050   if (linner == lhs
4051       || !known_size_p (plbitsize)
4052       || !plbitsize.is_constant (&lbitsize)
4053       || !plbitpos.is_constant (&lbitpos)
4054       || known_eq (lbitsize, GET_MODE_BITSIZE (lmode))
4055       || offset != 0
4056       || TREE_CODE (linner) == PLACEHOLDER_EXPR
4057       || lvolatilep)
4058     return 0;
4059 
4060   if (const_p)
4061     rreversep = lreversep;
4062   else
4063    {
4064      /* If this is not a constant, we can only do something if bit positions,
4065 	sizes, signedness and storage order are the same.  */
4066      rinner
4067        = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4068 			      &runsignedp, &rreversep, &rvolatilep);
4069 
4070      if (rinner == rhs
4071 	 || maybe_ne (lbitpos, rbitpos)
4072 	 || maybe_ne (lbitsize, rbitsize)
4073 	 || lunsignedp != runsignedp
4074 	 || lreversep != rreversep
4075 	 || offset != 0
4076 	 || TREE_CODE (rinner) == PLACEHOLDER_EXPR
4077 	 || rvolatilep)
4078        return 0;
4079    }
4080 
4081   /* Honor the C++ memory model and mimic what RTL expansion does.  */
4082   poly_uint64 bitstart = 0;
4083   poly_uint64 bitend = 0;
4084   if (TREE_CODE (lhs) == COMPONENT_REF)
4085     {
4086       get_bit_range (&bitstart, &bitend, lhs, &plbitpos, &offset);
4087       if (!plbitpos.is_constant (&lbitpos) || offset != NULL_TREE)
4088 	return 0;
4089     }
4090 
4091   /* See if we can find a mode to refer to this field.  We should be able to,
4092      but fail if we can't.  */
4093   if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
4094 		      const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4095 		      : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4096 			     TYPE_ALIGN (TREE_TYPE (rinner))),
4097 		      BITS_PER_WORD, false, &nmode))
4098     return 0;
4099 
4100   /* Get the unsigned type of the precision of this mode for the
4101      shifts below.  */
4102   unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4103 
4104   /* Compute the bit position and size for the new reference and our offset
4105      within it. If the new reference is the same size as the original, we
4106      won't optimize anything, so return zero.  */
4107   nbitsize = GET_MODE_BITSIZE (nmode);
4108   nbitpos = lbitpos & ~ (nbitsize - 1);
4109   lbitpos -= nbitpos;
4110   if (nbitsize == lbitsize)
4111     return 0;
4112 
4113   if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4114     lbitpos = nbitsize - lbitsize - lbitpos;
4115 
4116   /* Make the mask to be used against the extracted field.  */
4117   mask = build_int_cst_type (unsigned_type, -1);
4118   mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
4119   mask = const_binop (RSHIFT_EXPR, mask,
4120 		      size_int (nbitsize - lbitsize - lbitpos));
4121 
4122   if (! const_p)
4123     {
4124       if (nbitpos < 0)
4125 	return 0;
4126 
4127       /* If not comparing with constant, just rework the comparison
4128 	 and return.  */
4129       tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4130 				    nbitsize, nbitpos, 1, lreversep);
4131       t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
4132       tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
4133 				    nbitsize, nbitpos, 1, rreversep);
4134       t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
4135       return fold_build2_loc (loc, code, compare_type, t1, t2);
4136     }
4137 
4138   /* Otherwise, we are handling the constant case.  See if the constant is too
4139      big for the field.  Warn and return a tree for 0 (false) if so.  We do
4140      this not only for its own sake, but to avoid having to test for this
4141      error case below.  If we didn't, we might generate wrong code.
4142 
4143      For unsigned fields, the constant shifted right by the field length should
4144      be all zero.  For signed fields, the high-order bits should agree with
4145      the sign bit.  */
4146 
4147   if (lunsignedp)
4148     {
4149       if (wi::lrshift (wi::to_wide (rhs), lbitsize) != 0)
4150 	{
4151 	  warning (0, "comparison is always %d due to width of bit-field",
4152 		   code == NE_EXPR);
4153 	  return constant_boolean_node (code == NE_EXPR, compare_type);
4154 	}
4155     }
4156   else
4157     {
4158       wide_int tem = wi::arshift (wi::to_wide (rhs), lbitsize - 1);
4159       if (tem != 0 && tem != -1)
4160 	{
4161 	  warning (0, "comparison is always %d due to width of bit-field",
4162 		   code == NE_EXPR);
4163 	  return constant_boolean_node (code == NE_EXPR, compare_type);
4164 	}
4165     }
4166 
4167   if (nbitpos < 0)
4168     return 0;
4169 
4170   /* Single-bit compares should always be against zero.  */
4171   if (lbitsize == 1 && ! integer_zerop (rhs))
4172     {
4173       code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4174       rhs = build_int_cst (type, 0);
4175     }
4176 
4177   /* Make a new bitfield reference, shift the constant over the
4178      appropriate number of bits and mask it with the computed mask
4179      (in case this was a signed field).  If we changed it, make a new one.  */
4180   lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4181 			    nbitsize, nbitpos, 1, lreversep);
4182 
4183   rhs = const_binop (BIT_AND_EXPR,
4184 		     const_binop (LSHIFT_EXPR,
4185 				  fold_convert_loc (loc, unsigned_type, rhs),
4186 				  size_int (lbitpos)),
4187 		     mask);
4188 
4189   lhs = build2_loc (loc, code, compare_type,
4190 		    build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4191   return lhs;
4192 }
4193 
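/* Editorial sketch, not part of the original source: given

     struct S { unsigned int f : 3; } s;

   a comparison s.f == 5 can be rewritten without the extraction
   shift as roughly

     (WORD & MASK) == ((5 << BITPOS) & MASK)

   where WORD, MASK and BITPOS are placeholders for the mode-sized
   load built by make_bit_field_ref, the field mask and the field's
   position computed above.  */
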
4194 /* Subroutine for fold_truth_andor_1: decode a field reference.
4195 
4196    If EXP is a comparison reference, we return the innermost reference.
4197 
4198    *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4199    set to the starting bit number.
4200 
4201    If the innermost field can be completely contained in a mode-sized
4202    unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.
4203 
4204    *PVOLATILEP is set to 1 if any expression encountered is volatile;
4205    otherwise it is not changed.
4206 
4207    *PUNSIGNEDP is set to the signedness of the field.
4208 
4209    *PREVERSEP is set to the storage order of the field.
4210 
4211    *PMASK is set to the mask used.  This is either contained in a
4212    BIT_AND_EXPR or derived from the width of the field.
4213 
4214    *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4215 
4216    Return 0 if this is not a component reference or is one that we can't
4217    do anything with.  */
4218 
4219 static tree
4220 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4221 			HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4222 			int *punsignedp, int *preversep, int *pvolatilep,
4223 			tree *pmask, tree *pand_mask)
4224 {
4225   tree exp = *exp_;
4226   tree outer_type = 0;
4227   tree and_mask = 0;
4228   tree mask, inner, offset;
4229   tree unsigned_type;
4230   unsigned int precision;
4231 
4232   /* All the optimizations using this function assume integer fields.
4233      There are problems with FP fields since the type_for_size call
4234      below can fail for, e.g., XFmode.  */
4235   if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4236     return 0;
4237 
4238   /* We are interested in the bare arrangement of bits, so strip everything
4239      that doesn't affect the machine mode.  However, record the type of the
4240      outermost expression if it may matter below.  */
4241   if (CONVERT_EXPR_P (exp)
4242       || TREE_CODE (exp) == NON_LVALUE_EXPR)
4243     outer_type = TREE_TYPE (exp);
4244   STRIP_NOPS (exp);
4245 
4246   if (TREE_CODE (exp) == BIT_AND_EXPR)
4247     {
4248       and_mask = TREE_OPERAND (exp, 1);
4249       exp = TREE_OPERAND (exp, 0);
4250       STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4251       if (TREE_CODE (and_mask) != INTEGER_CST)
4252 	return 0;
4253     }
4254 
4255   poly_int64 poly_bitsize, poly_bitpos;
4256   inner = get_inner_reference (exp, &poly_bitsize, &poly_bitpos, &offset,
4257 			       pmode, punsignedp, preversep, pvolatilep);
4258   if ((inner == exp && and_mask == 0)
4259       || !poly_bitsize.is_constant (pbitsize)
4260       || !poly_bitpos.is_constant (pbitpos)
4261       || *pbitsize < 0
4262       || offset != 0
4263       || TREE_CODE (inner) == PLACEHOLDER_EXPR
4264       /* Reject out-of-bound accesses (PR79731).  */
4265       || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4266 	  && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4267 			       *pbitpos + *pbitsize) < 0))
4268     return 0;
4269 
4270   *exp_ = exp;
4271 
4272   /* If the number of bits in the reference is the same as the bitsize of
4273      the outer type, then the outer type gives the signedness. Otherwise
4274      (in case of a small bitfield) the signedness is unchanged.  */
4275   if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4276     *punsignedp = TYPE_UNSIGNED (outer_type);
4277 
4278   /* Compute the mask to access the bitfield.  */
4279   unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4280   precision = TYPE_PRECISION (unsigned_type);
4281 
4282   mask = build_int_cst_type (unsigned_type, -1);
4283 
4284   mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4285   mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4286 
4287   /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
4288   if (and_mask != 0)
4289     mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4290 			fold_convert_loc (loc, unsigned_type, and_mask), mask);
4291 
4292   *pmask = mask;
4293   *pand_mask = and_mask;
4294   return inner;
4295 }
4296 
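/* Editorial illustration, not part of the original source: for an
   expression like s.f & 3, with f an unsigned bit-field, the
   function strips the BIT_AND_EXPR, records 3 in *PAND_MASK, fills
   in the field's size and position, and returns the innermost
   reference; *PMASK is the field-width mask merged with 3.  */
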
4297 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4298    bit positions and the type of MASK is signed.  */
4299 
4300 static int
4301 all_ones_mask_p (const_tree mask, unsigned int size)
4302 {
4303   tree type = TREE_TYPE (mask);
4304   unsigned int precision = TYPE_PRECISION (type);
4305 
4306   /* If this function returns true when the type of the mask is
4307      UNSIGNED, then there will be errors.  In particular see
4308      gcc.c-torture/execute/990326-1.c.  There does not appear to be
4309      any documentation paper trail as to why this is so.  But the
4310      pre-wide-int code worked with that restriction and it has been
4311      preserved here.  */
4312   if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4313     return false;
4314 
4315   return wi::mask (size, false, precision) == wi::to_wide (mask);
4316 }
4317 
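/* Editorial illustration, not part of the original source: in a
   signed 32-bit type the constant 0xff is an all-ones mask for
   SIZE == 8, while the same value in an unsigned type is rejected
   outright by the sign restriction above.  */
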
4318 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
4319    represents the sign bit of EXP's type.  If EXP represents a sign
4320    or zero extension, also test VAL against the unextended type.
4321    The return value is the (sub)expression whose sign bit is VAL,
4322    or NULL_TREE otherwise.  */
4323 
4324 tree
4325 sign_bit_p (tree exp, const_tree val)
4326 {
4327   int width;
4328   tree t;
4329 
4330   /* Tree EXP must have an integral type.  */
4331   t = TREE_TYPE (exp);
4332   if (! INTEGRAL_TYPE_P (t))
4333     return NULL_TREE;
4334 
4335   /* Tree VAL must be an integer constant.  */
4336   if (TREE_CODE (val) != INTEGER_CST
4337       || TREE_OVERFLOW (val))
4338     return NULL_TREE;
4339 
4340   width = TYPE_PRECISION (t);
4341   if (wi::only_sign_bit_p (wi::to_wide (val), width))
4342     return exp;
4343 
4344   /* Handle extension from a narrower type.  */
4345   if (TREE_CODE (exp) == NOP_EXPR
4346       && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4347     return sign_bit_p (TREE_OPERAND (exp, 0), val);
4348 
4349   return NULL_TREE;
4350 }
4351 
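/* Editorial illustration, not part of the original source: for a
   32-bit int EXP, sign_bit_p (EXP, VAL) returns EXP when VAL is
   0x80000000.  If EXP is (int) S with S a 16-bit short, the
   recursion instead returns S when VAL is 0x8000, the sign bit of
   the unextended type.  */
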
4352 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4353    to be evaluated unconditionally.  */
4354 
4355 static int
4356 simple_operand_p (const_tree exp)
4357 {
4358   /* Strip any conversions that don't change the machine mode.  */
4359   STRIP_NOPS (exp);
4360 
4361   return (CONSTANT_CLASS_P (exp)
4362   	  || TREE_CODE (exp) == SSA_NAME
4363 	  || (DECL_P (exp)
4364 	      && ! TREE_ADDRESSABLE (exp)
4365 	      && ! TREE_THIS_VOLATILE (exp)
4366 	      && ! DECL_NONLOCAL (exp)
4367 	      /* Don't regard global variables as simple.  They may be
4368 		 allocated in ways unknown to the compiler (shared memory,
4369 		 #pragma weak, etc).  */
4370 	      && ! TREE_PUBLIC (exp)
4371 	      && ! DECL_EXTERNAL (exp)
4372 	      /* Weakrefs are not safe to be read, since they can be NULL.
4373  		 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4374 		 have DECL_WEAK flag set.  */
4375 	      && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4376 	      /* Loading a static variable is unduly expensive, but global
4377 		 registers aren't expensive.  */
4378 	      && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4379 }
4380 
4381 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4382    to be evaluated unconditionally.
4383    In addition to simple_operand_p, we assume that comparisons, conversions,
4384    and logic-not operations are simple, if their operands are simple, too.  */
4385 
4386 static bool
4387 simple_operand_p_2 (tree exp)
4388 {
4389   enum tree_code code;
4390 
4391   if (TREE_SIDE_EFFECTS (exp)
4392       || tree_could_trap_p (exp))
4393     return false;
4394 
4395   while (CONVERT_EXPR_P (exp))
4396     exp = TREE_OPERAND (exp, 0);
4397 
4398   code = TREE_CODE (exp);
4399 
4400   if (TREE_CODE_CLASS (code) == tcc_comparison)
4401     return (simple_operand_p (TREE_OPERAND (exp, 0))
4402 	    && simple_operand_p (TREE_OPERAND (exp, 1)));
4403 
4404   if (code == TRUTH_NOT_EXPR)
4405       return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4406 
4407   return simple_operand_p (exp);
4408 }
4409 
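/* Editorial illustration, not part of the original source: a
   comparison such as LOCAL == 0 on a non-volatile, non-addressable
   local variable is simple and may be evaluated unconditionally,
   whereas a load from a global or weak symbol is not, for the
   reasons listed above.  */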
4410 
4411 /* The following functions are subroutines to fold_range_test and allow it to
4412    try to change a logical combination of comparisons into a range test.
4413 
4414    For example, both
4415 	X == 2 || X == 3 || X == 4 || X == 5
4416    and
4417 	X >= 2 && X <= 5
4418    are converted to
4419 	(unsigned) (X - 2) <= 3
4420 
4421    We describe each set of comparisons as being either inside or outside
4422    a range, using a variable named like IN_P, and then describe the
4423    range with a lower and upper bound.  If one of the bounds is omitted,
4424    it represents either the highest or lowest value of the type.
4425 
4426    In the comments below, we represent a range by two numbers in brackets
4427    preceded by a "+" to designate being inside that range, or a "-" to
4428    designate being outside that range, so the condition can be inverted by
4429    flipping the prefix.  An omitted bound is represented by a "-".  For
4430    example, "- [-, 10]" means being outside the range starting at the lowest
4431    possible value and ending at 10, in other words, being greater than 10.
4432    The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4433    always false.
4434 
4435    We set up things so that the missing bounds are handled in a consistent
4436    manner so neither a missing bound nor "true" and "false" need to be
4437    handled using a special case.  */
4438 
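/* Editorial worked example, not part of the original source: for
   X >= 2 && X <= 5 the range is + [2, 5].  Subtracting the low bound
   turns it into + [0, 3] over X - 2, and since an unsigned value is
   always >= 0, the whole test collapses to the single comparison
   (unsigned) (X - 2) <= 3.  */
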
4439 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4440    of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4441    and UPPER1_P are nonzero if the respective argument is an upper bound
4442    and zero for a lower.  TYPE, if nonzero, is the type of the result; it
4443    must be specified for a comparison.  ARG1 will be converted to ARG0's
4444    type if both are specified.  */
4445 
4446 static tree
4447 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4448 	     tree arg1, int upper1_p)
4449 {
4450   tree tem;
4451   int result;
4452   int sgn0, sgn1;
4453 
4454   /* If neither arg represents infinity, do the normal operation.
4455      Else, if not a comparison, return infinity.  Else handle the special
4456      comparison rules. Note that most of the cases below won't occur, but
4457      are handled for consistency.  */
4458 
4459   if (arg0 != 0 && arg1 != 0)
4460     {
4461       tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4462 			 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4463       STRIP_NOPS (tem);
4464       return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4465     }
4466 
4467   if (TREE_CODE_CLASS (code) != tcc_comparison)
4468     return 0;
4469 
4470   /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4471      for neither.  In real mathematics, we cannot assume open-ended
4472      ranges are the same.  But this is computer arithmetic, where
4473      numbers are finite.  We can therefore model any unbounded bound
4474      with the value Z, Z being greater than any representable number.
4475      This permits us to treat unbounded ranges as equal.  */
4476   sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4477   sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4478   switch (code)
4479     {
4480     case EQ_EXPR:
4481       result = sgn0 == sgn1;
4482       break;
4483     case NE_EXPR:
4484       result = sgn0 != sgn1;
4485       break;
4486     case LT_EXPR:
4487       result = sgn0 < sgn1;
4488       break;
4489     case LE_EXPR:
4490       result = sgn0 <= sgn1;
4491       break;
4492     case GT_EXPR:
4493       result = sgn0 > sgn1;
4494       break;
4495     case GE_EXPR:
4496       result = sgn0 >= sgn1;
4497       break;
4498     default:
4499       gcc_unreachable ();
4500     }
4501 
4502   return constant_boolean_node (result, type);
4503 }
4504 
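/* Editorial illustration, not part of the original source: two
   omitted upper bounds compare equal here, both standing for the
   value Z above, so range_binop (EQ_EXPR, type, 0, 1, 0, 1) yields
   boolean true, and an omitted lower bound compares less than any
   constant bound.  */
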
4505 /* Helper routine for make_range.  Perform one step for it; return the
4506    new expression if the loop should continue or NULL_TREE if it should
4507    stop.  */
4508 
4509 tree
4510 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4511 		 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4512 		 bool *strict_overflow_p)
4513 {
4514   tree arg0_type = TREE_TYPE (arg0);
4515   tree n_low, n_high, low = *p_low, high = *p_high;
4516   int in_p = *p_in_p, n_in_p;
4517 
4518   switch (code)
4519     {
4520     case TRUTH_NOT_EXPR:
4521       /* We can only do something if the range is testing for zero.  */
4522       if (low == NULL_TREE || high == NULL_TREE
4523 	  || ! integer_zerop (low) || ! integer_zerop (high))
4524 	return NULL_TREE;
4525       *p_in_p = ! in_p;
4526       return arg0;
4527 
4528     case EQ_EXPR: case NE_EXPR:
4529     case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4530       /* We can only do something if the range is testing for zero
4531 	 and if the second operand is an integer constant.  Note that
4532 	 saying something is "in" the range we make is done by
4533 	 complementing IN_P, since IN_P is set for the initial case of
4534 	 being not equal to zero; "out" leaves it alone.  */
4535       if (low == NULL_TREE || high == NULL_TREE
4536 	  || ! integer_zerop (low) || ! integer_zerop (high)
4537 	  || TREE_CODE (arg1) != INTEGER_CST)
4538 	return NULL_TREE;
4539 
4540       switch (code)
4541 	{
4542 	case NE_EXPR:  /* - [c, c]  */
4543 	  low = high = arg1;
4544 	  break;
4545 	case EQ_EXPR:  /* + [c, c]  */
4546 	  in_p = ! in_p, low = high = arg1;
4547 	  break;
4548 	case GT_EXPR:  /* - [-, c] */
4549 	  low = 0, high = arg1;
4550 	  break;
4551 	case GE_EXPR:  /* + [c, -] */
4552 	  in_p = ! in_p, low = arg1, high = 0;
4553 	  break;
4554 	case LT_EXPR:  /* - [c, -] */
4555 	  low = arg1, high = 0;
4556 	  break;
4557 	case LE_EXPR:  /* + [-, c] */
4558 	  in_p = ! in_p, low = 0, high = arg1;
4559 	  break;
4560 	default:
4561 	  gcc_unreachable ();
4562 	}
4563 
4564       /* If this is an unsigned comparison, we also know that EXP is
4565 	 greater than or equal to zero.  We base the range tests we make
4566 	 on that fact, so we record it here so we can parse existing
4567 	 range tests.  We test arg0_type since often the return type
4568 	 of, e.g. EQ_EXPR, is boolean.  */
4569       if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4570 	{
4571 	  if (! merge_ranges (&n_in_p, &n_low, &n_high,
4572 			      in_p, low, high, 1,
4573 			      build_int_cst (arg0_type, 0),
4574 			      NULL_TREE))
4575 	    return NULL_TREE;
4576 
4577 	  in_p = n_in_p, low = n_low, high = n_high;
4578 
4579 	  /* If the high bound is missing, but we have a nonzero low
4580 	     bound, reverse the range so it goes from zero to the low bound
4581 	     minus 1.  */
4582 	  if (high == 0 && low && ! integer_zerop (low))
4583 	    {
4584 	      in_p = ! in_p;
4585 	      high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4586 				  build_int_cst (TREE_TYPE (low), 1), 0);
4587 	      low = build_int_cst (arg0_type, 0);
4588 	    }
4589 	}
4590 
4591       *p_low = low;
4592       *p_high = high;
4593       *p_in_p = in_p;
4594       return arg0;
4595 
4596     case NEGATE_EXPR:
4597       /* If flag_wrapv and ARG0_TYPE is signed, make sure
4598 	 low and high are non-NULL, then normalize will DTRT.  */
4599       if (!TYPE_UNSIGNED (arg0_type)
4600 	  && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4601 	{
4602 	  if (low == NULL_TREE)
4603 	    low = TYPE_MIN_VALUE (arg0_type);
4604 	  if (high == NULL_TREE)
4605 	    high = TYPE_MAX_VALUE (arg0_type);
4606 	}
4607 
4608       /* (-x) IN [a,b] -> x in [-b, -a]  */
4609       n_low = range_binop (MINUS_EXPR, exp_type,
4610 			   build_int_cst (exp_type, 0),
4611 			   0, high, 1);
4612       n_high = range_binop (MINUS_EXPR, exp_type,
4613 			    build_int_cst (exp_type, 0),
4614 			    0, low, 0);
4615       if (n_high != 0 && TREE_OVERFLOW (n_high))
4616 	return NULL_TREE;
4617       goto normalize;
4618 
4619     case BIT_NOT_EXPR:
4620       /* ~ X -> -X - 1  */
4621       return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4622 			 build_int_cst (exp_type, 1));
4623 
4624     case PLUS_EXPR:
4625     case MINUS_EXPR:
4626       if (TREE_CODE (arg1) != INTEGER_CST)
4627 	return NULL_TREE;
4628 
4629       /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4630 	 move a constant to the other side.  */
4631       if (!TYPE_UNSIGNED (arg0_type)
4632 	  && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4633 	return NULL_TREE;
4634 
4635       /* If EXP is signed, any overflow in the computation is undefined,
4636 	 so we don't worry about it so long as our computations on
4637 	 the bounds don't overflow.  For unsigned, overflow is defined
4638 	 and this is exactly the right thing.  */
4639       n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4640 			   arg0_type, low, 0, arg1, 0);
4641       n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4642 			    arg0_type, high, 1, arg1, 0);
4643       if ((n_low != 0 && TREE_OVERFLOW (n_low))
4644 	  || (n_high != 0 && TREE_OVERFLOW (n_high)))
4645 	return NULL_TREE;
4646 
4647       if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4648 	*strict_overflow_p = true;
4649 
4650       normalize:
4651 	/* Check for an unsigned range which has wrapped around the maximum
4652 	   value thus making n_high < n_low, and normalize it.  */
4653 	if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4654 	  {
4655 	    low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4656 			       build_int_cst (TREE_TYPE (n_high), 1), 0);
4657 	    high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4658 				build_int_cst (TREE_TYPE (n_low), 1), 0);
4659 
4660 	    /* If the range is of the form +/- [ x+1, x ], we won't
4661 	       be able to normalize it.  But then, it represents the
4662 	       whole range or the empty set, so make it
4663 	       +/- [ -, - ].  */
4664 	    if (tree_int_cst_equal (n_low, low)
4665 		&& tree_int_cst_equal (n_high, high))
4666 	      low = high = 0;
4667 	    else
4668 	      in_p = ! in_p;
4669 	  }
4670 	else
4671 	  low = n_low, high = n_high;
4672 
4673 	*p_low = low;
4674 	*p_high = high;
4675 	*p_in_p = in_p;
4676 	return arg0;
4677 
4678     CASE_CONVERT:
4679     case NON_LVALUE_EXPR:
4680       if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4681 	return NULL_TREE;
4682 
4683       if (! INTEGRAL_TYPE_P (arg0_type)
4684 	  || (low != 0 && ! int_fits_type_p (low, arg0_type))
4685 	  || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4686 	return NULL_TREE;
4687 
4688       n_low = low, n_high = high;
4689 
4690       if (n_low != 0)
4691 	n_low = fold_convert_loc (loc, arg0_type, n_low);
4692 
4693       if (n_high != 0)
4694 	n_high = fold_convert_loc (loc, arg0_type, n_high);
4695 
4696       /* If we're converting arg0 from an unsigned type to exp,
4697 	 a signed type, we will be doing the comparison as unsigned.
4698 	 The tests above have already verified that LOW and HIGH
4699 	 are both positive.
4700 
4701 	 So we have to ensure that we will handle large unsigned
4702 	 values the same way that the current signed bounds treat
4703 	 negative values.  */
4704 
4705       if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4706 	{
4707 	  tree high_positive;
4708 	  tree equiv_type;
4709 	  /* For fixed-point modes, we need to pass the saturating flag
4710 	     as the 2nd parameter.  */
4711 	  if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4712 	    equiv_type
4713 	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4714 						TYPE_SATURATING (arg0_type));
4715 	  else
4716 	    equiv_type
4717 	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4718 
4719 	  /* A range without an upper bound is, naturally, unbounded.
4720 	     Since convert would have cropped a very large value, use
4721 	     the max value for the destination type.  */
4722 	  high_positive
4723 	    = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4724 	      : TYPE_MAX_VALUE (arg0_type);
4725 
4726 	  if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4727 	    high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4728 					     fold_convert_loc (loc, arg0_type,
4729 							       high_positive),
4730 					     build_int_cst (arg0_type, 1));
4731 
4732 	  /* If the low bound is specified, "and" the range with the
4733 	     range for which the original unsigned value will be
4734 	     positive.  */
4735 	  if (low != 0)
4736 	    {
4737 	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4738 				  1, fold_convert_loc (loc, arg0_type,
4739 						       integer_zero_node),
4740 				  high_positive))
4741 		return NULL_TREE;
4742 
4743 	      in_p = (n_in_p == in_p);
4744 	    }
4745 	  else
4746 	    {
4747 	      /* Otherwise, "or" the range with the range of the input
4748 		 that will be interpreted as negative.  */
4749 	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4750 				  1, fold_convert_loc (loc, arg0_type,
4751 						       integer_zero_node),
4752 				  high_positive))
4753 		return NULL_TREE;
4754 
4755 	      in_p = (in_p != n_in_p);
4756 	    }
4757 	}
4758 
4759       *p_low = n_low;
4760       *p_high = n_high;
4761       *p_in_p = in_p;
4762       return arg0;
4763 
4764     default:
4765       return NULL_TREE;
4766     }
4767 }
4768 
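/* Editorial worked example, not part of the original source: if the
   test so far is "X + 10 in [12, 15]" with unsigned X, one step
   through the PLUS_EXPR case moves the constant onto the bounds,
   leaving "X in [2, 5]" and returning the simpler expression X.  */
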
4769 /* Given EXP, a logical expression, set the range it is testing into
4770    variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
4771    actually being tested.  *PLOW and *PHIGH will be made of the same
4772    type as the returned expression.  If EXP is not a comparison, we
4773    will most likely not be returning a useful value and range.  Set
4774    *STRICT_OVERFLOW_P to true if the return value is only valid
4775    because signed overflow is undefined; otherwise, do not change
4776    *STRICT_OVERFLOW_P.  */
4777 
4778 tree
4779 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4780 	    bool *strict_overflow_p)
4781 {
4782   enum tree_code code;
4783   tree arg0, arg1 = NULL_TREE;
4784   tree exp_type, nexp;
4785   int in_p;
4786   tree low, high;
4787   location_t loc = EXPR_LOCATION (exp);
4788 
4789   /* Start with simply saying "EXP != 0" and then look at the code of EXP
4790      and see if we can refine the range.  Some of the cases below may not
4791      happen, but it doesn't seem worth worrying about this.  We "continue"
4792      the outer loop when we've changed something; otherwise we "break"
4793      the switch, which will "break" the while.  */
4794 
4795   in_p = 0;
4796   low = high = build_int_cst (TREE_TYPE (exp), 0);
4797 
4798   while (1)
4799     {
4800       code = TREE_CODE (exp);
4801       exp_type = TREE_TYPE (exp);
4802       arg0 = NULL_TREE;
4803 
4804       if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4805 	{
4806 	  if (TREE_OPERAND_LENGTH (exp) > 0)
4807 	    arg0 = TREE_OPERAND (exp, 0);
4808 	  if (TREE_CODE_CLASS (code) == tcc_binary
4809 	      || TREE_CODE_CLASS (code) == tcc_comparison
4810 	      || (TREE_CODE_CLASS (code) == tcc_expression
4811 		  && TREE_OPERAND_LENGTH (exp) > 1))
4812 	    arg1 = TREE_OPERAND (exp, 1);
4813 	}
4814       if (arg0 == NULL_TREE)
4815 	break;
4816 
4817       nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4818 			      &high, &in_p, strict_overflow_p);
4819       if (nexp == NULL_TREE)
4820 	break;
4821       exp = nexp;
4822     }
4823 
4824   /* If EXP is a constant, we can evaluate whether this is true or false.  */
4825   if (TREE_CODE (exp) == INTEGER_CST)
4826     {
4827       in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4828 						 exp, 0, low, 0))
4829 		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
4830 						    exp, 1, high, 1)));
4831       low = high = 0;
4832       exp = 0;
4833     }
4834 
4835   *pin_p = in_p, *plow = low, *phigh = high;
4836   return exp;
4837 }
4838 
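/* Editorial worked example, not part of the original source: for
   EXP = (x >= 2) the loop above performs a single step and returns
   x with *PIN_P = 1, *PLOW = 2 and *PHIGH = NULL_TREE, i.e. the
   range + [2, -], everything from 2 up to the top of x's type.  */
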
4839 /* Return TRUE if a [LOW, HIGH] range check can be optimized to
4840    a bitwise check, i.e. when
4841      LOW  == 0xXX...X00...0
4842      HIGH == 0xXX...X11...1
4843    Return corresponding mask in MASK and stem in VALUE.  */
4844 
4845 static bool
4846 maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
4847 		  tree *value)
4848 {
4849   if (TREE_CODE (low) != INTEGER_CST
4850       || TREE_CODE (high) != INTEGER_CST)
4851     return false;
4852 
4853   unsigned prec = TYPE_PRECISION (type);
4854   wide_int lo = wi::to_wide (low, prec);
4855   wide_int hi = wi::to_wide (high, prec);
4856 
4857   wide_int end_mask = lo ^ hi;
4858   if ((end_mask & (end_mask + 1)) != 0
4859       || (lo & end_mask) != 0)
4860     return false;
4861 
4862   wide_int stem_mask = ~end_mask;
4863   wide_int stem = lo & stem_mask;
4864   if (stem != (hi & stem_mask))
4865     return false;
4866 
4867   *mask = wide_int_to_tree (type, stem_mask);
4868   *value = wide_int_to_tree (type, stem);
4869 
4870   return true;
4871 }
4872 
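/* Editorial worked example, not part of the original source: for
   LOW == 0x20 and HIGH == 0x3f the differing low bits form the
   contiguous mask 0x1f, so *MASK becomes ~0x1f and *VALUE 0x20;
   the range test X in [0x20, 0x3f] is then (X & ~0x1f) == 0x20.  */
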
4873 /* Helper routine for build_range_check and match.pd.  Return the type in
4874    which to perform the check, or NULL if it shouldn't be optimized.  */
4875 
4876 tree
4877 range_check_type (tree etype)
4878 {
4879   /* First make sure that arithmetic in this type is valid, then make sure
4880      that it wraps around.  */
4881   if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4882     etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4883 					    TYPE_UNSIGNED (etype));
4884 
4885   if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4886     {
4887       tree utype, minv, maxv;
4888 
4889       /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4890 	 for the type in question, as we rely on this here.  */
4891       utype = unsigned_type_for (etype);
4892       maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4893       maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4894 			  build_int_cst (TREE_TYPE (maxv), 1), 1);
4895       minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4896 
4897       if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4898 				      minv, 1, maxv, 1)))
4899 	etype = utype;
4900       else
4901 	return NULL_TREE;
4902     }
4903   return etype;
4904 }
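
/* For instance, for plain 32-bit signed int (which neither wraps nor
   is unsigned) this returns the corresponding unsigned type, where
   (unsigned) INT_MAX + 1 == (unsigned) INT_MIN indeed holds, so the
   subtraction done by build_range_check below can wrap without
   invoking undefined overflow.  */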
4905 
4906 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4907    type, TYPE, return an expression to test if EXP is in (or out of, depending
4908    on IN_P) the range.  Return 0 if the test couldn't be created.  */
4909 
4910 tree
4911 build_range_check (location_t loc, tree type, tree exp, int in_p,
4912 		   tree low, tree high)
4913 {
4914   tree etype = TREE_TYPE (exp), mask, value;
4915 
4916   /* Disable this optimization for function pointer expressions
4917      on targets that require function pointer canonicalization.  */
4918   if (targetm.have_canonicalize_funcptr_for_compare ()
4919       && TREE_CODE (etype) == POINTER_TYPE
4920       && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4921     return NULL_TREE;
4922 
4923   if (! in_p)
4924     {
4925       value = build_range_check (loc, type, exp, 1, low, high);
4926       if (value != 0)
4927         return invert_truthvalue_loc (loc, value);
4928 
4929       return 0;
4930     }
4931 
4932   if (low == 0 && high == 0)
4933     return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4934 
4935   if (low == 0)
4936     return fold_build2_loc (loc, LE_EXPR, type, exp,
4937 			    fold_convert_loc (loc, etype, high));
4938 
4939   if (high == 0)
4940     return fold_build2_loc (loc, GE_EXPR, type, exp,
4941 			    fold_convert_loc (loc, etype, low));
4942 
4943   if (operand_equal_p (low, high, 0))
4944     return fold_build2_loc (loc, EQ_EXPR, type, exp,
4945 			    fold_convert_loc (loc, etype, low));
4946 
4947   if (TREE_CODE (exp) == BIT_AND_EXPR
4948       && maskable_range_p (low, high, etype, &mask, &value))
4949     return fold_build2_loc (loc, EQ_EXPR, type,
4950 			    fold_build2_loc (loc, BIT_AND_EXPR, etype,
4951 					     exp, mask),
4952 			    value);
4953 
4954   if (integer_zerop (low))
4955     {
4956       if (! TYPE_UNSIGNED (etype))
4957 	{
4958 	  etype = unsigned_type_for (etype);
4959 	  high = fold_convert_loc (loc, etype, high);
4960 	  exp = fold_convert_loc (loc, etype, exp);
4961 	}
4962       return build_range_check (loc, type, exp, 1, 0, high);
4963     }
4964 
4965   /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
4966   if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4967     {
4968       int prec = TYPE_PRECISION (etype);
4969 
4970       if (wi::mask <widest_int> (prec - 1, false) == wi::to_widest (high))
4971 	{
4972 	  if (TYPE_UNSIGNED (etype))
4973 	    {
4974 	      tree signed_etype = signed_type_for (etype);
4975 	      if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4976 		etype
4977 		  = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4978 	      else
4979 		etype = signed_etype;
4980 	      exp = fold_convert_loc (loc, etype, exp);
4981 	    }
4982 	  return fold_build2_loc (loc, GT_EXPR, type, exp,
4983 				  build_int_cst (etype, 0));
4984 	}
4985     }
4986 
4987   /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4988      This requires wrap-around arithmetic for the type of the expression.  */
4989   etype = range_check_type (etype);
4990   if (etype == NULL_TREE)
4991     return NULL_TREE;
4992 
4993   if (POINTER_TYPE_P (etype))
4994     etype = unsigned_type_for (etype);
4995 
4996   high = fold_convert_loc (loc, etype, high);
4997   low = fold_convert_loc (loc, etype, low);
4998   exp = fold_convert_loc (loc, etype, exp);
4999 
5000   value = const_binop (MINUS_EXPR, high, low);
5001 
5002   if (value != 0 && !TREE_OVERFLOW (value))
5003     return build_range_check (loc, type,
5004 			      fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
5005 			      1, build_int_cst (etype, 0), value);
5006 
5007   return 0;
5008 }
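
/* A concrete instance of the final transformation above: the range
   test c >= 'a' && c <= 'z' becomes (unsigned) (c - 'a') <= 25, with
   25 == 'z' - 'a', turning two comparisons into one.  */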
5009 
5010 /* Return the predecessor of VAL in its type, handling the infinite case.  */
5011 
5012 static tree
5013 range_predecessor (tree val)
5014 {
5015   tree type = TREE_TYPE (val);
5016 
5017   if (INTEGRAL_TYPE_P (type)
5018       && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
5019     return 0;
5020   else
5021     return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
5022 			build_int_cst (TREE_TYPE (val), 1), 0);
5023 }
5024 
5025 /* Return the successor of VAL in its type, handling the infinite case.  */
5026 
5027 static tree
5028 range_successor (tree val)
5029 {
5030   tree type = TREE_TYPE (val);
5031 
5032   if (INTEGRAL_TYPE_P (type)
5033       && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
5034     return 0;
5035   else
5036     return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
5037 			build_int_cst (TREE_TYPE (val), 1), 0);
5038 }
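
/* E.g. range_successor on the constant 5 yields 6, while on
   TYPE_MAX_VALUE of the type it yields 0 to signal that no successor
   exists; merge_ranges below treats that 0 as a reason to punt.  */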
5039 
5040 /* Given two ranges, see if we can merge them into one.  Return 1 if we
5041    can, 0 if we can't.  Set the output range into the specified parameters.  */
5042 
5043 bool
5044 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
5045 	      tree high0, int in1_p, tree low1, tree high1)
5046 {
5047   int no_overlap;
5048   int subset;
5049   int temp;
5050   tree tem;
5051   int in_p;
5052   tree low, high;
5053   int lowequal = ((low0 == 0 && low1 == 0)
5054 		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5055 						low0, 0, low1, 0)));
5056   int highequal = ((high0 == 0 && high1 == 0)
5057 		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5058 						 high0, 1, high1, 1)));
5059 
5060   /* Make range 0 be the range that starts first, or ends last if they
5061      start at the same value.  Swap them if that is not the case.  */
5062   if (integer_onep (range_binop (GT_EXPR, integer_type_node,
5063 				 low0, 0, low1, 0))
5064       || (lowequal
5065 	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
5066 					high1, 1, high0, 1))))
5067     {
5068       temp = in0_p, in0_p = in1_p, in1_p = temp;
5069       tem = low0, low0 = low1, low1 = tem;
5070       tem = high0, high0 = high1, high1 = tem;
5071     }
5072 
5073   /* Now flag two cases, whether the ranges are disjoint or whether the
5074      second range is totally subsumed in the first.  Note that the tests
5075      below are simplified by the ones above.  */
5076   no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
5077 					  high0, 1, low1, 0));
5078   subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
5079 				      high1, 1, high0, 1));
5080 
5081   /* We now have four cases, depending on whether we are including or
5082      excluding the two ranges.  */
5083   if (in0_p && in1_p)
5084     {
5085       /* If they don't overlap, the result is false.  If the second range
5086 	 is a subset it is the result.  Otherwise, the range is from the start
5087 	 of the second to the end of the first.  */
5088       if (no_overlap)
5089 	in_p = 0, low = high = 0;
5090       else if (subset)
5091 	in_p = 1, low = low1, high = high1;
5092       else
5093 	in_p = 1, low = low1, high = high0;
5094     }
5095 
5096   else if (in0_p && ! in1_p)
5097     {
5098       /* If they don't overlap, the result is the first range.  If they are
5099 	 equal, the result is false.  If the second range is a subset of the
5100 	 first, and the ranges begin at the same place, we go from just after
5101 	 the end of the second range to the end of the first.  If the second
5102 	 range is not a subset of the first, or if it is a subset and both
5103 	 ranges end at the same place, the range starts at the start of the
5104 	 first range and ends just before the second range.
5105 	 Otherwise, we can't describe this as a single range.  */
5106       if (no_overlap)
5107 	in_p = 1, low = low0, high = high0;
5108       else if (lowequal && highequal)
5109 	in_p = 0, low = high = 0;
5110       else if (subset && lowequal)
5111 	{
5112 	  low = range_successor (high1);
5113 	  high = high0;
5114 	  in_p = 1;
5115 	  if (low == 0)
5116 	    {
5117 	      /* We are in the weird situation where high0 > high1 but
5118 		 high1 has no successor.  Punt.  */
5119 	      return 0;
5120 	    }
5121 	}
5122       else if (! subset || highequal)
5123 	{
5124 	  low = low0;
5125 	  high = range_predecessor (low1);
5126 	  in_p = 1;
5127 	  if (high == 0)
5128 	    {
5129 	      /* low0 < low1 but low1 has no predecessor.  Punt.  */
5130 	      return 0;
5131 	    }
5132 	}
5133       else
5134 	return 0;
5135     }
5136 
5137   else if (! in0_p && in1_p)
5138     {
5139       /* If they don't overlap, the result is the second range.  If the second
5140 	 is a subset of the first, the result is false.  Otherwise,
5141 	 the range starts just after the first range and ends at the
5142 	 end of the second.  */
5143       if (no_overlap)
5144 	in_p = 1, low = low1, high = high1;
5145       else if (subset || highequal)
5146 	in_p = 0, low = high = 0;
5147       else
5148 	{
5149 	  low = range_successor (high0);
5150 	  high = high1;
5151 	  in_p = 1;
5152 	  if (low == 0)
5153 	    {
5154 	      /* high1 > high0 but high0 has no successor.  Punt.  */
5155 	      return 0;
5156 	    }
5157 	}
5158     }
5159 
5160   else
5161     {
5162       /* The case where we are excluding both ranges.  Here the complex case
5163 	 is if they don't overlap.  In that case, the only time we have a
5164 	 range is if they are adjacent.  If the second is a subset of the
5165 	 first, the result is the first.  Otherwise, the range to exclude
5166 	 starts at the beginning of the first range and ends at the end of the
5167 	 second.  */
5168       if (no_overlap)
5169 	{
5170 	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5171 					 range_successor (high0),
5172 					 1, low1, 0)))
5173 	    in_p = 0, low = low0, high = high1;
5174 	  else
5175 	    {
5176 	      /* Canonicalize - [min, x] into - [-, x].  */
5177 	      if (low0 && TREE_CODE (low0) == INTEGER_CST)
5178 		switch (TREE_CODE (TREE_TYPE (low0)))
5179 		  {
5180 		  case ENUMERAL_TYPE:
5181 		    if (maybe_ne (TYPE_PRECISION (TREE_TYPE (low0)),
5182 				  GET_MODE_BITSIZE
5183 				    (TYPE_MODE (TREE_TYPE (low0)))))
5184 		      break;
5185 		    /* FALLTHROUGH */
5186 		  case INTEGER_TYPE:
5187 		    if (tree_int_cst_equal (low0,
5188 					    TYPE_MIN_VALUE (TREE_TYPE (low0))))
5189 		      low0 = 0;
5190 		    break;
5191 		  case POINTER_TYPE:
5192 		    if (TYPE_UNSIGNED (TREE_TYPE (low0))
5193 			&& integer_zerop (low0))
5194 		      low0 = 0;
5195 		    break;
5196 		  default:
5197 		    break;
5198 		  }
5199 
5200 	      /* Canonicalize - [x, max] into - [x, -].  */
5201 	      if (high1 && TREE_CODE (high1) == INTEGER_CST)
5202 		switch (TREE_CODE (TREE_TYPE (high1)))
5203 		  {
5204 		  case ENUMERAL_TYPE:
5205 		    if (maybe_ne (TYPE_PRECISION (TREE_TYPE (high1)),
5206 				  GET_MODE_BITSIZE
5207 				    (TYPE_MODE (TREE_TYPE (high1)))))
5208 		      break;
5209 		    /* FALLTHROUGH */
5210 		  case INTEGER_TYPE:
5211 		    if (tree_int_cst_equal (high1,
5212 					    TYPE_MAX_VALUE (TREE_TYPE (high1))))
5213 		      high1 = 0;
5214 		    break;
5215 		  case POINTER_TYPE:
5216 		    if (TYPE_UNSIGNED (TREE_TYPE (high1))
5217 			&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5218 						       high1, 1,
5219 						       build_int_cst (TREE_TYPE (high1), 1),
5220 						       1)))
5221 		      high1 = 0;
5222 		    break;
5223 		  default:
5224 		    break;
5225 		  }
5226 
5227 	      /* The ranges might also be adjacent between the maximum and
5228 	         minimum values of the given type.  For
5229 	         - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5230 	         return + [x + 1, y - 1].  */
5231 	      if (low0 == 0 && high1 == 0)
5232 	        {
5233 		  low = range_successor (high0);
5234 		  high = range_predecessor (low1);
5235 		  if (low == 0 || high == 0)
5236 		    return 0;
5237 
5238 		  in_p = 1;
5239 		}
5240 	      else
5241 		return 0;
5242 	    }
5243 	}
5244       else if (subset)
5245 	in_p = 0, low = low0, high = high0;
5246       else
5247 	in_p = 0, low = low0, high = high1;
5248     }
5249 
5250   *pin_p = in_p, *plow = low, *phigh = high;
5251   return 1;
5252 }
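
/* An example of the in0_p && in1_p case above: merging + [0, 9] with
   + [5, 15] finds neither disjointness nor a subset, so the result is
   + [5, 9], the start of the second range to the end of the first,
   which is exactly the intersection of the two ranges.  */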
5253 
5254 
5255 /* Subroutine of fold, looking inside expressions of the form
5256    A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5257    of the COND_EXPR.  This function is being used also to optimize
5258    A op B ? C : A, by reversing the comparison first.
5259 
5260    Return a folded expression whose code is not a COND_EXPR
5261    anymore, or NULL_TREE if no folding opportunity is found.  */
5262 
5263 static tree
5264 fold_cond_expr_with_comparison (location_t loc, tree type,
5265 				tree arg0, tree arg1, tree arg2)
5266 {
5267   enum tree_code comp_code = TREE_CODE (arg0);
5268   tree arg00 = TREE_OPERAND (arg0, 0);
5269   tree arg01 = TREE_OPERAND (arg0, 1);
5270   tree arg1_type = TREE_TYPE (arg1);
5271   tree tem;
5272 
5273   STRIP_NOPS (arg1);
5274   STRIP_NOPS (arg2);
5275 
5276   /* If we have A op 0 ? A : -A, consider applying the following
5277      transformations:
5278 
5279      A == 0? A : -A    same as -A
5280      A != 0? A : -A    same as A
5281      A >= 0? A : -A    same as abs (A)
5282      A > 0?  A : -A    same as abs (A)
5283      A <= 0? A : -A    same as -abs (A)
5284      A < 0?  A : -A    same as -abs (A)
5285 
5286      None of these transformations work for modes with signed
5287      zeros.  If A is +/-0, the first two transformations will
5288      change the sign of the result (from +0 to -0, or vice
5289      versa).  The last four will fix the sign of the result,
5290      even though the original expressions could be positive or
5291      negative, depending on the sign of A.
5292 
5293      Note that all these transformations are correct if A is
5294      NaN, since the two alternatives (A and -A) are also NaNs.  */
5295   if (!HONOR_SIGNED_ZEROS (element_mode (type))
5296       && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5297 	  ? real_zerop (arg01)
5298 	  : integer_zerop (arg01))
5299       && ((TREE_CODE (arg2) == NEGATE_EXPR
5300 	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5301 	     /* In the case that A is of the form X-Y, '-A' (arg2) may
5302 	        have already been folded to Y-X; check for that.  */
5303 	  || (TREE_CODE (arg1) == MINUS_EXPR
5304 	      && TREE_CODE (arg2) == MINUS_EXPR
5305 	      && operand_equal_p (TREE_OPERAND (arg1, 0),
5306 				  TREE_OPERAND (arg2, 1), 0)
5307 	      && operand_equal_p (TREE_OPERAND (arg1, 1),
5308 				  TREE_OPERAND (arg2, 0), 0))))
5309     switch (comp_code)
5310       {
5311       case EQ_EXPR:
5312       case UNEQ_EXPR:
5313 	tem = fold_convert_loc (loc, arg1_type, arg1);
5314 	return fold_convert_loc (loc, type, negate_expr (tem));
5315       case NE_EXPR:
5316       case LTGT_EXPR:
5317 	return fold_convert_loc (loc, type, arg1);
5318       case UNGE_EXPR:
5319       case UNGT_EXPR:
5320 	if (flag_trapping_math)
5321 	  break;
5322 	/* Fall through.  */
5323       case GE_EXPR:
5324       case GT_EXPR:
5325 	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5326 	  break;
5327 	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5328 	return fold_convert_loc (loc, type, tem);
5329       case UNLE_EXPR:
5330       case UNLT_EXPR:
5331 	if (flag_trapping_math)
5332 	  break;
5333 	/* FALLTHRU */
5334       case LE_EXPR:
5335       case LT_EXPR:
5336 	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5337 	  break;
5338 	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5339 	return negate_expr (fold_convert_loc (loc, type, tem));
5340       default:
5341 	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5342 	break;
5343       }
5344 
5345   /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
5346      A == 0 ? A : 0 is always 0 unless A is -0.  Note that
5347      both transformations are correct when A is NaN: A != 0
5348      is then true, and A == 0 is false.  */
5349 
5350   if (!HONOR_SIGNED_ZEROS (element_mode (type))
5351       && integer_zerop (arg01) && integer_zerop (arg2))
5352     {
5353       if (comp_code == NE_EXPR)
5354 	return fold_convert_loc (loc, type, arg1);
5355       else if (comp_code == EQ_EXPR)
5356 	return build_zero_cst (type);
5357     }
5358 
5359   /* Try some transformations of A op B ? A : B.
5360 
5361      A == B? A : B    same as B
5362      A != B? A : B    same as A
5363      A >= B? A : B    same as max (A, B)
5364      A > B?  A : B    same as max (B, A)
5365      A <= B? A : B    same as min (A, B)
5366      A < B?  A : B    same as min (B, A)
5367 
5368      As above, these transformations don't work in the presence
5369      of signed zeros.  For example, if A and B are zeros of
5370      opposite sign, the first two transformations will change
5371      the sign of the result.  In the last four, the original
5372      expressions give different results for (A=+0, B=-0) and
5373      (A=-0, B=+0), but the transformed expressions do not.
5374 
5375      The first two transformations are correct if either A or B
5376      is a NaN.  In the first transformation, the condition will
5377      be false, and B will indeed be chosen.  In the case of the
5378      second transformation, the condition A != B will be true,
5379      and A will be chosen.
5380 
5381      The conversions to max() and min() are not correct if B is
5382      a number and A is not.  The conditions in the original
5383      expressions will be false, so all four give B.  The min()
5384      and max() versions would give a NaN instead.  */
5385   if (!HONOR_SIGNED_ZEROS (element_mode (type))
5386       && operand_equal_for_comparison_p (arg01, arg2)
5387       /* Avoid these transformations if the COND_EXPR may be used
5388 	 as an lvalue in the C++ front-end.  PR c++/19199.  */
5389       && (in_gimple_form
5390 	  || VECTOR_TYPE_P (type)
5391 	  || (! lang_GNU_CXX ()
5392 	      && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5393 	  || ! maybe_lvalue_p (arg1)
5394 	  || ! maybe_lvalue_p (arg2)))
5395     {
5396       tree comp_op0 = arg00;
5397       tree comp_op1 = arg01;
5398       tree comp_type = TREE_TYPE (comp_op0);
5399 
5400       switch (comp_code)
5401 	{
5402 	case EQ_EXPR:
5403 	  return fold_convert_loc (loc, type, arg2);
5404 	case NE_EXPR:
5405 	  return fold_convert_loc (loc, type, arg1);
5406 	case LE_EXPR:
5407 	case LT_EXPR:
5408 	case UNLE_EXPR:
5409 	case UNLT_EXPR:
5410 	  /* In C++ a ?: expression can be an lvalue, so put the
5411 	     operand which will be used if they are equal first
5412 	     so that we can convert this back to the
5413 	     corresponding COND_EXPR.  */
5414 	  if (!HONOR_NANS (arg1))
5415 	    {
5416 	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5417 	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5418 	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5419 		    ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5420 		    : fold_build2_loc (loc, MIN_EXPR, comp_type,
5421 				   comp_op1, comp_op0);
5422 	      return fold_convert_loc (loc, type, tem);
5423 	    }
5424 	  break;
5425 	case GE_EXPR:
5426 	case GT_EXPR:
5427 	case UNGE_EXPR:
5428 	case UNGT_EXPR:
5429 	  if (!HONOR_NANS (arg1))
5430 	    {
5431 	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5432 	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5433 	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5434 		    ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5435 		    : fold_build2_loc (loc, MAX_EXPR, comp_type,
5436 				   comp_op1, comp_op0);
5437 	      return fold_convert_loc (loc, type, tem);
5438 	    }
5439 	  break;
5440 	case UNEQ_EXPR:
5441 	  if (!HONOR_NANS (arg1))
5442 	    return fold_convert_loc (loc, type, arg2);
5443 	  break;
5444 	case LTGT_EXPR:
5445 	  if (!HONOR_NANS (arg1))
5446 	    return fold_convert_loc (loc, type, arg1);
5447 	  break;
5448 	default:
5449 	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5450 	  break;
5451 	}
5452     }
5453 
5454   return NULL_TREE;
5455 }
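
/* For signed integer operands, for example, x >= y ? x : y folds to
   MAX_EXPR <x, y> via the GE_EXPR case above, while x > 0 ? x : -x
   folds to ABS_EXPR <x> via the A op 0 ? A : -A transformations.  */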
5456 
5457 
5458 
5459 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5460 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5461   (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5462 		false) >= 2)
5463 #endif
5464 
5465 /* EXP is some logical combination of boolean tests.  See if we can
5466    merge it into some range test.  Return the new tree if so.  */
5467 
5468 static tree
5469 fold_range_test (location_t loc, enum tree_code code, tree type,
5470 		 tree op0, tree op1)
5471 {
5472   int or_op = (code == TRUTH_ORIF_EXPR
5473 	       || code == TRUTH_OR_EXPR);
5474   int in0_p, in1_p, in_p;
5475   tree low0, low1, low, high0, high1, high;
5476   bool strict_overflow_p = false;
5477   tree tem, lhs, rhs;
5478   const char * const warnmsg = G_("assuming signed overflow does not occur "
5479 				  "when simplifying range test");
5480 
5481   if (!INTEGRAL_TYPE_P (type))
5482     return 0;
5483 
5484   lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5485   rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5486 
5487   /* If this is an OR operation, invert both sides; we will invert
5488      again at the end.  */
5489   if (or_op)
5490     in0_p = ! in0_p, in1_p = ! in1_p;
5491 
5492   /* If both expressions are the same, and if we can merge the ranges and
5493      build the range test, return it or its inverse.  If one of the
5494      ranges is always true or always false, consider it to be the same
5495      expression as the other.  */
5496   if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5497       && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5498 		       in1_p, low1, high1)
5499       && (tem = (build_range_check (loc, type,
5500 				    lhs != 0 ? lhs
5501 				    : rhs != 0 ? rhs : integer_zero_node,
5502 				    in_p, low, high))) != 0)
5503     {
5504       if (strict_overflow_p)
5505 	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5506       return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5507     }
5508 
5509   /* On machines where the branch cost is high, if this is a
5510      short-circuited branch and the underlying object on both sides
5511      is the same, make a non-short-circuit operation.  */
5512   else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5513 	   && !flag_sanitize_coverage
5514 	   && lhs != 0 && rhs != 0
5515 	   && (code == TRUTH_ANDIF_EXPR
5516 	       || code == TRUTH_ORIF_EXPR)
5517 	   && operand_equal_p (lhs, rhs, 0))
5518     {
5519       /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
5520 	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5521 	 which cases we can't do this.  */
5522       if (simple_operand_p (lhs))
5523 	return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5524 			   ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5525 			   type, op0, op1);
5526 
5527       else if (!lang_hooks.decls.global_bindings_p ()
5528 	       && !CONTAINS_PLACEHOLDER_P (lhs))
5529 	{
5530 	  tree common = save_expr (lhs);
5531 
5532 	  if ((lhs = build_range_check (loc, type, common,
5533 					or_op ? ! in0_p : in0_p,
5534 					low0, high0)) != 0
5535 	      && (rhs = build_range_check (loc, type, common,
5536 					   or_op ? ! in1_p : in1_p,
5537 					   low1, high1)) != 0)
5538 	    {
5539 	      if (strict_overflow_p)
5540 		fold_overflow_warning (warnmsg,
5541 				       WARN_STRICT_OVERFLOW_COMPARISON);
5542 	      return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5543 				 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5544 				 type, lhs, rhs);
5545 	    }
5546 	}
5547     }
5548 
5549   return 0;
5550 }
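
/* A sketch of the classic case: for ch >= '0' && ch <= '9' (ASCII),
   make_range yields + [48, -] and + [-, 57], merge_ranges combines
   them into + [48, 57], and build_range_check then emits the single
   comparison (unsigned) (ch - 48) <= 9.  */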
5551 
5552 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a
5553    P-bit value.  Arrange things so the extra bits will be set to zero if
5554    and only if C is sign-extended to its full width.  If MASK is nonzero,
5555    it is an INTEGER_CST that should be AND'ed with the extra bits.  */
5556 
5557 static tree
5558 unextend (tree c, int p, int unsignedp, tree mask)
5559 {
5560   tree type = TREE_TYPE (c);
5561   int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
5562   tree temp;
5563 
5564   if (p == modesize || unsignedp)
5565     return c;
5566 
5567   /* We work by getting just the sign bit into the low-order bit, then
5568      into the high-order bit, then sign-extend.  We then XOR that value
5569      with C.  */
5570   temp = build_int_cst (TREE_TYPE (c),
5571 			wi::extract_uhwi (wi::to_wide (c), p - 1, 1));
5572 
5573   /* We must use a signed type in order to get an arithmetic right shift.
5574      However, we must also avoid introducing accidental overflows, so that
5575      a subsequent call to integer_zerop will work.  Hence we must
5576      do the type conversion here.  At this point, the constant is either
5577      zero or one, and the conversion to a signed type can never overflow.
5578      We could get an overflow if this conversion is done anywhere else.  */
5579   if (TYPE_UNSIGNED (type))
5580     temp = fold_convert (signed_type_for (type), temp);
5581 
5582   temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5583   temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5584   if (mask != 0)
5585     temp = const_binop (BIT_AND_EXPR, temp,
5586 			fold_convert (TREE_TYPE (c), mask));
5587   /* If necessary, convert the type back to match the type of C.  */
5588   if (TYPE_UNSIGNED (type))
5589     temp = fold_convert (type, temp);
5590 
5591   return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5592 }
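
/* Worked example, assuming a 32-bit mode, P == 8 and MASK == 0: for
   C == 0xffffff80 (-128 correctly sign-extended from 8 bits) TEMP
   becomes 0xffffff00, so C ^ TEMP == 0x00000080 and the extra bits
   come out zero; for a C that is not sign-extended, such as
   0x00000080, some of the extra bits would remain set.  */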
5593 
5594 /* For an expression that has the form
5595      (A && B) || ~B
5596    or
5597      (A || B) && ~B,
5598    we can drop one of the inner expressions and simplify to
5599      A || ~B
5600    or
5601      A && ~B
5602    LOC is the location of the resulting expression.  OP is the inner
5603    logical operation; the left-hand side in the examples above, while CMPOP
5604    is the right-hand side.  RHS_ONLY is used to prevent us from accidentally
5605    removing a condition that guards another, as in
5606      (A != NULL && A->...) || A == NULL
5607    which we must not transform.  If RHS_ONLY is true, only eliminate the
5608    right-most operand of the inner logical operation.  */
5609 
5610 static tree
5611 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5612 				 bool rhs_only)
5613 {
5614   tree type = TREE_TYPE (cmpop);
5615   enum tree_code code = TREE_CODE (cmpop);
5616   enum tree_code truthop_code = TREE_CODE (op);
5617   tree lhs = TREE_OPERAND (op, 0);
5618   tree rhs = TREE_OPERAND (op, 1);
5619   tree orig_lhs = lhs, orig_rhs = rhs;
5620   enum tree_code rhs_code = TREE_CODE (rhs);
5621   enum tree_code lhs_code = TREE_CODE (lhs);
5622   enum tree_code inv_code;
5623 
5624   if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5625     return NULL_TREE;
5626 
5627   if (TREE_CODE_CLASS (code) != tcc_comparison)
5628     return NULL_TREE;
5629 
5630   if (rhs_code == truthop_code)
5631     {
5632       tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5633       if (newrhs != NULL_TREE)
5634 	{
5635 	  rhs = newrhs;
5636 	  rhs_code = TREE_CODE (rhs);
5637 	}
5638     }
5639   if (lhs_code == truthop_code && !rhs_only)
5640     {
5641       tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5642       if (newlhs != NULL_TREE)
5643 	{
5644 	  lhs = newlhs;
5645 	  lhs_code = TREE_CODE (lhs);
5646 	}
5647     }
5648 
5649   inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5650   if (inv_code == rhs_code
5651       && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5652       && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5653     return lhs;
5654   if (!rhs_only && inv_code == lhs_code
5655       && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5656       && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5657     return rhs;
5658   if (rhs != orig_rhs || lhs != orig_lhs)
5659     return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5660 			    lhs, rhs);
5661   return NULL_TREE;
5662 }
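
/* For instance, with OP == (x > 0 && y > 0) and CMPOP == y <= 0, the
   inverted comparison y > 0 matches OP's right arm, so the left arm
   x > 0 is returned and the caller can fold
   (x > 0 && y > 0) || y <= 0 into x > 0 || y <= 0.  */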
5663 
5664 /* Find ways of folding logical expressions of LHS and RHS:
5665    Try to merge two comparisons to the same innermost item.
5666    Look for range tests like "ch >= '0' && ch <= '9'".
5667    Look for combinations of simple terms on machines with expensive branches
5668    and evaluate the RHS unconditionally.
5669 
5670    For example, if we have p->a == 2 && p->b == 4 and we can make an
5671    object large enough to span both A and B, we can do this with a comparison
5672    against the object ANDed with a mask.
5673 
5674    If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5675    operations to do this with one comparison.
5676 
5677    We check for both normal comparisons and the BIT_AND_EXPRs made by this
5678    function and the one above.
5679 
5680    CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
5681    TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5682 
5683    TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5684    two operands.
5685 
5686    We return the simplified tree or 0 if no optimization is possible.  */
5687 
5688 static tree
5689 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5690 		    tree lhs, tree rhs)
5691 {
5692   /* If this is the "or" of two comparisons, we can do something if
5693      the comparisons are NE_EXPR.  If this is the "and", we can do something
5694      if the comparisons are EQ_EXPR.  I.e.,
5695 	(a->b == 2 && a->c == 4) can become (a->new == NEW).
5696 
5697      WANTED_CODE is this operation code.  For single bit fields, we can
5698      convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5699      comparison for one-bit fields.  */
5700 
5701   enum tree_code wanted_code;
5702   enum tree_code lcode, rcode;
5703   tree ll_arg, lr_arg, rl_arg, rr_arg;
5704   tree ll_inner, lr_inner, rl_inner, rr_inner;
5705   HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5706   HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5707   HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5708   HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5709   int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5710   int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
5711   machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5712   scalar_int_mode lnmode, rnmode;
5713   tree ll_mask, lr_mask, rl_mask, rr_mask;
5714   tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5715   tree l_const, r_const;
5716   tree lntype, rntype, result;
5717   HOST_WIDE_INT first_bit, end_bit;
5718   int volatilep;
5719 
5720   /* Start by getting the comparison codes.  Fail if anything is volatile.
5721      If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5722      it were surrounded with a NE_EXPR.  */
5723 
5724   if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5725     return 0;
5726 
5727   lcode = TREE_CODE (lhs);
5728   rcode = TREE_CODE (rhs);
5729 
5730   if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5731     {
5732       lhs = build2 (NE_EXPR, truth_type, lhs,
5733 		    build_int_cst (TREE_TYPE (lhs), 0));
5734       lcode = NE_EXPR;
5735     }
5736 
5737   if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5738     {
5739       rhs = build2 (NE_EXPR, truth_type, rhs,
5740 		    build_int_cst (TREE_TYPE (rhs), 0));
5741       rcode = NE_EXPR;
5742     }
5743 
5744   if (TREE_CODE_CLASS (lcode) != tcc_comparison
5745       || TREE_CODE_CLASS (rcode) != tcc_comparison)
5746     return 0;
5747 
5748   ll_arg = TREE_OPERAND (lhs, 0);
5749   lr_arg = TREE_OPERAND (lhs, 1);
5750   rl_arg = TREE_OPERAND (rhs, 0);
5751   rr_arg = TREE_OPERAND (rhs, 1);
5752 
5753   /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
5754   if (simple_operand_p (ll_arg)
5755       && simple_operand_p (lr_arg))
5756     {
5757       if (operand_equal_p (ll_arg, rl_arg, 0)
5758           && operand_equal_p (lr_arg, rr_arg, 0))
5759 	{
5760           result = combine_comparisons (loc, code, lcode, rcode,
5761 					truth_type, ll_arg, lr_arg);
5762 	  if (result)
5763 	    return result;
5764 	}
5765       else if (operand_equal_p (ll_arg, rr_arg, 0)
5766                && operand_equal_p (lr_arg, rl_arg, 0))
5767 	{
5768           result = combine_comparisons (loc, code, lcode,
5769 					swap_tree_comparison (rcode),
5770 					truth_type, ll_arg, lr_arg);
5771 	  if (result)
5772 	    return result;
5773 	}
5774     }
5775 
5776   code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5777 	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5778 
5779   /* If the RHS can be evaluated unconditionally and its operands are
5780      simple, it wins to evaluate the RHS unconditionally on machines
5781      with expensive branches.  In this case, this isn't a comparison
5782      that can be merged.  */
5783 
5784   if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5785 		   false) >= 2
5786       && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5787       && simple_operand_p (rl_arg)
5788       && simple_operand_p (rr_arg))
5789     {
5790       /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
5791       if (code == TRUTH_OR_EXPR
5792 	  && lcode == NE_EXPR && integer_zerop (lr_arg)
5793 	  && rcode == NE_EXPR && integer_zerop (rr_arg)
5794 	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5795 	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5796 	return build2_loc (loc, NE_EXPR, truth_type,
5797 			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5798 				   ll_arg, rl_arg),
5799 			   build_int_cst (TREE_TYPE (ll_arg), 0));
5800 
5801       /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
5802       if (code == TRUTH_AND_EXPR
5803 	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
5804 	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
5805 	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5806 	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5807 	return build2_loc (loc, EQ_EXPR, truth_type,
5808 			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5809 				   ll_arg, rl_arg),
5810 			   build_int_cst (TREE_TYPE (ll_arg), 0));
5811     }
5812 
5813   /* See if the comparisons can be merged.  Then get all the parameters for
5814      each side.  */
5815 
5816   if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5817       || (rcode != EQ_EXPR && rcode != NE_EXPR))
5818     return 0;
5819 
5820   ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
5821   volatilep = 0;
5822   ll_inner = decode_field_reference (loc, &ll_arg,
5823 				     &ll_bitsize, &ll_bitpos, &ll_mode,
5824 				     &ll_unsignedp, &ll_reversep, &volatilep,
5825 				     &ll_mask, &ll_and_mask);
5826   lr_inner = decode_field_reference (loc, &lr_arg,
5827 				     &lr_bitsize, &lr_bitpos, &lr_mode,
5828 				     &lr_unsignedp, &lr_reversep, &volatilep,
5829 				     &lr_mask, &lr_and_mask);
5830   rl_inner = decode_field_reference (loc, &rl_arg,
5831 				     &rl_bitsize, &rl_bitpos, &rl_mode,
5832 				     &rl_unsignedp, &rl_reversep, &volatilep,
5833 				     &rl_mask, &rl_and_mask);
5834   rr_inner = decode_field_reference (loc, &rr_arg,
5835 				     &rr_bitsize, &rr_bitpos, &rr_mode,
5836 				     &rr_unsignedp, &rr_reversep, &volatilep,
5837 				     &rr_mask, &rr_and_mask);
5838 
5839   /* The inner operation on the lhs of each comparison must be the same
5840      if we are to be able to do anything.
5841      Then see if we have constants.  If not, the same must be true for
5842      the rhs's.  */
5843   if (volatilep
5844       || ll_reversep != rl_reversep
5845       || ll_inner == 0 || rl_inner == 0
5846       || ! operand_equal_p (ll_inner, rl_inner, 0))
5847     return 0;
5848 
5849   if (TREE_CODE (lr_arg) == INTEGER_CST
5850       && TREE_CODE (rr_arg) == INTEGER_CST)
5851     {
5852       l_const = lr_arg, r_const = rr_arg;
5853       lr_reversep = ll_reversep;
5854     }
5855   else if (lr_reversep != rr_reversep
5856 	   || lr_inner == 0 || rr_inner == 0
5857 	   || ! operand_equal_p (lr_inner, rr_inner, 0))
5858     return 0;
5859   else
5860     l_const = r_const = 0;
5861 
5862   /* If either comparison code is not correct for our logical operation,
5863      fail.  However, we can convert a one-bit comparison against zero into
5864      the opposite comparison against that bit being set in the field.  */
5865 
5866   wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5867   if (lcode != wanted_code)
5868     {
5869       if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5870 	{
5871 	  /* Make the left operand unsigned, since we are only interested
5872 	     in the value of one bit.  Otherwise we are doing the wrong
5873 	     thing below.  */
5874 	  ll_unsignedp = 1;
5875 	  l_const = ll_mask;
5876 	}
5877       else
5878 	return 0;
5879     }
5880 
5881   /* This is analogous to the code for l_const above.  */
5882   if (rcode != wanted_code)
5883     {
5884       if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5885 	{
5886 	  rl_unsignedp = 1;
5887 	  r_const = rl_mask;
5888 	}
5889       else
5890 	return 0;
5891     }
5892 
5893   /* See if we can find a mode that contains both fields being compared on
5894      the left.  If we can't, fail.  Otherwise, update all constants and masks
5895      to be relative to a field of that size.  */
5896   first_bit = MIN (ll_bitpos, rl_bitpos);
5897   end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5898   if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5899 		      TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD,
5900 		      volatilep, &lnmode))
5901     return 0;
5902 
5903   lnbitsize = GET_MODE_BITSIZE (lnmode);
5904   lnbitpos = first_bit & ~ (lnbitsize - 1);
5905   lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5906   xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5907 
5908   if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5909     {
5910       xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5911       xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5912     }
5913 
5914   ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5915 			 size_int (xll_bitpos));
5916   rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5917 			 size_int (xrl_bitpos));
5918 
5919   if (l_const)
5920     {
5921       l_const = fold_convert_loc (loc, lntype, l_const);
5922       l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5923       l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5924       if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5925 					fold_build1_loc (loc, BIT_NOT_EXPR,
5926 						     lntype, ll_mask))))
5927 	{
5928 	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5929 
5930 	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5931 	}
5932     }
5933   if (r_const)
5934     {
5935       r_const = fold_convert_loc (loc, lntype, r_const);
5936       r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5937       r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5938       if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5939 					fold_build1_loc (loc, BIT_NOT_EXPR,
5940 						     lntype, rl_mask))))
5941 	{
5942 	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5943 
5944 	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5945 	}
5946     }
5947 
5948   /* If the right sides are not constant, do the same for them.  Also,
5949      disallow this optimization if a size or signedness mismatch occurs
5950      between the left and right sides.  */
5951   if (l_const == 0)
5952     {
5953       if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5954 	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5955 	  /* Make sure the two fields on the right
5956 	     correspond to the left without being swapped.  */
5957 	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5958 	return 0;
5959 
5960       first_bit = MIN (lr_bitpos, rr_bitpos);
5961       end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5962       if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5963 			  TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD,
5964 			  volatilep, &rnmode))
5965 	return 0;
5966 
5967       rnbitsize = GET_MODE_BITSIZE (rnmode);
5968       rnbitpos = first_bit & ~ (rnbitsize - 1);
5969       rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5970       xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5971 
5972       if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5973 	{
5974 	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5975 	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5976 	}
5977 
5978       lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5979 							    rntype, lr_mask),
5980 			     size_int (xlr_bitpos));
5981       rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5982 							    rntype, rr_mask),
5983 			     size_int (xrr_bitpos));
5984 
5985       /* Make a mask that corresponds to both fields being compared.
5986 	 Do this for both items being compared.  If the operands are the
5987 	 same size and the bits being compared are in the same position
5988 	 then we can do this by masking both and comparing the masked
5989 	 results.  */
5990       ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5991       lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5992       if (lnbitsize == rnbitsize
5993 	  && xll_bitpos == xlr_bitpos
5994 	  && lnbitpos >= 0
5995 	  && rnbitpos >= 0)
5996 	{
5997 	  lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
5998 				    lntype, lnbitsize, lnbitpos,
5999 				    ll_unsignedp || rl_unsignedp, ll_reversep);
6000 	  if (! all_ones_mask_p (ll_mask, lnbitsize))
6001 	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
6002 
6003 	  rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
6004 				    rntype, rnbitsize, rnbitpos,
6005 				    lr_unsignedp || rr_unsignedp, lr_reversep);
6006 	  if (! all_ones_mask_p (lr_mask, rnbitsize))
6007 	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
6008 
6009 	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6010 	}
6011 
6012       /* There is still another way we can do something:  If both pairs of
6013 	 fields being compared are adjacent, we may be able to make a wider
6014 	 field containing them both.
6015 
6016 	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
6017 	 the mask must be shifted to account for the shift done by
6018 	 make_bit_field_ref.  */
6019       if (((ll_bitsize + ll_bitpos == rl_bitpos
6020 	    && lr_bitsize + lr_bitpos == rr_bitpos)
6021 	   || (ll_bitpos == rl_bitpos + rl_bitsize
6022 	       && lr_bitpos == rr_bitpos + rr_bitsize))
6023 	  && ll_bitpos >= 0
6024 	  && rl_bitpos >= 0
6025 	  && lr_bitpos >= 0
6026 	  && rr_bitpos >= 0)
6027 	{
6028 	  tree type;
6029 
6030 	  lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
6031 				    ll_bitsize + rl_bitsize,
6032 				    MIN (ll_bitpos, rl_bitpos),
6033 				    ll_unsignedp, ll_reversep);
6034 	  rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
6035 				    lr_bitsize + rr_bitsize,
6036 				    MIN (lr_bitpos, rr_bitpos),
6037 				    lr_unsignedp, lr_reversep);
6038 
6039 	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
6040 				 size_int (MIN (xll_bitpos, xrl_bitpos)));
6041 	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
6042 				 size_int (MIN (xlr_bitpos, xrr_bitpos)));
6043 
6044 	  /* Convert to the smaller type before masking out unwanted bits.  */
6045 	  type = lntype;
6046 	  if (lntype != rntype)
6047 	    {
6048 	      if (lnbitsize > rnbitsize)
6049 		{
6050 		  lhs = fold_convert_loc (loc, rntype, lhs);
6051 		  ll_mask = fold_convert_loc (loc, rntype, ll_mask);
6052 		  type = rntype;
6053 		}
6054 	      else if (lnbitsize < rnbitsize)
6055 		{
6056 		  rhs = fold_convert_loc (loc, lntype, rhs);
6057 		  lr_mask = fold_convert_loc (loc, lntype, lr_mask);
6058 		  type = lntype;
6059 		}
6060 	    }
6061 
6062 	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
6063 	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
6064 
6065 	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
6066 	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
6067 
6068 	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6069 	}
6070 
6071       return 0;
6072     }
6073 
6074   /* Handle the case of comparisons with constants.  If there is something in
6075      common between the masks, those bits of the constants must be the same.
6076      If not, the condition is always false.  Test for this to avoid generating
6077      incorrect code below.  */
6078   result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
6079   if (! integer_zerop (result)
6080       && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
6081 			   const_binop (BIT_AND_EXPR, result, r_const)) != 1)
6082     {
6083       if (wanted_code == NE_EXPR)
6084 	{
6085 	  warning (0, "%<or%> of unmatched not-equal tests is always 1");
6086 	  return constant_boolean_node (true, truth_type);
6087 	}
6088       else
6089 	{
6090 	  warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6091 	  return constant_boolean_node (false, truth_type);
6092 	}
6093     }
6094 
6095   if (lnbitpos < 0)
6096     return 0;
6097 
6098   /* Construct the expression we will return.  First get the component
6099      reference we will make.  Unless the mask is all ones the width of
6100      that field, perform the mask operation.  Then compare with the
6101      merged constant.  */
6102   result = make_bit_field_ref (loc, ll_inner, ll_arg,
6103 			       lntype, lnbitsize, lnbitpos,
6104 			       ll_unsignedp || rl_unsignedp, ll_reversep);
6105 
6106   ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6107   if (! all_ones_mask_p (ll_mask, lnbitsize))
6108     result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
6109 
6110   return build2_loc (loc, wanted_code, truth_type, result,
6111 		     const_binop (BIT_IOR_EXPR, l_const, r_const));
6112 }
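
/* A concrete sketch of the constant case, assuming PCC-style bit-field
   layout on a little-endian target: given
   struct S { unsigned a : 4; unsigned b : 4; } *p;, the test
   p->a == 2 && p->b == 4 loads the containing byte once and compares
   it, under the IORed masks, against the merged constant
   (2 | 4 << 4) == 0x42.  */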
6113 
6114 /* T is an integer expression that is being multiplied or divided by, or
6115    taken modulo, a constant C (CODE says which operation and what kind of
6116    divide or modulus).  See if we can eliminate that operation by folding it with
6117    other operations already in T.  WIDE_TYPE, if non-null, is a type that
6118    should be used for the computation if wider than our type.
6119 
6120    For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6121    (X * 2) + (Y * 4).  We must, however, be assured that either the original
6122    expression would not overflow or that overflow is undefined for the type
6123    in the language in question.
6124 
6125    If we return a non-null expression, it is an equivalent form of the
6126    original computation, but need not be in the original type.
6127 
6128    We set *STRICT_OVERFLOW_P to true if the return value depends on
6129    signed overflow being undefined.  Otherwise we do not change
6130    *STRICT_OVERFLOW_P.  */
6131 
6132 static tree
6133 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6134 		bool *strict_overflow_p)
6135 {
6136   /* To avoid exponential search depth, refuse to allow recursion past
6137      three levels.  Beyond that (1) it's highly unlikely that we'll find
6138      something interesting and (2) we've probably processed it before
6139      when we built the inner expression.  */
6140 
6141   static int depth;
6142   tree ret;
6143 
6144   if (depth > 3)
6145     return NULL;
6146 
6147   depth++;
6148   ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6149   depth--;
6150 
6151   return ret;
6152 }
6153 
6154 static tree
6155 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6156 		  bool *strict_overflow_p)
6157 {
6158   tree type = TREE_TYPE (t);
6159   enum tree_code tcode = TREE_CODE (t);
6160   tree ctype = (wide_type != 0
6161 		&& (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
6162 		    > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
6163 		? wide_type : type);
6164   tree t1, t2;
6165   int same_p = tcode == code;
6166   tree op0 = NULL_TREE, op1 = NULL_TREE;
6167   bool sub_strict_overflow_p;
6168 
6169   /* Don't deal with constants of zero here; they confuse the code below.  */
6170   if (integer_zerop (c))
6171     return NULL_TREE;
6172 
6173   if (TREE_CODE_CLASS (tcode) == tcc_unary)
6174     op0 = TREE_OPERAND (t, 0);
6175 
6176   if (TREE_CODE_CLASS (tcode) == tcc_binary)
6177     op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6178 
6179   /* Note that we need not handle conditional operations here since fold
6180      already handles those cases.  So just do arithmetic here.  */
6181   switch (tcode)
6182     {
6183     case INTEGER_CST:
6184       /* For a constant, we can always simplify if we are a multiply
6185 	 or (for divide and modulus) if it is a multiple of our constant.  */
6186       if (code == MULT_EXPR
6187 	  || wi::multiple_of_p (wi::to_wide (t), wi::to_wide (c),
6188 				TYPE_SIGN (type)))
6189 	{
6190 	  tree tem = const_binop (code, fold_convert (ctype, t),
6191 				  fold_convert (ctype, c));
6192 	  /* If the multiplication overflowed, we lost information on it.
6193 	     See PR68142 and PR69845.  */
6194 	  if (TREE_OVERFLOW (tem))
6195 	    return NULL_TREE;
6196 	  return tem;
6197 	}
6198       break;
6199 
6200     CASE_CONVERT: case NON_LVALUE_EXPR:
6201       /* If op0 is an expression ...  */
6202       if ((COMPARISON_CLASS_P (op0)
6203 	   || UNARY_CLASS_P (op0)
6204 	   || BINARY_CLASS_P (op0)
6205 	   || VL_EXP_CLASS_P (op0)
6206 	   || EXPRESSION_CLASS_P (op0))
6207 	  /* ... and has wrapping overflow, and its type is smaller
6208 	     than ctype, then we cannot pass through as widening.  */
6209 	  && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6210 		&& TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6211 	       && (TYPE_PRECISION (ctype)
6212 	           > TYPE_PRECISION (TREE_TYPE (op0))))
6213 	      /* ... or this is a truncation (t is narrower than op0),
6214 		 then we cannot pass through this narrowing.  */
6215 	      || (TYPE_PRECISION (type)
6216 		  < TYPE_PRECISION (TREE_TYPE (op0)))
6217 	      /* ... or signedness changes for division or modulus,
6218 		 then we cannot pass through this conversion.  */
6219 	      || (code != MULT_EXPR
6220 		  && (TYPE_UNSIGNED (ctype)
6221 		      != TYPE_UNSIGNED (TREE_TYPE (op0))))
6222 	      /* ... or has undefined overflow while the converted to
6223 		 type has not, we cannot do the operation in the inner type
6224 		 as that would introduce undefined overflow.  */
6225 	      || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6226 		   && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6227 		  && !TYPE_OVERFLOW_UNDEFINED (type))))
6228 	break;
6229 
6230       /* Pass the constant down and see if we can make a simplification.  If
6231 	 we can, replace this expression with the inner simplification for
6232 	 possible later conversion to our or some other type.  */
6233       if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6234 	  && TREE_CODE (t2) == INTEGER_CST
6235 	  && !TREE_OVERFLOW (t2)
6236 	  && (t1 = extract_muldiv (op0, t2, code,
6237 				   code == MULT_EXPR ? ctype : NULL_TREE,
6238 				   strict_overflow_p)) != 0)
6239 	return t1;
6240       break;
6241 
6242     case ABS_EXPR:
6243       /* If widening the type changes it from signed to unsigned, then we
6244          must avoid building ABS_EXPR itself as unsigned.  */
6245       if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6246         {
6247           tree cstype = (*signed_type_for) (ctype);
6248           if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6249 	      != 0)
6250             {
6251               t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6252               return fold_convert (ctype, t1);
6253             }
6254           break;
6255         }
6256       /* If the constant is negative, we cannot simplify this.  */
6257       if (tree_int_cst_sgn (c) == -1)
6258         break;
6259       /* FALLTHROUGH */
6260     case NEGATE_EXPR:
6261       /* For division and modulus, type can't be unsigned, as e.g.
6262 	 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6263 	 For signed types, even with wrapping overflow, this is fine.  */
6264       if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6265 	break;
6266       if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6267 	  != 0)
6268 	return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6269       break;
6270 
6271     case MIN_EXPR:  case MAX_EXPR:
6272       /* If widening the type changes the signedness, then we can't perform
6273 	 this optimization as that changes the result.  */
6274       if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6275 	break;
6276 
6277       /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
6278       sub_strict_overflow_p = false;
6279       if ((t1 = extract_muldiv (op0, c, code, wide_type,
6280 				&sub_strict_overflow_p)) != 0
6281 	  && (t2 = extract_muldiv (op1, c, code, wide_type,
6282 				   &sub_strict_overflow_p)) != 0)
6283 	{
6284 	  if (tree_int_cst_sgn (c) < 0)
6285 	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6286 	  if (sub_strict_overflow_p)
6287 	    *strict_overflow_p = true;
6288 	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6289 			      fold_convert (ctype, t2));
6290 	}
6291       break;
6292 
6293     case LSHIFT_EXPR:  case RSHIFT_EXPR:
6294       /* If the second operand is constant, this is a multiplication
6295 	 or floor division, by a power of two, so we can treat it that
6296 	 way unless the multiplier or divisor overflows.  Signed
6297 	 left-shift overflow is implementation-defined rather than
6298 	 undefined in C90, so do not convert signed left shift into
6299 	 multiplication.  */
6300       if (TREE_CODE (op1) == INTEGER_CST
6301 	  && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6302 	  /* const_binop may not detect overflow correctly,
6303 	     so check for it explicitly here.  */
6304 	  && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
6305 			wi::to_wide (op1))
6306 	  && (t1 = fold_convert (ctype,
6307 				 const_binop (LSHIFT_EXPR, size_one_node,
6308 					      op1))) != 0
6309 	  && !TREE_OVERFLOW (t1))
6310 	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6311 				       ? MULT_EXPR : FLOOR_DIV_EXPR,
6312 				       ctype,
6313 				       fold_convert (ctype, op0),
6314 				       t1),
6315 			       c, code, wide_type, strict_overflow_p);
6316       break;
6317 
6318     case PLUS_EXPR:  case MINUS_EXPR:
6319       /* See if we can eliminate the operation on both sides.  If we can, we
6320 	 can return a new PLUS or MINUS.  If we can't, the only remaining
6321 	 cases where we can do anything are if the second operand is a
6322 	 constant.  */
6323       sub_strict_overflow_p = false;
6324       t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6325       t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6326       if (t1 != 0 && t2 != 0
6327 	  && TYPE_OVERFLOW_WRAPS (ctype)
6328 	  && (code == MULT_EXPR
6329 	      /* If not multiplication, we can only do this if both operands
6330 		 are divisible by c.  */
6331 	      || (multiple_of_p (ctype, op0, c)
6332 	          && multiple_of_p (ctype, op1, c))))
6333 	{
6334 	  if (sub_strict_overflow_p)
6335 	    *strict_overflow_p = true;
6336 	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6337 			      fold_convert (ctype, t2));
6338 	}
6339 
6340       /* If this was a subtraction, negate OP1 and set it to be an addition.
6341 	 This simplifies the logic below.  */
6342       if (tcode == MINUS_EXPR)
6343 	{
6344 	  tcode = PLUS_EXPR, op1 = negate_expr (op1);
6345 	  /* If OP1 was not easily negatable, the constant may be OP0.  */
6346 	  if (TREE_CODE (op0) == INTEGER_CST)
6347 	    {
6348 	      std::swap (op0, op1);
6349 	      std::swap (t1, t2);
6350 	    }
6351 	}
6352 
6353       if (TREE_CODE (op1) != INTEGER_CST)
6354 	break;
6355 
6356       /* If either OP1 or C is negative, this optimization is not safe for
6357 	 some of the division and remainder types, while for others we need
6358 	 to change the code.  */
6359       if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6360 	{
6361 	  if (code == CEIL_DIV_EXPR)
6362 	    code = FLOOR_DIV_EXPR;
6363 	  else if (code == FLOOR_DIV_EXPR)
6364 	    code = CEIL_DIV_EXPR;
6365 	  else if (code != MULT_EXPR
6366 		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6367 	    break;
6368 	}
6369 
6370       /* If it's a multiply or a division/modulus operation of a multiple
6371          of our constant, do the operation and verify it doesn't overflow.  */
6372       if (code == MULT_EXPR
6373 	  || wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6374 				TYPE_SIGN (type)))
6375 	{
6376 	  op1 = const_binop (code, fold_convert (ctype, op1),
6377 			     fold_convert (ctype, c));
6378 	  /* We allow the constant to overflow with wrapping semantics.  */
6379 	  if (op1 == 0
6380 	      || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6381 	    break;
6382 	}
6383       else
6384 	break;
6385 
6386       /* If we have an unsigned type, we cannot widen the operation since it
6387 	 will change the result if the original computation overflowed.  */
6388       if (TYPE_UNSIGNED (ctype) && ctype != type)
6389 	break;
6390 
6391       /* The last case is when CODE is a multiply.  In that case, we can
6392 	 apply the distributive law to commute the multiply and addition
6393 	 if the multiplication of the constants doesn't overflow
6394 	 and overflow is defined.  With undefined overflow
6395 	 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.  */
6396       if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6397 	return fold_build2 (tcode, ctype,
6398 			    fold_build2 (code, ctype,
6399 					 fold_convert (ctype, op0),
6400 					 fold_convert (ctype, c)),
6401 			    op1);
6402 
6403       break;
6404 
6405     case MULT_EXPR:
6406       /* We have a special case here if we are doing something like
6407 	 (C * 8) % 4 since we know that's zero.  */
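      /* E.g. (X * 8) % 4 is 0 for any X, because 8 is a multiple of 4;
	 this only holds if the multiplication cannot wrap, hence the
	 TYPE_OVERFLOW_UNDEFINED check below.  */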
6408       if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6409 	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6410 	  /* If the multiplication can overflow we cannot optimize this.  */
6411 	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6412 	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6413 	  && wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6414 				TYPE_SIGN (type)))
6415 	{
6416 	  *strict_overflow_p = true;
6417 	  return omit_one_operand (type, integer_zero_node, op0);
6418 	}
6419 
6420       /* ... fall through ...  */
6421 
6422     case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
6423     case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
6424       /* If we can extract our operation from the LHS, do so and return a
6425 	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
6426 	 do something only if the second operand is a constant.  */
6427       if (same_p
6428 	  && TYPE_OVERFLOW_WRAPS (ctype)
6429 	  && (t1 = extract_muldiv (op0, c, code, wide_type,
6430 				   strict_overflow_p)) != 0)
6431 	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6432 			    fold_convert (ctype, op1));
6433       else if (tcode == MULT_EXPR && code == MULT_EXPR
6434 	       && TYPE_OVERFLOW_WRAPS (ctype)
6435 	       && (t1 = extract_muldiv (op1, c, code, wide_type,
6436 					strict_overflow_p)) != 0)
6437 	return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6438 			    fold_convert (ctype, t1));
6439       else if (TREE_CODE (op1) != INTEGER_CST)
6440 	return 0;
6441 
6442       /* If these are the same operation types, we can associate them
6443 	 assuming no overflow.  */
6444       if (tcode == code)
6445 	{
6446 	  bool overflow_p = false;
6447 	  bool overflow_mul_p;
6448 	  signop sign = TYPE_SIGN (ctype);
6449 	  unsigned prec = TYPE_PRECISION (ctype);
6450 	  wide_int mul = wi::mul (wi::to_wide (op1, prec),
6451 				  wi::to_wide (c, prec),
6452 				  sign, &overflow_mul_p);
6453 	  overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6454 	  if (overflow_mul_p
6455 	      && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6456 	    overflow_p = true;
6457 	  if (!overflow_p)
6458 	    return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6459 				wide_int_to_tree (ctype, mul));
6460 	}
6461 
6462       /* If these operations "cancel" each other, we have the main
6463 	 optimizations of this pass, which occur when either constant is a
6464 	 multiple of the other, in which case we replace this with either an
6465 	 operation of CODE or TCODE.
6466 
6467 	 If we have an unsigned type, we cannot do this since it will change
6468 	 the result if the original computation overflowed.  */
6469       if (TYPE_OVERFLOW_UNDEFINED (ctype)
6470 	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6471 	      || (tcode == MULT_EXPR
6472 		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6473 		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6474 		  && code != MULT_EXPR)))
6475 	{
6476 	  if (wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6477 				 TYPE_SIGN (type)))
6478 	    {
6479 	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
6480 		*strict_overflow_p = true;
6481 	      return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6482 				  fold_convert (ctype,
6483 						const_binop (TRUNC_DIV_EXPR,
6484 							     op1, c)));
6485 	    }
6486 	  else if (wi::multiple_of_p (wi::to_wide (c), wi::to_wide (op1),
6487 				      TYPE_SIGN (type)))
6488 	    {
6489 	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
6490 		*strict_overflow_p = true;
6491 	      return fold_build2 (code, ctype, fold_convert (ctype, op0),
6492 				  fold_convert (ctype,
6493 						const_binop (TRUNC_DIV_EXPR,
6494 							     c, op1)));
6495 	    }
6496 	}
6497       break;
6498 
6499     default:
6500       break;
6501     }
6502 
6503   return 0;
6504 }
6505 
6506 /* Return a node which has the indicated constant VALUE (either 0 or
6507    1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6508    and is of the indicated TYPE.  */
6509 
6510 tree
6511 constant_boolean_node (bool value, tree type)
6512 {
6513   if (type == integer_type_node)
6514     return value ? integer_one_node : integer_zero_node;
6515   else if (type == boolean_type_node)
6516     return value ? boolean_true_node : boolean_false_node;
6517   else if (TREE_CODE (type) == VECTOR_TYPE)
6518     return build_vector_from_val (type,
6519 				  build_int_cst (TREE_TYPE (type),
6520 						 value ? -1 : 0));
6521   else
6522     return fold_convert (type, value ? integer_one_node : integer_zero_node);
6523 }
6524 
6525 
6526 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6527    Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
6528    CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6529    expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
6530    COND is the first argument to CODE; otherwise (as in the example
6531    given here), it is the second argument.  TYPE is the type of the
6532    original expression.  Return NULL_TREE if no simplification is
6533    possible.  */
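/* For example, folding `2 + (x < y)' yields `(x < y) ? 3 : 2'; both
   branches simplify to constants, so the transformation is worthwhile.
   When neither branch would simplify, NULL_TREE is returned instead.  */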
6534 
6535 static tree
6536 fold_binary_op_with_conditional_arg (location_t loc,
6537 				     enum tree_code code,
6538 				     tree type, tree op0, tree op1,
6539 				     tree cond, tree arg, int cond_first_p)
6540 {
6541   tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6542   tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6543   tree test, true_value, false_value;
6544   tree lhs = NULL_TREE;
6545   tree rhs = NULL_TREE;
6546   enum tree_code cond_code = COND_EXPR;
6547 
6548   if (TREE_CODE (cond) == COND_EXPR
6549       || TREE_CODE (cond) == VEC_COND_EXPR)
6550     {
6551       test = TREE_OPERAND (cond, 0);
6552       true_value = TREE_OPERAND (cond, 1);
6553       false_value = TREE_OPERAND (cond, 2);
6554       /* If this operand is a throw expression (and thus has void type),
6555 	 it does not make sense to try to perform a logical or arithmetic
6556 	 operation involving it.  */
6557       if (VOID_TYPE_P (TREE_TYPE (true_value)))
6558 	lhs = true_value;
6559       if (VOID_TYPE_P (TREE_TYPE (false_value)))
6560 	rhs = false_value;
6561     }
6562   else if (!(TREE_CODE (type) != VECTOR_TYPE
6563 	     && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
6564     {
6565       tree testtype = TREE_TYPE (cond);
6566       test = cond;
6567       true_value = constant_boolean_node (true, testtype);
6568       false_value = constant_boolean_node (false, testtype);
6569     }
6570   else
6571     /* Detect the case of mixing vector and scalar types - bail out.  */
6572     return NULL_TREE;
6573 
6574   if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6575     cond_code = VEC_COND_EXPR;
6576 
6577   /* This transformation is only worthwhile if we don't have to wrap ARG
6578      in a SAVE_EXPR and the operation can be simplified without recursing
6579      on at least one of the branches once it's pushed inside the COND_EXPR.  */
6580   if (!TREE_CONSTANT (arg)
6581       && (TREE_SIDE_EFFECTS (arg)
6582 	  || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6583 	  || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6584     return NULL_TREE;
6585 
6586   arg = fold_convert_loc (loc, arg_type, arg);
6587   if (lhs == 0)
6588     {
6589       true_value = fold_convert_loc (loc, cond_type, true_value);
6590       if (cond_first_p)
6591 	lhs = fold_build2_loc (loc, code, type, true_value, arg);
6592       else
6593 	lhs = fold_build2_loc (loc, code, type, arg, true_value);
6594     }
6595   if (rhs == 0)
6596     {
6597       false_value = fold_convert_loc (loc, cond_type, false_value);
6598       if (cond_first_p)
6599 	rhs = fold_build2_loc (loc, code, type, false_value, arg);
6600       else
6601 	rhs = fold_build2_loc (loc, code, type, arg, false_value);
6602     }
6603 
6604   /* Check that we have simplified at least one of the branches.  */
6605   if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6606     return NULL_TREE;
6607 
6608   return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6609 }
6610 
6611 
6612 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6613 
6614    If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6615    TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
6616    ADDEND is the same as X.
6617 
6618    X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6619    and finite.  The problematic cases are when X is zero, and its mode
6620    has signed zeros.  In the case of rounding towards -infinity,
6621    X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
6622    modes, X + 0 is not the same as X because -0 + 0 is 0.  */
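/* For instance, if X is -0.0, X + 0.0 evaluates to +0.0 rather than X,
   so the addition cannot be dropped when signed zeros are honored; and
   under rounding towards -infinity, +0.0 - 0.0 yields -0.0, so even the
   subtraction is only safe when sign-dependent rounding is not honored.  */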
6623 
6624 bool
6625 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6626 {
6627   if (!real_zerop (addend))
6628     return false;
6629 
6630   /* Don't allow the fold with -fsignaling-nans.  */
6631   if (HONOR_SNANS (element_mode (type)))
6632     return false;
6633 
6634   /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
6635   if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6636     return true;
6637 
6638   /* In a vector or complex, we would need to check the sign of all zeros.  */
6639   if (TREE_CODE (addend) != REAL_CST)
6640     return false;
6641 
6642   /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
6643   if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6644     negate = !negate;
6645 
6646   /* The mode has signed zeros, and we have to honor their sign.
6647      In this situation, there is only one case we can return true for.
6648      X - 0 is the same as X unless rounding towards -infinity is
6649      supported.  */
6650   return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
6651 }
6652 
6653 /* Subroutine of match.pd that optimizes comparisons of a division by
6654    a nonzero integer constant against an integer constant, i.e.
6655    X/C1 op C2.
6656 
6657    CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6658    GE_EXPR or LE_EXPR.  C1 and C2 must be INTEGER_CSTs.  */
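/* For example, for the signed comparison X/3 == 2 this computes
   *LO = 6 and *HI = 8, since X/3 == 2 holds exactly for 6 <= X <= 8;
   callers then rewrite the division into that range test.  */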
6659 
6660 enum tree_code
6661 fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
6662 		  tree *hi, bool *neg_overflow)
6663 {
6664   tree prod, tmp, type = TREE_TYPE (c1);
6665   signop sign = TYPE_SIGN (type);
6666   bool overflow;
6667 
6668   /* We have to do this the hard way to detect unsigned overflow.
6669      prod = int_const_binop (MULT_EXPR, c1, c2);  */
6670   wide_int val = wi::mul (wi::to_wide (c1), wi::to_wide (c2), sign, &overflow);
6671   prod = force_fit_type (type, val, -1, overflow);
6672   *neg_overflow = false;
6673 
6674   if (sign == UNSIGNED)
6675     {
6676       tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
6677       *lo = prod;
6678 
6679       /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp).  */
6680       val = wi::add (wi::to_wide (prod), wi::to_wide (tmp), sign, &overflow);
6681       *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
6682     }
6683   else if (tree_int_cst_sgn (c1) >= 0)
6684     {
6685       tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
6686       switch (tree_int_cst_sgn (c2))
6687 	{
6688 	case -1:
6689 	  *neg_overflow = true;
6690 	  *lo = int_const_binop (MINUS_EXPR, prod, tmp);
6691 	  *hi = prod;
6692 	  break;
6693 
6694 	case 0:
6695 	  *lo = fold_negate_const (tmp, type);
6696 	  *hi = tmp;
6697 	  break;
6698 
6699 	case 1:
6700 	  *hi = int_const_binop (PLUS_EXPR, prod, tmp);
6701 	  *lo = prod;
6702 	  break;
6703 
6704 	default:
6705 	  gcc_unreachable ();
6706 	}
6707     }
6708   else
6709     {
6710       /* A negative divisor reverses the relational operators.  */
6711       code = swap_tree_comparison (code);
6712 
6713       tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
6714       switch (tree_int_cst_sgn (c2))
6715 	{
6716 	case -1:
6717 	  *hi = int_const_binop (MINUS_EXPR, prod, tmp);
6718 	  *lo = prod;
6719 	  break;
6720 
6721 	case 0:
6722 	  *hi = fold_negate_const (tmp, type);
6723 	  *lo = tmp;
6724 	  break;
6725 
6726 	case 1:
6727 	  *neg_overflow = true;
6728 	  *lo = int_const_binop (PLUS_EXPR, prod, tmp);
6729 	  *hi = prod;
6730 	  break;
6731 
6732 	default:
6733 	  gcc_unreachable ();
6734 	}
6735     }
6736 
6737   if (code != EQ_EXPR && code != NE_EXPR)
6738     return code;
6739 
6740   if (TREE_OVERFLOW (*lo)
6741       || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
6742     *lo = NULL_TREE;
6743   if (TREE_OVERFLOW (*hi)
6744       || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
6745     *hi = NULL_TREE;
6746 
6747   return code;
6748 }
6749 
6750 
6751 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6752    equality/inequality test, then return a simplified form of the test
6753    using a sign test.  Otherwise return NULL.  TYPE is the desired
6754    result type.  */
6755 
6756 static tree
6757 fold_single_bit_test_into_sign_test (location_t loc,
6758 				     enum tree_code code, tree arg0, tree arg1,
6759 				     tree result_type)
6760 {
6761   /* If this is testing a single bit, we can optimize the test.  */
6762   if ((code == NE_EXPR || code == EQ_EXPR)
6763       && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6764       && integer_pow2p (TREE_OPERAND (arg0, 1)))
6765     {
6766       /* If we have (A & C) != 0 where C is the sign bit of A, convert
6767 	 this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
6768       tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6769 
6770       if (arg00 != NULL_TREE
6771 	  /* This is only a win if casting to a signed type is cheap,
6772 	     i.e. when arg00's type is not a partial mode.  */
6773 	  && type_has_mode_precision_p (TREE_TYPE (arg00)))
6774 	{
6775 	  tree stype = signed_type_for (TREE_TYPE (arg00));
6776 	  return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6777 			      result_type,
6778 			      fold_convert_loc (loc, stype, arg00),
6779 			      build_int_cst (stype, 0));
6780 	}
6781     }
6782 
6783   return NULL_TREE;
6784 }
6785 
6786 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6787    equality/inequality test, then return a simplified form of
6788    the test using shifts and logical operations.  Otherwise return
6789    NULL.  TYPE is the desired result type.  */
6790 
6791 tree
6792 fold_single_bit_test (location_t loc, enum tree_code code,
6793 		      tree arg0, tree arg1, tree result_type)
6794 {
6795   /* If this is testing a single bit, we can optimize the test.  */
6796   if ((code == NE_EXPR || code == EQ_EXPR)
6797       && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6798       && integer_pow2p (TREE_OPERAND (arg0, 1)))
6799     {
6800       tree inner = TREE_OPERAND (arg0, 0);
6801       tree type = TREE_TYPE (arg0);
6802       int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6803       scalar_int_mode operand_mode = SCALAR_INT_TYPE_MODE (type);
6804       int ops_unsigned;
6805       tree signed_type, unsigned_type, intermediate_type;
6806       tree tem, one;
6807 
6808       /* First, see if we can fold the single bit test into a sign-bit
6809 	 test.  */
6810       tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6811 						 result_type);
6812       if (tem)
6813 	return tem;
6814 
6815       /* Otherwise we have (A & C) != 0 where C is a single bit,
6816 	 convert that into ((A >> C2) & 1), where C2 = log2(C).
6817 	 Similarly for (A & C) == 0.  */
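      /* E.g. (A & 8) != 0 becomes ((A >> 3) & 1), and (A & 8) == 0
	 becomes (((A >> 3) ^ 1) & 1), built up by the steps below.  */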
6818 
6819       /* If INNER is a right shift of a constant and it plus BITNUM does
6820 	 not overflow, adjust BITNUM and INNER.  */
6821       if (TREE_CODE (inner) == RSHIFT_EXPR
6822 	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6823 	  && bitnum < TYPE_PRECISION (type)
6824 	  && wi::ltu_p (wi::to_wide (TREE_OPERAND (inner, 1)),
6825 			TYPE_PRECISION (type) - bitnum))
6826 	{
6827 	  bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6828 	  inner = TREE_OPERAND (inner, 0);
6829 	}
6830 
6831       /* If we are going to be able to omit the AND below, we must do our
6832 	 operations as unsigned.  If we must use the AND, we have a choice.
6833 	 Normally unsigned is faster, but for some machines signed is.  */
6834       ops_unsigned = (load_extend_op (operand_mode) == SIGN_EXTEND
6835 		      && !flag_syntax_only) ? 0 : 1;
6836 
6837       signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6838       unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6839       intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6840       inner = fold_convert_loc (loc, intermediate_type, inner);
6841 
6842       if (bitnum != 0)
6843 	inner = build2 (RSHIFT_EXPR, intermediate_type,
6844 			inner, size_int (bitnum));
6845 
6846       one = build_int_cst (intermediate_type, 1);
6847 
6848       if (code == EQ_EXPR)
6849 	inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6850 
6851       /* Put the AND last so it can combine with more things.  */
6852       inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6853 
6854       /* Make sure to return the proper type.  */
6855       inner = fold_convert_loc (loc, result_type, inner);
6856 
6857       return inner;
6858     }
6859   return NULL_TREE;
6860 }
6861 
6862 /* Test whether it is preferable to swap two operands, ARG0 and
6863    ARG1, for example because ARG0 is an integer constant and ARG1
6864    isn't.  */
6865 
6866 bool
6867 tree_swap_operands_p (const_tree arg0, const_tree arg1)
6868 {
6869   if (CONSTANT_CLASS_P (arg1))
6870     return 0;
6871   if (CONSTANT_CLASS_P (arg0))
6872     return 1;
6873 
6874   STRIP_NOPS (arg0);
6875   STRIP_NOPS (arg1);
6876 
6877   if (TREE_CONSTANT (arg1))
6878     return 0;
6879   if (TREE_CONSTANT (arg0))
6880     return 1;
6881 
6882   /* It is preferable to swap two SSA_NAME to ensure a canonical form
6883      for commutative and comparison operators.  Ensuring a canonical
6884      form allows the optimizers to find additional redundancies without
6885      having to explicitly check for both orderings.  */
6886   if (TREE_CODE (arg0) == SSA_NAME
6887       && TREE_CODE (arg1) == SSA_NAME
6888       && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6889     return 1;
6890 
6891   /* Put SSA_NAMEs last.  */
6892   if (TREE_CODE (arg1) == SSA_NAME)
6893     return 0;
6894   if (TREE_CODE (arg0) == SSA_NAME)
6895     return 1;
6896 
6897   /* Put variables last.  */
6898   if (DECL_P (arg1))
6899     return 0;
6900   if (DECL_P (arg0))
6901     return 1;
6902 
6903   return 0;
6904 }
6905 
6906 
6907 /* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
6908    means A >= Y && A != MAX, but in this case we know that
6909    A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */
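/* For instance, `a < x && a + 1 > y' becomes `a < x && a >= y': the
   bound a < x guarantees that a + 1 does not wrap past the maximum, so
   the strict inequality on a + 1 can be rewritten as a non-strict one
   on a.  */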
6910 
6911 static tree
6912 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6913 {
6914   tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6915 
6916   if (TREE_CODE (bound) == LT_EXPR)
6917     a = TREE_OPERAND (bound, 0);
6918   else if (TREE_CODE (bound) == GT_EXPR)
6919     a = TREE_OPERAND (bound, 1);
6920   else
6921     return NULL_TREE;
6922 
6923   typea = TREE_TYPE (a);
6924   if (!INTEGRAL_TYPE_P (typea)
6925       && !POINTER_TYPE_P (typea))
6926     return NULL_TREE;
6927 
6928   if (TREE_CODE (ineq) == LT_EXPR)
6929     {
6930       a1 = TREE_OPERAND (ineq, 1);
6931       y = TREE_OPERAND (ineq, 0);
6932     }
6933   else if (TREE_CODE (ineq) == GT_EXPR)
6934     {
6935       a1 = TREE_OPERAND (ineq, 0);
6936       y = TREE_OPERAND (ineq, 1);
6937     }
6938   else
6939     return NULL_TREE;
6940 
6941   if (TREE_TYPE (a1) != typea)
6942     return NULL_TREE;
6943 
6944   if (POINTER_TYPE_P (typea))
6945     {
6946       /* Convert the pointers to integers before taking the difference.  */
6947       tree ta = fold_convert_loc (loc, ssizetype, a);
6948       tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6949       diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6950     }
6951   else
6952     diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6953 
6954   if (!diff || !integer_onep (diff))
6955    return NULL_TREE;
6956 
6957   return fold_build2_loc (loc, GE_EXPR, type, a, y);
6958 }
6959 
6960 /* Fold a sum or difference of at least one multiplication.
6961    Returns the folded tree or NULL if no simplification could be made.  */
6962 
6963 static tree
6964 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6965 			  tree arg0, tree arg1)
6966 {
6967   tree arg00, arg01, arg10, arg11;
6968   tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6969 
6970   /* (A * C) +- (B * C) -> (A+-B) * C.
6971      (A * C) +- A -> A * (C+-1).
6972      We are most concerned about the case where C is a constant,
6973      but other combinations show up during loop reduction.  Since
6974      it is not difficult, try all four possibilities.  */
6975 
6976   if (TREE_CODE (arg0) == MULT_EXPR)
6977     {
6978       arg00 = TREE_OPERAND (arg0, 0);
6979       arg01 = TREE_OPERAND (arg0, 1);
6980     }
6981   else if (TREE_CODE (arg0) == INTEGER_CST)
6982     {
6983       arg00 = build_one_cst (type);
6984       arg01 = arg0;
6985     }
6986   else
6987     {
6988       /* We cannot generate constant 1 for fract.  */
6989       if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6990 	return NULL_TREE;
6991       arg00 = arg0;
6992       arg01 = build_one_cst (type);
6993     }
6994   if (TREE_CODE (arg1) == MULT_EXPR)
6995     {
6996       arg10 = TREE_OPERAND (arg1, 0);
6997       arg11 = TREE_OPERAND (arg1, 1);
6998     }
6999   else if (TREE_CODE (arg1) == INTEGER_CST)
7000     {
7001       arg10 = build_one_cst (type);
7002       /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7003 	 the purpose of this canonicalization.  */
7004       if (wi::neg_p (wi::to_wide (arg1), TYPE_SIGN (TREE_TYPE (arg1)))
7005 	  && negate_expr_p (arg1)
7006 	  && code == PLUS_EXPR)
7007 	{
7008 	  arg11 = negate_expr (arg1);
7009 	  code = MINUS_EXPR;
7010 	}
7011       else
7012 	arg11 = arg1;
7013     }
7014   else
7015     {
7016       /* We cannot generate constant 1 for fract.  */
7017       if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7018 	return NULL_TREE;
7019       arg10 = arg1;
7020       arg11 = build_one_cst (type);
7021     }
7022   same = NULL_TREE;
7023 
7024   /* Prefer factoring a common non-constant.  */
7025   if (operand_equal_p (arg00, arg10, 0))
7026     same = arg00, alt0 = arg01, alt1 = arg11;
7027   else if (operand_equal_p (arg01, arg11, 0))
7028     same = arg01, alt0 = arg00, alt1 = arg10;
7029   else if (operand_equal_p (arg00, arg11, 0))
7030     same = arg00, alt0 = arg01, alt1 = arg10;
7031   else if (operand_equal_p (arg01, arg10, 0))
7032     same = arg01, alt0 = arg00, alt1 = arg11;
7033 
7034   /* No identical multiplicands; see if we can find a common
7035      power-of-two factor in non-power-of-two multiplies.  This
7036      can help in multi-dimensional array access.  */
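  /* E.g. i * 12 + j * 4 becomes (i * 3 + j) * 4: 4 is a power of two
     dividing 12, so SAME is set to 4 and ALT0 to i * 3 below.  */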
7037   else if (tree_fits_shwi_p (arg01)
7038 	   && tree_fits_shwi_p (arg11))
7039     {
7040       HOST_WIDE_INT int01, int11, tmp;
7041       bool swap = false;
7042       tree maybe_same;
7043       int01 = tree_to_shwi (arg01);
7044       int11 = tree_to_shwi (arg11);
7045 
7046       /* Move min of absolute values to int11.  */
7047       if (absu_hwi (int01) < absu_hwi (int11))
7048         {
7049 	  tmp = int01, int01 = int11, int11 = tmp;
7050 	  alt0 = arg00, arg00 = arg10, arg10 = alt0;
7051 	  maybe_same = arg01;
7052 	  swap = true;
7053 	}
7054       else
7055 	maybe_same = arg11;
7056 
7057       if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7058 	  /* The remainder should not be a constant, otherwise we
7059 	     end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7060 	     increase the number of multiplications needed.  */
7061 	  && TREE_CODE (arg10) != INTEGER_CST)
7062         {
7063 	  alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7064 			      build_int_cst (TREE_TYPE (arg00),
7065 					     int01 / int11));
7066 	  alt1 = arg10;
7067 	  same = maybe_same;
7068 	  if (swap)
7069 	    maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7070 	}
7071     }
7072 
7073   if (!same)
7074     return NULL_TREE;
7075 
7076   if (! INTEGRAL_TYPE_P (type)
7077       || TYPE_OVERFLOW_WRAPS (type)
7078       /* We are neither factoring zero nor minus one.  */
7079       || TREE_CODE (same) == INTEGER_CST)
7080     return fold_build2_loc (loc, MULT_EXPR, type,
7081 			fold_build2_loc (loc, code, type,
7082 				     fold_convert_loc (loc, type, alt0),
7083 				     fold_convert_loc (loc, type, alt1)),
7084 			fold_convert_loc (loc, type, same));
7085 
7086   /* Same may be zero and thus the operation 'code' may overflow.  Likewise
7087      same may be minus one and thus the multiplication may overflow.  Perform
7088      the sum operation in an unsigned type.  */
7089   tree utype = unsigned_type_for (type);
7090   tree tem = fold_build2_loc (loc, code, utype,
7091 			      fold_convert_loc (loc, utype, alt0),
7092 			      fold_convert_loc (loc, utype, alt1));
7093   /* If the sum evaluated to a constant that is not -INF, the multiplication
7094      cannot overflow.  */
7095   if (TREE_CODE (tem) == INTEGER_CST
7096       && (wi::to_wide (tem)
7097 	  != wi::min_value (TYPE_PRECISION (utype), SIGNED)))
7098     return fold_build2_loc (loc, MULT_EXPR, type,
7099 			    fold_convert (type, tem), same);
7100 
7101   /* Do not resort to unsigned multiplication because
7102      we lose the no-overflow property of the expression.  */
7103   return NULL_TREE;
7104 }
7105 
7106 /* Subroutine of native_encode_expr.  Encode the INTEGER_CST
7107    specified by EXPR into the buffer PTR of length LEN bytes.
7108    Return the number of bytes placed in the buffer, or zero
7109    upon failure.  */
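/* For example, a 32-bit INTEGER_CST with value 0x01020304 is encoded as
   the bytes 04 03 02 01 on a little-endian target and 01 02 03 04 on a
   big-endian one (assuming 8-bit bytes).  */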
7110 
7111 static int
7112 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7113 {
7114   tree type = TREE_TYPE (expr);
7115   int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7116   int byte, offset, word, words;
7117   unsigned char value;
7118 
7119   if ((off == -1 && total_bytes > len) || off >= total_bytes)
7120     return 0;
7121   if (off == -1)
7122     off = 0;
7123 
7124   if (ptr == NULL)
7125     /* Dry run.  */
7126     return MIN (len, total_bytes - off);
7127 
7128   words = total_bytes / UNITS_PER_WORD;
7129 
7130   for (byte = 0; byte < total_bytes; byte++)
7131     {
7132       int bitpos = byte * BITS_PER_UNIT;
7133       /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7134 	 number of bytes.  */
7135       value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7136 
7137       if (total_bytes > UNITS_PER_WORD)
7138 	{
7139 	  word = byte / UNITS_PER_WORD;
7140 	  if (WORDS_BIG_ENDIAN)
7141 	    word = (words - 1) - word;
7142 	  offset = word * UNITS_PER_WORD;
7143 	  if (BYTES_BIG_ENDIAN)
7144 	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7145 	  else
7146 	    offset += byte % UNITS_PER_WORD;
7147 	}
7148       else
7149 	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7150       if (offset >= off && offset - off < len)
7151 	ptr[offset - off] = value;
7152     }
7153   return MIN (len, total_bytes - off);
7154 }
7155 
7156 
7157 /* Subroutine of native_encode_expr.  Encode the FIXED_CST
7158    specified by EXPR into the buffer PTR of length LEN bytes.
7159    Return the number of bytes placed in the buffer, or zero
7160    upon failure.  */
7161 
7162 static int
7163 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7164 {
7165   tree type = TREE_TYPE (expr);
7166   scalar_mode mode = SCALAR_TYPE_MODE (type);
7167   int total_bytes = GET_MODE_SIZE (mode);
7168   FIXED_VALUE_TYPE value;
7169   tree i_value, i_type;
7170 
7171   if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7172     return 0;
7173 
7174   i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7175 
7176   if (NULL_TREE == i_type || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7177     return 0;
7178 
7179   value = TREE_FIXED_CST (expr);
7180   i_value = double_int_to_tree (i_type, value.data);
7181 
7182   return native_encode_int (i_value, ptr, len, off);
7183 }
7184 
7185 
7186 /* Subroutine of native_encode_expr.  Encode the REAL_CST
7187    specified by EXPR into the buffer PTR of length LEN bytes.
7188    Return the number of bytes placed in the buffer, or zero
7189    upon failure.  */
7190 
7191 static int
7192 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7193 {
7194   tree type = TREE_TYPE (expr);
7195   int total_bytes = GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type));
7196   int byte, offset, word, words, bitpos;
7197   unsigned char value;
7198 
7199   /* There are always 32 bits in each long, no matter the size of
7200      the host's long.  We handle floating point representations with
7201      up to 192 bits.  */
7202   long tmp[6];
7203 
7204   if ((off == -1 && total_bytes > len) || off >= total_bytes)
7205     return 0;
7206   if (off == -1)
7207     off = 0;
7208 
7209   if (ptr == NULL)
7210     /* Dry run.  */
7211     return MIN (len, total_bytes - off);
7212 
7213   words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7214 
7215   real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7216 
7217   for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7218        bitpos += BITS_PER_UNIT)
7219     {
7220       byte = (bitpos / BITS_PER_UNIT) & 3;
7221       value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7222 
7223       if (UNITS_PER_WORD < 4)
7224 	{
7225 	  word = byte / UNITS_PER_WORD;
7226 	  if (WORDS_BIG_ENDIAN)
7227 	    word = (words - 1) - word;
7228 	  offset = word * UNITS_PER_WORD;
7229 	  if (BYTES_BIG_ENDIAN)
7230 	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7231 	  else
7232 	    offset += byte % UNITS_PER_WORD;
7233 	}
7234       else
7235 	{
7236 	  offset = byte;
7237 	  if (BYTES_BIG_ENDIAN)
7238 	    {
7239 	      /* Reverse bytes within each long, or within the entire float
7240 		 if it's smaller than a long (for HFmode).  */
7241 	      offset = MIN (3, total_bytes - 1) - offset;
7242 	      gcc_assert (offset >= 0);
7243 	    }
7244 	}
7245       offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7246       if (offset >= off
7247 	  && offset - off < len)
7248 	ptr[offset - off] = value;
7249     }
7250   return MIN (len, total_bytes - off);
7251 }
7252 
7253 /* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
7254    specified by EXPR into the buffer PTR of length LEN bytes.
7255    Return the number of bytes placed in the buffer, or zero
7256    upon failure.  */
7257 
7258 static int
7259 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7260 {
7261   int rsize, isize;
7262   tree part;
7263 
7264   part = TREE_REALPART (expr);
7265   rsize = native_encode_expr (part, ptr, len, off);
7266   if (off == -1 && rsize == 0)
7267     return 0;
7268   part = TREE_IMAGPART (expr);
7269   if (off != -1)
7270     off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
7271   isize = native_encode_expr (part, ptr ? ptr + rsize : NULL,
7272 			      len - rsize, off);
7273   if (off == -1 && isize != rsize)
7274     return 0;
7275   return rsize + isize;
7276 }
7277 
7278 
7279 /* Subroutine of native_encode_expr.  Encode the VECTOR_CST
7280    specified by EXPR into the buffer PTR of length LEN bytes.
7281    Return the number of bytes placed in the buffer, or zero
7282    upon failure.  */
7283 
7284 static int
7285 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7286 {
7287   unsigned HOST_WIDE_INT i, count;
7288   int size, offset;
7289   tree itype, elem;
7290 
7291   offset = 0;
7292   if (!VECTOR_CST_NELTS (expr).is_constant (&count))
7293     return 0;
7294   itype = TREE_TYPE (TREE_TYPE (expr));
7295   size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
7296   for (i = 0; i < count; i++)
7297     {
7298       if (off >= size)
7299 	{
7300 	  off -= size;
7301 	  continue;
7302 	}
7303       elem = VECTOR_CST_ELT (expr, i);
7304       int res = native_encode_expr (elem, ptr ? ptr + offset : NULL,
7305 				    len - offset, off);
7306       if ((off == -1 && res != size) || res == 0)
7307 	return 0;
7308       offset += res;
7309       if (offset >= len)
7310 	return (off == -1 && i < count - 1) ? 0 : offset;
7311       if (off != -1)
7312 	off = 0;
7313     }
7314   return offset;
7315 }
7316 
7317 
7318 /* Subroutine of native_encode_expr.  Encode the STRING_CST
7319    specified by EXPR into the buffer PTR of length LEN bytes.
7320    Return the number of bytes placed in the buffer, or zero
7321    upon failure.  */
7322 
7323 static int
7324 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7325 {
7326   tree type = TREE_TYPE (expr);
7327 
7328   /* Wide-char strings are encoded in target byte-order, so natively
7329      encoding them is trivial.  */
7330   if (BITS_PER_UNIT != CHAR_BIT
7331       || TREE_CODE (type) != ARRAY_TYPE
7332       || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7333       || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7334     return 0;
7335 
7336   HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
7337   if ((off == -1 && total_bytes > len) || off >= total_bytes)
7338     return 0;
7339   if (off == -1)
7340     off = 0;
7341   if (ptr == NULL)
7342     /* Dry run.  */;
7343   else if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7344     {
7345       int written = 0;
7346       if (off < TREE_STRING_LENGTH (expr))
7347 	{
7348 	  written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7349 	  memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7350 	}
7351       memset (ptr + written, 0,
7352 	      MIN (total_bytes - written, len - written));
7353     }
7354   else
7355     memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7356   return MIN (total_bytes - off, len);
7357 }
7358 
7359 
7360 /* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
7361    REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7362    buffer PTR of length LEN bytes.  If PTR is NULL, don't actually store
7363    anything, just do a dry run.  If OFF is not -1 then start
7364    the encoding at byte offset OFF and encode at most LEN bytes.
7365    Return the number of bytes placed in the buffer, or zero upon failure.  */
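/* A typical use, as in fold_view_convert_expr below, is a bit-level
   reinterpretation of a constant: encode it into a local buffer and
   reinterpret the bytes as another type, e.g.

     unsigned char buf[64];
     int len = native_encode_expr (expr, buf, sizeof (buf));
     tree res = len ? native_interpret_expr (type, buf, len) : NULL_TREE;

   Callers must treat a zero length (or NULL_TREE result) as failure.  */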
7366 
7367 int
7368 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7369 {
7370   /* We don't support starting at negative offset and -1 is special.  */
7371   if (off < -1)
7372     return 0;
7373 
7374   switch (TREE_CODE (expr))
7375     {
7376     case INTEGER_CST:
7377       return native_encode_int (expr, ptr, len, off);
7378 
7379     case REAL_CST:
7380       return native_encode_real (expr, ptr, len, off);
7381 
7382     case FIXED_CST:
7383       return native_encode_fixed (expr, ptr, len, off);
7384 
7385     case COMPLEX_CST:
7386       return native_encode_complex (expr, ptr, len, off);
7387 
7388     case VECTOR_CST:
7389       return native_encode_vector (expr, ptr, len, off);
7390 
7391     case STRING_CST:
7392       return native_encode_string (expr, ptr, len, off);
7393 
7394     default:
7395       return 0;
7396     }
7397 }
7398 
7399 
7400 /* Subroutine of native_interpret_expr.  Interpret the contents of
7401    the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7402    If the buffer cannot be interpreted, return NULL_TREE.  */
7403 
7404 static tree
7405 native_interpret_int (tree type, const unsigned char *ptr, int len)
7406 {
7407   int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7408 
7409   if (total_bytes > len
7410       || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7411     return NULL_TREE;
7412 
7413   wide_int result = wi::from_buffer (ptr, total_bytes);
7414 
7415   return wide_int_to_tree (type, result);
7416 }
7417 
7418 
7419 /* Subroutine of native_interpret_expr.  Interpret the contents of
7420    the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7421    If the buffer cannot be interpreted, return NULL_TREE.  */
7422 
7423 static tree
7424 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7425 {
7426   scalar_mode mode = SCALAR_TYPE_MODE (type);
7427   int total_bytes = GET_MODE_SIZE (mode);
7428   double_int result;
7429   FIXED_VALUE_TYPE fixed_value;
7430 
7431   if (total_bytes > len
7432       || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7433     return NULL_TREE;
7434 
7435   result = double_int::from_buffer (ptr, total_bytes);
7436   fixed_value = fixed_from_double_int (result, mode);
7437 
7438   return build_fixed (type, fixed_value);
7439 }
7440 
7441 
7442 /* Subroutine of native_interpret_expr.  Interpret the contents of
7443    the buffer PTR of length LEN as a REAL_CST of type TYPE.
7444    If the buffer cannot be interpreted, return NULL_TREE.  */
7445 
7446 static tree
7447 native_interpret_real (tree type, const unsigned char *ptr, int len)
7448 {
7449   scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
7450   int total_bytes = GET_MODE_SIZE (mode);
7451   unsigned char value;
7452   /* There are always 32 bits in each long, no matter the size of
7453      the host's long.  We handle floating point representations with
7454      up to 192 bits.  */
7455   REAL_VALUE_TYPE r;
7456   long tmp[6];
7457 
7458   if (total_bytes > len || total_bytes > 24)
7459     return NULL_TREE;
7460   int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7461 
7462   memset (tmp, 0, sizeof (tmp));
7463   for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7464        bitpos += BITS_PER_UNIT)
7465     {
7466       /* Both OFFSET and BYTE index within a long;
7467 	 bitpos indexes the whole float.  */
7468       int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
7469       if (UNITS_PER_WORD < 4)
7470 	{
7471 	  int word = byte / UNITS_PER_WORD;
7472 	  if (WORDS_BIG_ENDIAN)
7473 	    word = (words - 1) - word;
7474 	  offset = word * UNITS_PER_WORD;
7475 	  if (BYTES_BIG_ENDIAN)
7476 	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7477 	  else
7478 	    offset += byte % UNITS_PER_WORD;
7479 	}
7480       else
7481 	{
7482 	  offset = byte;
7483 	  if (BYTES_BIG_ENDIAN)
7484 	    {
7485 	      /* Reverse bytes within each long, or within the entire float
7486 		 if it's smaller than a long (for HFmode).  */
7487 	      offset = MIN (3, total_bytes - 1) - offset;
7488 	      gcc_assert (offset >= 0);
7489 	    }
7490 	}
7491       value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7492 
7493       tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7494     }
7495 
7496   real_from_target (&r, tmp, mode);
7497   return build_real (type, r);
7498 }
7499 
7500 
7501 /* Subroutine of native_interpret_expr.  Interpret the contents of
7502    the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7503    If the buffer cannot be interpreted, return NULL_TREE.  */
7504 
7505 static tree
7506 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7507 {
7508   tree etype, rpart, ipart;
7509   int size;
7510 
7511   etype = TREE_TYPE (type);
7512   size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
7513   if (size * 2 > len)
7514     return NULL_TREE;
7515   rpart = native_interpret_expr (etype, ptr, size);
7516   if (!rpart)
7517     return NULL_TREE;
7518   ipart = native_interpret_expr (etype, ptr+size, size);
7519   if (!ipart)
7520     return NULL_TREE;
7521   return build_complex (type, rpart, ipart);
7522 }
7523 
7524 
7525 /* Subroutine of native_interpret_expr.  Interpret the contents of
7526    the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7527    If the buffer cannot be interpreted, return NULL_TREE.  */
7528 
7529 static tree
7530 native_interpret_vector (tree type, const unsigned char *ptr, unsigned int len)
7531 {
7532   tree etype, elem;
7533   unsigned int i, size;
7534   unsigned HOST_WIDE_INT count;
7535 
7536   etype = TREE_TYPE (type);
7537   size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
7538   if (!TYPE_VECTOR_SUBPARTS (type).is_constant (&count)
7539       || size * count > len)
7540     return NULL_TREE;
7541 
7542   tree_vector_builder elements (type, count, 1);
7543   for (i = 0; i < count; ++i)
7544     {
7545       elem = native_interpret_expr (etype, ptr+(i*size), size);
7546       if (!elem)
7547 	return NULL_TREE;
7548       elements.quick_push (elem);
7549     }
7550   return elements.build ();
7551 }
7552 
7553 
7554 /* Subroutine of fold_view_convert_expr.  Interpret the contents of
7555    the buffer PTR of length LEN as a constant of type TYPE.  For
7556    INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7557    we return a REAL_CST, etc...  If the buffer cannot be interpreted,
7558    return NULL_TREE.  */
7559 
7560 tree
7561 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7562 {
7563   switch (TREE_CODE (type))
7564     {
7565     case INTEGER_TYPE:
7566     case ENUMERAL_TYPE:
7567     case BOOLEAN_TYPE:
7568     case POINTER_TYPE:
7569     case REFERENCE_TYPE:
7570       return native_interpret_int (type, ptr, len);
7571 
7572     case REAL_TYPE:
7573       return native_interpret_real (type, ptr, len);
7574 
7575     case FIXED_POINT_TYPE:
7576       return native_interpret_fixed (type, ptr, len);
7577 
7578     case COMPLEX_TYPE:
7579       return native_interpret_complex (type, ptr, len);
7580 
7581     case VECTOR_TYPE:
7582       return native_interpret_vector (type, ptr, len);
7583 
7584     default:
7585       return NULL_TREE;
7586     }
7587 }
7588 
7589 /* Returns true if we can interpret the contents of a native encoding
7590    as TYPE.  */
7591 
7592 static bool
7593 can_native_interpret_type_p (tree type)
7594 {
7595   switch (TREE_CODE (type))
7596     {
7597     case INTEGER_TYPE:
7598     case ENUMERAL_TYPE:
7599     case BOOLEAN_TYPE:
7600     case POINTER_TYPE:
7601     case REFERENCE_TYPE:
7602     case FIXED_POINT_TYPE:
7603     case REAL_TYPE:
7604     case COMPLEX_TYPE:
7605     case VECTOR_TYPE:
7606       return true;
7607     default:
7608       return false;
7609     }
7610 }
7611 
7612 
7613 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7614    TYPE at compile-time.  If we're unable to perform the conversion
7615    return NULL_TREE.  */
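/* For instance, on a target with IEEE single precision floats,
   VIEW_CONVERT_EXPR<int>(1.0f) folds to the integer constant 0x3f800000:
   the float is encoded into a byte buffer and the same bytes are then
   interpreted as an int.  */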
7616 
7617 static tree
7618 fold_view_convert_expr (tree type, tree expr)
7619 {
7620   /* We support up to 512-bit values (for V8DFmode).  */
7621   unsigned char buffer[64];
7622   int len;
7623 
7624   /* Check that the host and target are sane.  */
7625   if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7626     return NULL_TREE;
7627 
7628   len = native_encode_expr (expr, buffer, sizeof (buffer));
7629   if (len == 0)
7630     return NULL_TREE;
7631 
7632   return native_interpret_expr (type, buffer, len);
7633 }
7634 
7635 /* Build an expression for the address of T.  Folds away INDIRECT_REF
7636    to avoid confusing the gimplify process.  */
7637 
7638 tree
7639 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7640 {
7641   /* The size of the object is not relevant when talking about its address.  */
7642   if (TREE_CODE (t) == WITH_SIZE_EXPR)
7643     t = TREE_OPERAND (t, 0);
7644 
7645   if (TREE_CODE (t) == INDIRECT_REF)
7646     {
7647       t = TREE_OPERAND (t, 0);
7648 
7649       if (TREE_TYPE (t) != ptrtype)
7650 	t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7651     }
7652   else if (TREE_CODE (t) == MEM_REF
7653 	   && integer_zerop (TREE_OPERAND (t, 1)))
7654     return TREE_OPERAND (t, 0);
7655   else if (TREE_CODE (t) == MEM_REF
7656 	   && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7657     return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7658 			TREE_OPERAND (t, 0),
7659 			convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7660   else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7661     {
7662       t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7663 
7664       if (TREE_TYPE (t) != ptrtype)
7665 	t = fold_convert_loc (loc, ptrtype, t);
7666     }
7667   else
7668     t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7669 
7670   return t;
7671 }
7672 
7673 /* Build an expression for the address of T.  */
7674 
7675 tree
7676 build_fold_addr_expr_loc (location_t loc, tree t)
7677 {
7678   tree ptrtype = build_pointer_type (TREE_TYPE (t));
7679 
7680   return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7681 }
7682 
7683 /* Fold a unary expression of code CODE and type TYPE with operand
7684    OP0.  Return the folded expression if folding is successful.
7685    Otherwise, return NULL_TREE.  */
7686 
7687 tree
7688 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7689 {
7690   tree tem;
7691   tree arg0;
7692   enum tree_code_class kind = TREE_CODE_CLASS (code);
7693 
7694   gcc_assert (IS_EXPR_CODE_CLASS (kind)
7695 	      && TREE_CODE_LENGTH (code) == 1);
7696 
7697   arg0 = op0;
7698   if (arg0)
7699     {
7700       if (CONVERT_EXPR_CODE_P (code)
7701 	  || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7702 	{
7703 	  /* Don't use STRIP_NOPS, because signedness of argument type
7704 	     matters.  */
7705 	  STRIP_SIGN_NOPS (arg0);
7706 	}
7707       else
7708 	{
7709 	  /* Strip any conversions that don't change the mode.  This
7710 	     is safe for every expression, except for a comparison
7711 	     expression because its signedness is derived from its
7712 	     operands.
7713 
7714 	     Note that this is done as an internal manipulation within
7715 	     the constant folder, in order to find the simplest
7716 	     representation of the arguments so that their form can be
7717 	     studied.  In any case, the appropriate type conversions
7718 	     should be put back in the tree that will get out of the
7719 	     constant folder.  */
7720 	  STRIP_NOPS (arg0);
7721 	}
7722 
7723       if (CONSTANT_CLASS_P (arg0))
7724 	{
7725 	  tree tem = const_unop (code, type, arg0);
7726 	  if (tem)
7727 	    {
7728 	      if (TREE_TYPE (tem) != type)
7729 		tem = fold_convert_loc (loc, type, tem);
7730 	      return tem;
7731 	    }
7732 	}
7733     }
7734 
7735   tem = generic_simplify (loc, code, type, op0);
7736   if (tem)
7737     return tem;
7738 
7739   if (TREE_CODE_CLASS (code) == tcc_unary)
7740     {
7741       if (TREE_CODE (arg0) == COMPOUND_EXPR)
7742 	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7743 		       fold_build1_loc (loc, code, type,
7744 				    fold_convert_loc (loc, TREE_TYPE (op0),
7745 						      TREE_OPERAND (arg0, 1))));
7746       else if (TREE_CODE (arg0) == COND_EXPR)
7747 	{
7748 	  tree arg01 = TREE_OPERAND (arg0, 1);
7749 	  tree arg02 = TREE_OPERAND (arg0, 2);
7750 	  if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7751 	    arg01 = fold_build1_loc (loc, code, type,
7752 				 fold_convert_loc (loc,
7753 						   TREE_TYPE (op0), arg01));
7754 	  if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7755 	    arg02 = fold_build1_loc (loc, code, type,
7756 				 fold_convert_loc (loc,
7757 						   TREE_TYPE (op0), arg02));
7758 	  tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7759 			     arg01, arg02);
7760 
7761 	  /* If this was a conversion, and all we did was to move it
7762 	     inside the COND_EXPR, bring it back out.  But leave it if
7763 	     it is a conversion from integer to integer and the
7764 	     result precision is no wider than a word since such a
7765 	     conversion is cheap and may be optimized away by combine,
7766 	     while it couldn't if it were outside the COND_EXPR.  Then return
7767 	     so we don't get into an infinite recursion loop taking the
7768 	     conversion out and then back in.  */
7769 
7770 	  if ((CONVERT_EXPR_CODE_P (code)
7771 	       || code == NON_LVALUE_EXPR)
7772 	      && TREE_CODE (tem) == COND_EXPR
7773 	      && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7774 	      && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7775 	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7776 	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7777 	      && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7778 		  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7779 	      && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7780 		     && (INTEGRAL_TYPE_P
7781 			 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7782 		     && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7783 		  || flag_syntax_only))
7784 	    tem = build1_loc (loc, code, type,
7785 			      build3 (COND_EXPR,
7786 				      TREE_TYPE (TREE_OPERAND
7787 						 (TREE_OPERAND (tem, 1), 0)),
7788 				      TREE_OPERAND (tem, 0),
7789 				      TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7790 				      TREE_OPERAND (TREE_OPERAND (tem, 2),
7791 						    0)));
7792 	  return tem;
7793 	}
7794    }
7795 
7796   switch (code)
7797     {
7798     case NON_LVALUE_EXPR:
7799       if (!maybe_lvalue_p (op0))
7800 	return fold_convert_loc (loc, type, op0);
7801       return NULL_TREE;
7802 
7803     CASE_CONVERT:
7804     case FLOAT_EXPR:
7805     case FIX_TRUNC_EXPR:
7806       if (COMPARISON_CLASS_P (op0))
7807 	{
7808 	  /* If we have (type) (a CMP b) and type is an integral type, return
7809 	     new expression involving the new type.  Canonicalize
7810 	     a new expression involving the new type.  Canonicalize
7811 	     non-integral type.
7812 	     Do not fold the result, as that would not simplify further;
7813 	     folding again would only result in recursion.  */
7814 	  if (TREE_CODE (type) == BOOLEAN_TYPE)
7815 	    return build2_loc (loc, TREE_CODE (op0), type,
7816 			       TREE_OPERAND (op0, 0),
7817 			       TREE_OPERAND (op0, 1));
7818 	  else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7819 		   && TREE_CODE (type) != VECTOR_TYPE)
7820 	    return build3_loc (loc, COND_EXPR, type, op0,
7821 			       constant_boolean_node (true, type),
7822 			       constant_boolean_node (false, type));
7823 	}
7824 
7825       /* Handle (T *)&A.B.C for A being of type T and B and C
7826 	 living at offset zero.  This occurs frequently in
7827 	 C++ upcasting and then accessing the base.  */
7828       if (TREE_CODE (op0) == ADDR_EXPR
7829 	  && POINTER_TYPE_P (type)
7830 	  && handled_component_p (TREE_OPERAND (op0, 0)))
7831         {
7832 	  poly_int64 bitsize, bitpos;
7833 	  tree offset;
7834 	  machine_mode mode;
7835 	  int unsignedp, reversep, volatilep;
7836 	  tree base
7837 	    = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
7838 				   &offset, &mode, &unsignedp, &reversep,
7839 				   &volatilep);
7840 	  /* If the reference was to a (constant) zero offset, we can use
7841 	     the address of the base if it has the same base type
7842 	     as the result type and the pointer type is unqualified.  */
7843 	  if (!offset
7844 	      && known_eq (bitpos, 0)
7845 	      && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7846 		  == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7847 	      && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7848 	    return fold_convert_loc (loc, type,
7849 				     build_fold_addr_expr_loc (loc, base));
7850         }
7851 
7852       if (TREE_CODE (op0) == MODIFY_EXPR
7853 	  && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7854 	  /* Detect assigning a bitfield.  */
7855 	  && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7856 	       && DECL_BIT_FIELD
7857 	       (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7858 	{
7859 	  /* Don't leave an assignment inside a conversion
7860 	     unless assigning a bitfield.  */
7861 	  tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7862 	  /* First do the assignment, then return converted constant.  */
7863 	  tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7864 	  TREE_NO_WARNING (tem) = 1;
7865 	  TREE_USED (tem) = 1;
7866 	  return tem;
7867 	}
7868 
7869       /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7870 	 constant (if x has signed type, the sign bit cannot be set
7871 	 in c).  This folds extension into the BIT_AND_EXPR.
7872 	 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7873 	 very likely don't have maximal range for their precision and this
7874 	 transformation effectively doesn't preserve non-maximal ranges.  */
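      /* For example, with a signed int X, (unsigned long) (X & 0xff)
	 becomes (unsigned long) X & 0xff: 0xff does not include the sign
	 bit of X, so the widening conversion can be folded into the
	 BIT_AND_EXPR.  */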
7875       if (TREE_CODE (type) == INTEGER_TYPE
7876 	  && TREE_CODE (op0) == BIT_AND_EXPR
7877 	  && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7878 	{
7879 	  tree and_expr = op0;
7880 	  tree and0 = TREE_OPERAND (and_expr, 0);
7881 	  tree and1 = TREE_OPERAND (and_expr, 1);
7882 	  int change = 0;
7883 
7884 	  if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7885 	      || (TYPE_PRECISION (type)
7886 		  <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7887 	    change = 1;
7888 	  else if (TYPE_PRECISION (TREE_TYPE (and1))
7889 		   <= HOST_BITS_PER_WIDE_INT
7890 		   && tree_fits_uhwi_p (and1))
7891 	    {
7892 	      unsigned HOST_WIDE_INT cst;
7893 
7894 	      cst = tree_to_uhwi (and1);
7895 	      cst &= HOST_WIDE_INT_M1U
7896 		     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7897 	      change = (cst == 0);
7898 	      if (change
7899 		  && !flag_syntax_only
7900 		  && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
7901 		      == ZERO_EXTEND))
7902 		{
7903 		  tree uns = unsigned_type_for (TREE_TYPE (and0));
7904 		  and0 = fold_convert_loc (loc, uns, and0);
7905 		  and1 = fold_convert_loc (loc, uns, and1);
7906 		}
7907 	    }
7908 	  if (change)
7909 	    {
7910 	      tem = force_fit_type (type, wi::to_widest (and1), 0,
7911 				    TREE_OVERFLOW (and1));
7912 	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
7913 				      fold_convert_loc (loc, type, and0), tem);
7914 	    }
7915 	}
7916 
7917       /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
7918 	 cast (T1)X will fold away.  We assume that this happens when X itself
7919 	 is a cast.  */
7920       if (POINTER_TYPE_P (type)
7921 	  && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7922 	  && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
7923 	{
7924 	  tree arg00 = TREE_OPERAND (arg0, 0);
7925 	  tree arg01 = TREE_OPERAND (arg0, 1);
7926 
7927 	  return fold_build_pointer_plus_loc
7928 		   (loc, fold_convert_loc (loc, type, arg00), arg01);
7929 	}
7930 
7931       /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7932 	 of the same precision, and X has an integer type not narrower than
7933 	 types T1 or T2, i.e. the cast (T2)X isn't an extension.  */
7934       if (INTEGRAL_TYPE_P (type)
7935 	  && TREE_CODE (op0) == BIT_NOT_EXPR
7936 	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7937 	  && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7938 	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7939 	{
7940 	  tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7941 	  if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7942 	      && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7943 	    return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7944 				fold_convert_loc (loc, type, tem));
7945 	}
7946 
7947       /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7948 	 type of X and Y (integer types only).  */
7949       if (INTEGRAL_TYPE_P (type)
7950 	  && TREE_CODE (op0) == MULT_EXPR
7951 	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7952 	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7953 	{
7954 	  /* Be careful not to introduce new overflows.  */
7955 	  tree mult_type;
7956           if (TYPE_OVERFLOW_WRAPS (type))
7957 	    mult_type = type;
7958 	  else
7959 	    mult_type = unsigned_type_for (type);
7960 
7961 	  if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7962 	    {
7963 	      tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7964 				 fold_convert_loc (loc, mult_type,
7965 						   TREE_OPERAND (op0, 0)),
7966 				 fold_convert_loc (loc, mult_type,
7967 						   TREE_OPERAND (op0, 1)));
7968 	      return fold_convert_loc (loc, type, tem);
7969 	    }
7970 	}
7971 
7972       return NULL_TREE;
7973 
7974     case VIEW_CONVERT_EXPR:
7975       if (TREE_CODE (op0) == MEM_REF)
7976         {
7977 	  if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
7978 	    type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
7979 	  tem = fold_build2_loc (loc, MEM_REF, type,
7980 				 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7981 	  REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
7982 	  return tem;
7983 	}
7984 
7985       return NULL_TREE;
7986 
7987     case NEGATE_EXPR:
7988       tem = fold_negate_expr (loc, arg0);
7989       if (tem)
7990 	return fold_convert_loc (loc, type, tem);
7991       return NULL_TREE;
7992 
7993     case ABS_EXPR:
7994       /* Convert fabs((double)float) into (double)fabsf(float).  */
7995       if (TREE_CODE (arg0) == NOP_EXPR
7996 	  && TREE_CODE (type) == REAL_TYPE)
7997 	{
7998 	  tree targ0 = strip_float_extensions (arg0);
7999 	  if (targ0 != arg0)
8000 	    return fold_convert_loc (loc, type,
8001 				     fold_build1_loc (loc, ABS_EXPR,
8002 						  TREE_TYPE (targ0),
8003 						  targ0));
8004 	}
8005       return NULL_TREE;
8006 
8007     case BIT_NOT_EXPR:
8008       /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
8009       if (TREE_CODE (arg0) == BIT_XOR_EXPR
8010 	  && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8011 				    fold_convert_loc (loc, type,
8012 						      TREE_OPERAND (arg0, 0)))))
8013 	return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8014 				fold_convert_loc (loc, type,
8015 						  TREE_OPERAND (arg0, 1)));
8016       else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8017 	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8018 			       	     fold_convert_loc (loc, type,
8019 						       TREE_OPERAND (arg0, 1)))))
8020 	return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8021 			    fold_convert_loc (loc, type,
8022 					      TREE_OPERAND (arg0, 0)), tem);
8023 
8024       return NULL_TREE;
8025 
8026     case TRUTH_NOT_EXPR:
8027       /* Note that the operand of this must be an int
8028 	 and its values must be 0 or 1.
8029 	 ("true" is a fixed value perhaps depending on the language,
8030 	 but we don't handle values other than 1 correctly yet.)  */
8031       tem = fold_truth_not_expr (loc, arg0);
8032       if (!tem)
8033 	return NULL_TREE;
8034       return fold_convert_loc (loc, type, tem);
8035 
8036     case INDIRECT_REF:
8037       /* Fold *&X to X if X is an lvalue.  */
8038       if (TREE_CODE (op0) == ADDR_EXPR)
8039 	{
8040 	  tree op00 = TREE_OPERAND (op0, 0);
8041 	  if ((VAR_P (op00)
8042 	       || TREE_CODE (op00) == PARM_DECL
8043 	       || TREE_CODE (op00) == RESULT_DECL)
8044 	      && !TREE_READONLY (op00))
8045 	    return op00;
8046 	}
8047       return NULL_TREE;
8048 
8049     default:
8050       return NULL_TREE;
8051     } /* switch (code) */
8052 }
8053 
8054 
8055 /* If the operation was a conversion do _not_ mark a resulting constant
8056    with TREE_OVERFLOW if the original constant was not.  These conversions
8057    have implementation defined behavior and retaining the TREE_OVERFLOW
8058    flag here would confuse later passes such as VRP.  */
8059 tree
8060 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8061 				tree type, tree op0)
8062 {
8063   tree res = fold_unary_loc (loc, code, type, op0);
8064   if (res
8065       && TREE_CODE (res) == INTEGER_CST
8066       && TREE_CODE (op0) == INTEGER_CST
8067       && CONVERT_EXPR_CODE_P (code))
8068     TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8069 
8070   return res;
8071 }
8072 
8073 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8074    operands OP0 and OP1.  LOC is the location of the resulting expression.
8075    ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
8076    Return the folded expression if folding is successful.  Otherwise,
8077    return NULL_TREE.  */
8078 static tree
8079 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8080 		  tree arg0, tree arg1, tree op0, tree op1)
8081 {
8082   tree tem;
8083 
8084   /* We only do these simplifications if we are optimizing.  */
8085   if (!optimize)
8086     return NULL_TREE;
8087 
8088   /* Check for things like (A || B) && (A || C).  We can convert this
8089      to A || (B && C).  Note that either operator can be any of the four
8090      truth and/or operations and the transformation will still be
8091      valid.   Also note that we only care about order for the
8092      ANDIF and ORIF operators.  If B contains side effects, this
8093      might change the truth-value of A.  */
8094   if (TREE_CODE (arg0) == TREE_CODE (arg1)
8095       && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8096 	  || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8097 	  || TREE_CODE (arg0) == TRUTH_AND_EXPR
8098 	  || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8099       && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8100     {
8101       tree a00 = TREE_OPERAND (arg0, 0);
8102       tree a01 = TREE_OPERAND (arg0, 1);
8103       tree a10 = TREE_OPERAND (arg1, 0);
8104       tree a11 = TREE_OPERAND (arg1, 1);
8105       int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8106 			  || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8107 			 && (code == TRUTH_AND_EXPR
8108 			     || code == TRUTH_OR_EXPR));
8109 
8110       if (operand_equal_p (a00, a10, 0))
8111 	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8112 			    fold_build2_loc (loc, code, type, a01, a11));
8113       else if (commutative && operand_equal_p (a00, a11, 0))
8114 	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8115 			    fold_build2_loc (loc, code, type, a01, a10));
8116       else if (commutative && operand_equal_p (a01, a10, 0))
8117 	return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8118 			    fold_build2_loc (loc, code, type, a00, a11));
8119 
8120       /* This case is tricky because we must either have commutative
8121 	 operators or else A10 must not have side-effects.  */
8122 
8123       else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8124 	       && operand_equal_p (a01, a11, 0))
8125 	return fold_build2_loc (loc, TREE_CODE (arg0), type,
8126 			    fold_build2_loc (loc, code, type, a00, a10),
8127 			    a01);
8128     }
8129 
8130   /* See if we can build a range comparison.  */
8131   if ((tem = fold_range_test (loc, code, type, op0, op1)) != 0)
8132     return tem;
8133 
8134   if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8135       || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8136     {
8137       tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8138       if (tem)
8139 	return fold_build2_loc (loc, code, type, tem, arg1);
8140     }
8141 
8142   if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8143       || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8144     {
8145       tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8146       if (tem)
8147 	return fold_build2_loc (loc, code, type, arg0, tem);
8148     }
8149 
8150   /* Check for the possibility of merging component references.  If our
8151      lhs is another similar operation, try to merge its rhs with our
8152      rhs.  Then try to merge our lhs and rhs.  */
8153   if (TREE_CODE (arg0) == code
8154       && (tem = fold_truth_andor_1 (loc, code, type,
8155 				    TREE_OPERAND (arg0, 1), arg1)) != 0)
8156     return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8157 
8158   if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8159     return tem;
8160 
8161   if (LOGICAL_OP_NON_SHORT_CIRCUIT
8162       && !flag_sanitize_coverage
8163       && (code == TRUTH_AND_EXPR
8164           || code == TRUTH_ANDIF_EXPR
8165           || code == TRUTH_OR_EXPR
8166           || code == TRUTH_ORIF_EXPR))
8167     {
8168       enum tree_code ncode, icode;
8169 
8170       ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8171 	      ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8172       icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8173 
8174       /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8175 	 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8176 	 We don't want to pack more than two leaves into a non-IF AND/OR
8177 	 expression.
8178 	 If the tree code of the left-hand operand isn't an AND/OR-IF code
8179 	 and isn't equal to IF-CODE, then we don't want to add the right-hand
8180 	 operand.  If the inner right-hand side of the left-hand operand has
8181 	 side effects, or isn't simple, then we can't add to it, as otherwise
8182 	 we might destroy the if-sequence.  */
8183       if (TREE_CODE (arg0) == icode
8184 	  && simple_operand_p_2 (arg1)
8185 	  /* Needed for sequence points, to handle trapping and
8186 	     side effects.  */
8187 	  && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8188 	{
8189 	  tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8190 				 arg1);
8191 	  return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8192 				  tem);
8193 	}
8194 	/* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8195 	   or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C).  */
8196       else if (TREE_CODE (arg1) == icode
8197 	  && simple_operand_p_2 (arg0)
8198 	  /* Needed for sequence points, to handle trapping and
8199 	     side effects.  */
8200 	  && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8201 	{
8202 	  tem = fold_build2_loc (loc, ncode, type,
8203 				 arg0, TREE_OPERAND (arg1, 0));
8204 	  return fold_build2_loc (loc, icode, type, tem,
8205 				  TREE_OPERAND (arg1, 1));
8206 	}
8207       /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8208 	 into (A OR B).
8209 	 For sequence point consistency, we need to check for trapping
8210 	 and side effects.  */
8211       else if (code == icode && simple_operand_p_2 (arg0)
8212                && simple_operand_p_2 (arg1))
8213 	return fold_build2_loc (loc, ncode, type, arg0, arg1);
8214     }
8215 
8216   return NULL_TREE;
8217 }
8218 
8219 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8220    by changing CODE to reduce the magnitude of constants involved in
8221    ARG0 of the comparison.
8222    Returns a canonicalized comparison tree if a simplification was
8223    possible, otherwise returns NULL_TREE.
8224    Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8225    valid if signed overflow is undefined.  */
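/* For illustration: when signed overflow is undefined, "x - 4 < y" can be
   canonicalized to "x - 3 <= y", reducing the magnitude of the constant
   by one; the code below performs exactly this kind of step.  */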
8226 
8227 static tree
8228 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8229 				 tree arg0, tree arg1,
8230 				 bool *strict_overflow_p)
8231 {
8232   enum tree_code code0 = TREE_CODE (arg0);
8233   tree t, cst0 = NULL_TREE;
8234   int sgn0;
8235 
8236   /* Match A +- CST code arg1.  We can change this only if overflow
8237      is undefined.  */
8238   if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8239 	 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8240 	/* In principle pointers also have undefined overflow behavior,
8241 	   but that causes problems elsewhere.  */
8242 	&& !POINTER_TYPE_P (TREE_TYPE (arg0))
8243 	&& (code0 == MINUS_EXPR
8244 	    || code0 == PLUS_EXPR)
8245 	&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
8246     return NULL_TREE;
8247 
8248   /* Identify the constant in arg0 and its sign.  */
8249   cst0 = TREE_OPERAND (arg0, 1);
8250   sgn0 = tree_int_cst_sgn (cst0);
8251 
8252   /* Overflowed constants and zero will cause problems.  */
8253   if (integer_zerop (cst0)
8254       || TREE_OVERFLOW (cst0))
8255     return NULL_TREE;
8256 
8257   /* See if we can reduce the magnitude of the constant in
8258      arg0 by changing the comparison code.  */
8259   /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
8260   if (code == LT_EXPR
8261       && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8262     code = LE_EXPR;
8263   /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
8264   else if (code == GT_EXPR
8265 	   && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8266     code = GE_EXPR;
8267   /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
8268   else if (code == LE_EXPR
8269 	   && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8270     code = LT_EXPR;
8271   /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
8272   else if (code == GE_EXPR
8273 	   && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8274     code = GT_EXPR;
8275   else
8276     return NULL_TREE;
8277   *strict_overflow_p = true;
8278 
8279   /* Now build the constant reduced in magnitude.  But not if that
8280      would produce one outside of its type's range.  */
8281   if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8282       && ((sgn0 == 1
8283 	   && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8284 	   && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8285 	  || (sgn0 == -1
8286 	      && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8287 	      && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8288     return NULL_TREE;
8289 
8290   t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8291 		       cst0, build_int_cst (TREE_TYPE (cst0), 1));
8292   t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8293   t = fold_convert (TREE_TYPE (arg1), t);
8294 
8295   return fold_build2_loc (loc, code, type, t, arg1);
8296 }
8297 
8298 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8299    overflow further.  Try to decrease the magnitude of constants involved
8300    by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8301    and put sole constants at the second argument position.
8302    Returns the canonicalized tree if changed, otherwise NULL_TREE.  */
8303 
8304 static tree
8305 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8306 			       tree arg0, tree arg1)
8307 {
8308   tree t;
8309   bool strict_overflow_p;
8310   const char * const warnmsg = G_("assuming signed overflow does not occur "
8311 				  "when reducing constant in comparison");
8312 
8313   /* Try canonicalization by simplifying arg0.  */
8314   strict_overflow_p = false;
8315   t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8316 				       &strict_overflow_p);
8317   if (t)
8318     {
8319       if (strict_overflow_p)
8320 	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8321       return t;
8322     }
8323 
8324   /* Try canonicalization by simplifying arg1 using the swapped
8325      comparison.  */
8326   code = swap_tree_comparison (code);
8327   strict_overflow_p = false;
8328   t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8329 				       &strict_overflow_p);
8330   if (t && strict_overflow_p)
8331     fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8332   return t;
8333 }
8334 
8335 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8336    space.  This is used to avoid issuing overflow warnings for
8337    expressions like &p->x, which cannot wrap.  */
8338 
8339 static bool
8340 pointer_may_wrap_p (tree base, tree offset, poly_int64 bitpos)
8341 {
8342   if (!POINTER_TYPE_P (TREE_TYPE (base)))
8343     return true;
8344 
8345   if (maybe_lt (bitpos, 0))
8346     return true;
8347 
8348   poly_wide_int wi_offset;
8349   int precision = TYPE_PRECISION (TREE_TYPE (base));
8350   if (offset == NULL_TREE)
8351     wi_offset = wi::zero (precision);
8352   else if (!poly_int_tree_p (offset) || TREE_OVERFLOW (offset))
8353     return true;
8354   else
8355     wi_offset = wi::to_poly_wide (offset);
8356 
8357   bool overflow;
8358   poly_wide_int units = wi::shwi (bits_to_bytes_round_down (bitpos),
8359 				  precision);
8360   poly_wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8361   if (overflow)
8362     return true;
8363 
8364   poly_uint64 total_hwi, size;
8365   if (!total.to_uhwi (&total_hwi)
8366       || !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (base))),
8367 			   &size)
8368       || known_eq (size, 0U))
8369     return true;
8370 
8371   if (known_le (total_hwi, size))
8372     return false;
8373 
8374   /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8375      array.  */
8376   if (TREE_CODE (base) == ADDR_EXPR
8377       && poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (base, 0))),
8378 			  &size)
8379       && maybe_ne (size, 0U)
8380       && known_le (total_hwi, size))
8381     return false;
8382 
8383   return true;
8384 }
8385 
8386 /* Return a positive integer when the symbol DECL is known to have
8387    a nonzero address, zero when it's known not to (e.g., it's a weak
8388    symbol), and a negative integer when the symbol is not yet in the
8389    symbol table and so whether or not its address is zero is unknown.
8390    For function-local objects, always return a positive integer.  */
8391 static int
8392 maybe_nonzero_address (tree decl)
8393 {
8394   if (DECL_P (decl) && decl_in_symtab_p (decl))
8395     if (struct symtab_node *symbol = symtab_node::get_create (decl))
8396       return symbol->nonzero_address ();
8397 
8398   /* Function local objects are never NULL.  */
8399   if (DECL_P (decl)
8400       && (DECL_CONTEXT (decl)
8401       && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
8402       && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
8403     return 1;
8404 
8405   return -1;
8406 }
8407 
8408 /* Subroutine of fold_binary.  This routine performs all of the
8409    transformations that are common to the equality/inequality
8410    operators (EQ_EXPR and NE_EXPR) and the ordering operators
8411    (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
8412    fold_binary should call fold_binary.  Fold a comparison with
8413    tree code CODE and type TYPE with operands OP0 and OP1.  Return
8414    the folded comparison or NULL_TREE.  */
8415 
8416 static tree
8417 fold_comparison (location_t loc, enum tree_code code, tree type,
8418 		 tree op0, tree op1)
8419 {
8420   const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8421   tree arg0, arg1, tem;
8422 
8423   arg0 = op0;
8424   arg1 = op1;
8425 
8426   STRIP_SIGN_NOPS (arg0);
8427   STRIP_SIGN_NOPS (arg1);
8428 
8429   /* For comparisons of pointers we can decompose it to a compile time
8430      comparison of the base objects and the offsets into the object.
8431      This requires at least one operand being an ADDR_EXPR or a
8432      POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
8433   if (POINTER_TYPE_P (TREE_TYPE (arg0))
8434       && (TREE_CODE (arg0) == ADDR_EXPR
8435 	  || TREE_CODE (arg1) == ADDR_EXPR
8436 	  || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8437 	  || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8438     {
8439       tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8440       poly_int64 bitsize, bitpos0 = 0, bitpos1 = 0;
8441       machine_mode mode;
8442       int volatilep, reversep, unsignedp;
8443       bool indirect_base0 = false, indirect_base1 = false;
8444 
8445       /* Get base and offset for the access.  Strip ADDR_EXPR for
8446 	 get_inner_reference, but put it back by stripping INDIRECT_REF
8447 	 off the base object if possible.  indirect_baseN will be true
8448 	 if baseN is not an address but refers to the object itself.  */
8449       base0 = arg0;
8450       if (TREE_CODE (arg0) == ADDR_EXPR)
8451 	{
8452 	  base0
8453 	    = get_inner_reference (TREE_OPERAND (arg0, 0),
8454 				   &bitsize, &bitpos0, &offset0, &mode,
8455 				   &unsignedp, &reversep, &volatilep);
8456 	  if (TREE_CODE (base0) == INDIRECT_REF)
8457 	    base0 = TREE_OPERAND (base0, 0);
8458 	  else
8459 	    indirect_base0 = true;
8460 	}
8461       else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8462 	{
8463 	  base0 = TREE_OPERAND (arg0, 0);
8464 	  STRIP_SIGN_NOPS (base0);
8465 	  if (TREE_CODE (base0) == ADDR_EXPR)
8466 	    {
8467 	      base0
8468 		= get_inner_reference (TREE_OPERAND (base0, 0),
8469 				       &bitsize, &bitpos0, &offset0, &mode,
8470 				       &unsignedp, &reversep, &volatilep);
8471 	      if (TREE_CODE (base0) == INDIRECT_REF)
8472 		base0 = TREE_OPERAND (base0, 0);
8473 	      else
8474 		indirect_base0 = true;
8475 	    }
8476 	  if (offset0 == NULL_TREE || integer_zerop (offset0))
8477 	    offset0 = TREE_OPERAND (arg0, 1);
8478 	  else
8479 	    offset0 = size_binop (PLUS_EXPR, offset0,
8480 				  TREE_OPERAND (arg0, 1));
8481 	  if (poly_int_tree_p (offset0))
8482 	    {
8483 	      poly_offset_int tem = wi::sext (wi::to_poly_offset (offset0),
8484 					      TYPE_PRECISION (sizetype));
8485 	      tem <<= LOG2_BITS_PER_UNIT;
8486 	      tem += bitpos0;
8487 	      if (tem.to_shwi (&bitpos0))
8488 		offset0 = NULL_TREE;
8489 	    }
8490 	}
8491 
8492       base1 = arg1;
8493       if (TREE_CODE (arg1) == ADDR_EXPR)
8494 	{
8495 	  base1
8496 	    = get_inner_reference (TREE_OPERAND (arg1, 0),
8497 				   &bitsize, &bitpos1, &offset1, &mode,
8498 				   &unsignedp, &reversep, &volatilep);
8499 	  if (TREE_CODE (base1) == INDIRECT_REF)
8500 	    base1 = TREE_OPERAND (base1, 0);
8501 	  else
8502 	    indirect_base1 = true;
8503 	}
8504       else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8505 	{
8506 	  base1 = TREE_OPERAND (arg1, 0);
8507 	  STRIP_SIGN_NOPS (base1);
8508 	  if (TREE_CODE (base1) == ADDR_EXPR)
8509 	    {
8510 	      base1
8511 		= get_inner_reference (TREE_OPERAND (base1, 0),
8512 				       &bitsize, &bitpos1, &offset1, &mode,
8513 				       &unsignedp, &reversep, &volatilep);
8514 	      if (TREE_CODE (base1) == INDIRECT_REF)
8515 		base1 = TREE_OPERAND (base1, 0);
8516 	      else
8517 		indirect_base1 = true;
8518 	    }
8519 	  if (offset1 == NULL_TREE || integer_zerop (offset1))
8520 	    offset1 = TREE_OPERAND (arg1, 1);
8521 	  else
8522 	    offset1 = size_binop (PLUS_EXPR, offset1,
8523 				  TREE_OPERAND (arg1, 1));
8524 	  if (poly_int_tree_p (offset1))
8525 	    {
8526 	      poly_offset_int tem = wi::sext (wi::to_poly_offset (offset1),
8527 					      TYPE_PRECISION (sizetype));
8528 	      tem <<= LOG2_BITS_PER_UNIT;
8529 	      tem += bitpos1;
8530 	      if (tem.to_shwi (&bitpos1))
8531 		offset1 = NULL_TREE;
8532 	    }
8533 	}
8534 
8535       /* If we have equivalent bases we might be able to simplify.  */
8536       if (indirect_base0 == indirect_base1
8537 	  && operand_equal_p (base0, base1,
8538 			      indirect_base0 ? OEP_ADDRESS_OF : 0))
8539 	{
8540 	  /* We can fold this expression to a constant if the non-constant
8541 	     offset parts are equal.  */
8542 	  if ((offset0 == offset1
8543 	       || (offset0 && offset1
8544 		   && operand_equal_p (offset0, offset1, 0)))
8545 	      && (equality_code
8546 		  || (indirect_base0
8547 		      && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
8548 		  || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
8549 	    {
8550 	      if (!equality_code
8551 		  && maybe_ne (bitpos0, bitpos1)
8552 		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
8553 		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
8554 		fold_overflow_warning (("assuming pointer wraparound does not "
8555 					"occur when comparing P +- C1 with "
8556 					"P +- C2"),
8557 				       WARN_STRICT_OVERFLOW_CONDITIONAL);
8558 
8559 	      switch (code)
8560 		{
8561 		case EQ_EXPR:
8562 		  if (known_eq (bitpos0, bitpos1))
8563 		    return constant_boolean_node (true, type);
8564 		  if (known_ne (bitpos0, bitpos1))
8565 		    return constant_boolean_node (false, type);
8566 		  break;
8567 		case NE_EXPR:
8568 		  if (known_ne (bitpos0, bitpos1))
8569 		    return constant_boolean_node (true, type);
8570 		  if (known_eq (bitpos0, bitpos1))
8571 		    return constant_boolean_node (false, type);
8572 		  break;
8573 		case LT_EXPR:
8574 		  if (known_lt (bitpos0, bitpos1))
8575 		    return constant_boolean_node (true, type);
8576 		  if (known_ge (bitpos0, bitpos1))
8577 		    return constant_boolean_node (false, type);
8578 		  break;
8579 		case LE_EXPR:
8580 		  if (known_le (bitpos0, bitpos1))
8581 		    return constant_boolean_node (true, type);
8582 		  if (known_gt (bitpos0, bitpos1))
8583 		    return constant_boolean_node (false, type);
8584 		  break;
8585 		case GE_EXPR:
8586 		  if (known_ge (bitpos0, bitpos1))
8587 		    return constant_boolean_node (true, type);
8588 		  if (known_lt (bitpos0, bitpos1))
8589 		    return constant_boolean_node (false, type);
8590 		  break;
8591 		case GT_EXPR:
8592 		  if (known_gt (bitpos0, bitpos1))
8593 		    return constant_boolean_node (true, type);
8594 		  if (known_le (bitpos0, bitpos1))
8595 		    return constant_boolean_node (false, type);
8596 		  break;
8597 		default:;
8598 		}
8599 	    }
8600 	  /* We can simplify the comparison to a comparison of the variable
8601 	     offset parts if the constant offset parts are equal.
8602 	     Be careful to use signed sizetype here because otherwise we
8603 	     mess with array offsets in the wrong way.  This is possible
8604 	     because pointer arithmetic is restricted to remain within an
8605 	     object and overflow on pointer differences is undefined as of
8606 	     6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
8607 	  else if (known_eq (bitpos0, bitpos1)
8608 		   && (equality_code
8609 		       || (indirect_base0
8610 			   && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
8611 		       || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
8612 	    {
8613 	      /* By converting to signed sizetype we cover middle-end pointer
8614 	         arithmetic which operates on unsigned pointer types of size
8615 	         type size and ARRAY_REF offsets which are properly sign or
8616 	         zero extended from their type in case it is narrower than
8617 	         sizetype.  */
8618 	      if (offset0 == NULL_TREE)
8619 		offset0 = build_int_cst (ssizetype, 0);
8620 	      else
8621 		offset0 = fold_convert_loc (loc, ssizetype, offset0);
8622 	      if (offset1 == NULL_TREE)
8623 		offset1 = build_int_cst (ssizetype, 0);
8624 	      else
8625 		offset1 = fold_convert_loc (loc, ssizetype, offset1);
8626 
8627 	      if (!equality_code
8628 		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
8629 		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
8630 		fold_overflow_warning (("assuming pointer wraparound does not "
8631 					"occur when comparing P +- C1 with "
8632 					"P +- C2"),
8633 				       WARN_STRICT_OVERFLOW_COMPARISON);
8634 
8635 	      return fold_build2_loc (loc, code, type, offset0, offset1);
8636 	    }
8637 	}
8638       /* For equal offsets we can simplify to a comparison of the
8639 	 base addresses.  */
8640       else if (known_eq (bitpos0, bitpos1)
8641 	       && (indirect_base0
8642 		   ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8643 	       && (indirect_base1
8644 		   ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8645 	       && ((offset0 == offset1)
8646 		   || (offset0 && offset1
8647 		       && operand_equal_p (offset0, offset1, 0))))
8648 	{
8649 	  if (indirect_base0)
8650 	    base0 = build_fold_addr_expr_loc (loc, base0);
8651 	  if (indirect_base1)
8652 	    base1 = build_fold_addr_expr_loc (loc, base1);
8653 	  return fold_build2_loc (loc, code, type, base0, base1);
8654 	}
8655       /* Comparison between an ordinary (non-weak) symbol and a null
8656 	 pointer can be eliminated since such symbols must have a non-null
8657 	 address.  In C, relational expressions between pointers
8658 	 to objects and null pointers are undefined.  The results
8659 	 below follow the C++ rules with the additional property that
8660 	 every object pointer compares greater than a null pointer.
8661       */
8662       else if (((DECL_P (base0)
8663 		 && maybe_nonzero_address (base0) > 0
8664 		 /* Avoid folding references to struct members at offset 0 to
8665 		    prevent tests like '&ptr->firstmember == 0' from getting
8666 		    eliminated.  When ptr is null, although the -> expression
8667 		    is strictly speaking invalid, GCC retains it as a matter
8668 		    of QoI.  See PR c/44555. */
8669 		 && (offset0 == NULL_TREE && known_ne (bitpos0, 0)))
8670 		|| CONSTANT_CLASS_P (base0))
8671 	       && indirect_base0
8672 	       /* The caller guarantees that when one of the arguments is
8673 		  constant (i.e., null in this case) it is second.  */
8674 	       && integer_zerop (arg1))
8675 	{
8676 	  switch (code)
8677 	    {
8678 	    case EQ_EXPR:
8679 	    case LE_EXPR:
8680 	    case LT_EXPR:
8681 	      return constant_boolean_node (false, type);
8682 	    case GE_EXPR:
8683 	    case GT_EXPR:
8684 	    case NE_EXPR:
8685 	      return constant_boolean_node (true, type);
8686 	    default:
8687 	      gcc_unreachable ();
8688 	    }
8689 	}
8690     }
8691 
8692   /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8693      X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
8694      the resulting offset is smaller in absolute value than the
8695      original one and has the same sign.  */
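  /* For illustration: "x + 2 < y + 5" can become "x < y + 3", since the
     combined constant 3 is smaller in magnitude than 5 and has the same
     sign, so no new signed overflow can be introduced.  */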
8696   if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8697       && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8698       && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8699       && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8700 	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8701       && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8702       && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8703 	  && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8704     {
8705       tree const1 = TREE_OPERAND (arg0, 1);
8706       tree const2 = TREE_OPERAND (arg1, 1);
8707       tree variable1 = TREE_OPERAND (arg0, 0);
8708       tree variable2 = TREE_OPERAND (arg1, 0);
8709       tree cst;
8710       const char * const warnmsg = G_("assuming signed overflow does not "
8711 				      "occur when combining constants around "
8712 				      "a comparison");
8713 
8714       /* Put the constant on the side where it doesn't overflow and is
8715 	 of lower absolute value and of the same sign as before.  */
8716       cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8717 			     ? MINUS_EXPR : PLUS_EXPR,
8718 			     const2, const1);
8719       if (!TREE_OVERFLOW (cst)
8720 	  && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8721 	  && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8722 	{
8723 	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8724 	  return fold_build2_loc (loc, code, type,
8725 				  variable1,
8726 				  fold_build2_loc (loc, TREE_CODE (arg1),
8727 						   TREE_TYPE (arg1),
8728 						   variable2, cst));
8729 	}
8730 
8731       cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8732 			     ? MINUS_EXPR : PLUS_EXPR,
8733 			     const1, const2);
8734       if (!TREE_OVERFLOW (cst)
8735 	  && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8736 	  && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8737 	{
8738 	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8739 	  return fold_build2_loc (loc, code, type,
8740 				  fold_build2_loc (loc, TREE_CODE (arg0),
8741 						   TREE_TYPE (arg0),
8742 						   variable1, cst),
8743 				  variable2);
8744 	}
8745     }
8746 
8747   tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
8748   if (tem)
8749     return tem;
8750 
8751   /* If we are comparing an expression that just has comparisons
8752      of two integer values, arithmetic expressions of those comparisons,
8753      and constants, we can simplify it.  There are only three cases
8754      to check: the two values can either be equal, the first can be
8755      greater, or the second can be greater.  Fold the expression for
8756      those three values.  Since each value must be 0 or 1, we have
8757      eight possibilities, each of which corresponds to the constant 0
8758      or 1 or one of the six possible comparisons.
8759 
8760      This handles common cases like (a > b) == 0 but also handles
8761      expressions like  ((x > y) - (y > x)) > 0, which supposedly
8762      occur in macroized code.  */
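  /* For illustration: for "(a > b) == 0" the three trial evaluations
     (a greater, a equal, a less) give 0, 1 and 1, so the 3-bit mask built
     below is 3 and the whole expression simplifies to "a <= b".  */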
8763 
8764   if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8765     {
8766       tree cval1 = 0, cval2 = 0;
8767 
8768       if (twoval_comparison_p (arg0, &cval1, &cval2)
8769 	  /* Don't handle degenerate cases here; they should already
8770 	     have been handled anyway.  */
8771 	  && cval1 != 0 && cval2 != 0
8772 	  && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8773 	  && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8774 	  && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8775 	  && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8776 	  && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8777 	  && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8778 				TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8779 	{
8780 	  tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8781 	  tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8782 
8783 	  /* We can't just pass T to eval_subst in case cval1 or cval2
8784 	     was the same as ARG1.  */
8785 
8786 	  tree high_result
8787 		= fold_build2_loc (loc, code, type,
8788 			       eval_subst (loc, arg0, cval1, maxval,
8789 					   cval2, minval),
8790 			       arg1);
8791 	  tree equal_result
8792 		= fold_build2_loc (loc, code, type,
8793 			       eval_subst (loc, arg0, cval1, maxval,
8794 					   cval2, maxval),
8795 			       arg1);
8796 	  tree low_result
8797 		= fold_build2_loc (loc, code, type,
8798 			       eval_subst (loc, arg0, cval1, minval,
8799 					   cval2, maxval),
8800 			       arg1);
8801 
8802 	  /* All three of these results should be 0 or 1.  Confirm they are.
8803 	     Then use those values to select the proper code to use.  */
8804 
8805 	  if (TREE_CODE (high_result) == INTEGER_CST
8806 	      && TREE_CODE (equal_result) == INTEGER_CST
8807 	      && TREE_CODE (low_result) == INTEGER_CST)
8808 	    {
8809 	      /* Make a 3-bit mask with the high-order bit being the
8810 		 value for `>', the next for '=', and the low for '<'.  */
8811 	      switch ((integer_onep (high_result) * 4)
8812 		      + (integer_onep (equal_result) * 2)
8813 		      + integer_onep (low_result))
8814 		{
8815 		case 0:
8816 		  /* Always false.  */
8817 		  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
8818 		case 1:
8819 		  code = LT_EXPR;
8820 		  break;
8821 		case 2:
8822 		  code = EQ_EXPR;
8823 		  break;
8824 		case 3:
8825 		  code = LE_EXPR;
8826 		  break;
8827 		case 4:
8828 		  code = GT_EXPR;
8829 		  break;
8830 		case 5:
8831 		  code = NE_EXPR;
8832 		  break;
8833 		case 6:
8834 		  code = GE_EXPR;
8835 		  break;
8836 		case 7:
8837 		  /* Always true.  */
8838 		  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
8839 		}
8840 
8841 	      return fold_build2_loc (loc, code, type, cval1, cval2);
8842 	    }
8843 	}
8844     }
8845 
8846   return NULL_TREE;
8847 }
8848 
8849 
8850 /* Subroutine of fold_binary.  Optimize complex multiplications of the
8851    form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
8852    argument EXPR represents the expression "z" of type TYPE.  */
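/* For illustration: with z = 3 + 4i, z * conj(z) = (3 + 4i) * (3 - 4i)
   = 3*3 + 4*4 = 25, so the result below is built as a complex value with
   real part realpart(z)^2 + imagpart(z)^2 and a zero imaginary part.  */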
8853 
8854 static tree
8855 fold_mult_zconjz (location_t loc, tree type, tree expr)
8856 {
8857   tree itype = TREE_TYPE (type);
8858   tree rpart, ipart, tem;
8859 
8860   if (TREE_CODE (expr) == COMPLEX_EXPR)
8861     {
8862       rpart = TREE_OPERAND (expr, 0);
8863       ipart = TREE_OPERAND (expr, 1);
8864     }
8865   else if (TREE_CODE (expr) == COMPLEX_CST)
8866     {
8867       rpart = TREE_REALPART (expr);
8868       ipart = TREE_IMAGPART (expr);
8869     }
8870   else
8871     {
8872       expr = save_expr (expr);
8873       rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
8874       ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
8875     }
8876 
8877   rpart = save_expr (rpart);
8878   ipart = save_expr (ipart);
8879   tem = fold_build2_loc (loc, PLUS_EXPR, itype,
8880 		     fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
8881 		     fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
8882   return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
8883 			  build_zero_cst (itype));
8884 }
8885 
8886 
8887 /* Helper function for fold_vec_perm.  Store elements of VECTOR_CST or
8888    CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
8889    true if successful.  */
8890 
8891 static bool
8892 vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
8893 {
8894   unsigned HOST_WIDE_INT i, nunits;
8895 
8896   if (TREE_CODE (arg) == VECTOR_CST
8897       && VECTOR_CST_NELTS (arg).is_constant (&nunits))
8898     {
8899       for (i = 0; i < nunits; ++i)
8900 	elts[i] = VECTOR_CST_ELT (arg, i);
8901     }
8902   else if (TREE_CODE (arg) == CONSTRUCTOR)
8903     {
8904       constructor_elt *elt;
8905 
8906       FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
8907 	if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
8908 	  return false;
8909 	else
8910 	  elts[i] = elt->value;
8911     }
8912   else
8913     return false;
8914   for (; i < nelts; i++)
8915     elts[i]
8916       = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
8917   return true;
8918 }
8919 
8920 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
8921    selector.  Return the folded VECTOR_CST or CONSTRUCTOR if successful,
8922    NULL_TREE otherwise.  */
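/* For illustration: with four-element vectors ARG0 = {a0,a1,a2,a3} and
   ARG1 = {b0,b1,b2,b3}, the selector {0, 5, 2, 7} indexes into the
   concatenation of ARG0 and ARG1 and produces {a0, b1, a2, b3}.  */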
8923 
8924 static tree
8925 fold_vec_perm (tree type, tree arg0, tree arg1, const vec_perm_indices &sel)
8926 {
8927   unsigned int i;
8928   unsigned HOST_WIDE_INT nelts;
8929   bool need_ctor = false;
8930 
8931   if (!sel.length ().is_constant (&nelts))
8932     return NULL_TREE;
8933   gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type), nelts)
8934 	      && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)), nelts)
8935 	      && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)), nelts));
8936   if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
8937       || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
8938     return NULL_TREE;
8939 
8940   tree *in_elts = XALLOCAVEC (tree, nelts * 2);
8941   if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
8942       || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
8943     return NULL_TREE;
8944 
8945   tree_vector_builder out_elts (type, nelts, 1);
8946   for (i = 0; i < nelts; i++)
8947     {
8948       HOST_WIDE_INT index;
8949       if (!sel[i].is_constant (&index))
8950 	return NULL_TREE;
8951       if (!CONSTANT_CLASS_P (in_elts[index]))
8952 	need_ctor = true;
8953       out_elts.quick_push (unshare_expr (in_elts[index]));
8954     }
8955 
8956   if (need_ctor)
8957     {
8958       vec<constructor_elt, va_gc> *v;
8959       vec_alloc (v, nelts);
8960       for (i = 0; i < nelts; i++)
8961 	CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, out_elts[i]);
8962       return build_constructor (type, v);
8963     }
8964   else
8965     return out_elts.build ();
8966 }
8967 
8968 /* Try to fold a pointer difference of type TYPE between two address
8969    expressions of array references AREF0 and AREF1 using location LOC.
8970    Return a simplified expression for the difference or NULL_TREE.  */
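/* For illustration: for "int a[10]", the byte difference between &a[i]
   and &a[j] folds to (i - j) multiplied by the element size, with a zero
   base offset because both array references share the base "a".  */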
8971 
8972 static tree
8973 fold_addr_of_array_ref_difference (location_t loc, tree type,
8974 				   tree aref0, tree aref1,
8975 				   bool use_pointer_diff)
8976 {
8977   tree base0 = TREE_OPERAND (aref0, 0);
8978   tree base1 = TREE_OPERAND (aref1, 0);
8979   tree base_offset = build_int_cst (type, 0);
8980 
8981   /* If the bases are array references as well, recurse.  If the bases
8982      are pointer indirections compute the difference of the pointers.
8983      If the bases are equal, we are set.  */
8984   if ((TREE_CODE (base0) == ARRAY_REF
8985        && TREE_CODE (base1) == ARRAY_REF
8986        && (base_offset
8987 	   = fold_addr_of_array_ref_difference (loc, type, base0, base1,
8988 						use_pointer_diff)))
8989       || (INDIRECT_REF_P (base0)
8990 	  && INDIRECT_REF_P (base1)
8991 	  && (base_offset
8992 	        = use_pointer_diff
8993 		  ? fold_binary_loc (loc, POINTER_DIFF_EXPR, type,
8994 				     TREE_OPERAND (base0, 0),
8995 				     TREE_OPERAND (base1, 0))
8996 		  : fold_binary_loc (loc, MINUS_EXPR, type,
8997 				     fold_convert (type,
8998 						   TREE_OPERAND (base0, 0)),
8999 				     fold_convert (type,
9000 						   TREE_OPERAND (base1, 0)))))
9001       || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
9002     {
9003       tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9004       tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9005       tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9006       tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
9007       return fold_build2_loc (loc, PLUS_EXPR, type,
9008 			      base_offset,
9009 			      fold_build2_loc (loc, MULT_EXPR, type,
9010 					       diff, esz));
9011     }
9012   return NULL_TREE;
9013 }
9014 
9015 /* If the real or vector real constant CST of type TYPE has an exact
9016    inverse, return it, else return NULL.  */
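/* For illustration: 0.25 has the exact inverse 4.0 and 2.0 has 0.5,
   while 3.0 has no exactly representable inverse (1/3 is not a binary
   fraction), so NULL_TREE is returned for it.  */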
9017 
9018 tree
9019 exact_inverse (tree type, tree cst)
9020 {
9021   REAL_VALUE_TYPE r;
9022   tree unit_type;
9023   machine_mode mode;
9024 
9025   switch (TREE_CODE (cst))
9026     {
9027     case REAL_CST:
9028       r = TREE_REAL_CST (cst);
9029 
9030       if (exact_real_inverse (TYPE_MODE (type), &r))
9031 	return build_real (type, r);
9032 
9033       return NULL_TREE;
9034 
9035     case VECTOR_CST:
9036       {
9037 	unit_type = TREE_TYPE (type);
9038 	mode = TYPE_MODE (unit_type);
9039 
9040 	tree_vector_builder elts;
9041 	if (!elts.new_unary_operation (type, cst, false))
9042 	  return NULL_TREE;
9043 	unsigned int count = elts.encoded_nelts ();
9044 	for (unsigned int i = 0; i < count; ++i)
9045 	  {
9046 	    r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9047 	    if (!exact_real_inverse (mode, &r))
9048 	      return NULL_TREE;
9049 	    elts.quick_push (build_real (unit_type, r));
9050 	  }
9051 
9052 	return elts.build ();
9053       }
9054 
9055     default:
9056       return NULL_TREE;
9057     }
9058 }
9059 
9060 /*  Mask out the tz least significant bits of X of type TYPE where
9061     tz is the number of trailing zeroes in Y.  */
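/* For illustration: if Y is 0b11000 it has three trailing zeroes, so the
   three least significant bits of X are cleared, e.g. X = 0b10111
   becomes 0b10000.  */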
9062 static wide_int
9063 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9064 {
9065   int tz = wi::ctz (y);
9066   if (tz > 0)
9067     return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9068   return x;
9069 }
9070 
9071 /* Return true when T is an address and is known to be nonzero.
9072    For floating point we further ensure that T is not denormal.
9073    Similar logic is present in nonzero_address in rtlanal.h.
9074 
9075    If the return value is based on the assumption that signed overflow
9076    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9077    change *STRICT_OVERFLOW_P.  */
9078 
9079 static bool
9080 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9081 {
9082   tree type = TREE_TYPE (t);
9083   enum tree_code code;
9084 
9085   /* Doing something useful for floating point would need more work.  */
9086   if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9087     return false;
9088 
9089   code = TREE_CODE (t);
9090   switch (TREE_CODE_CLASS (code))
9091     {
9092     case tcc_unary:
9093       return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9094 					      strict_overflow_p);
9095     case tcc_binary:
9096     case tcc_comparison:
9097       return tree_binary_nonzero_warnv_p (code, type,
9098 					       TREE_OPERAND (t, 0),
9099 					       TREE_OPERAND (t, 1),
9100 					       strict_overflow_p);
9101     case tcc_constant:
9102     case tcc_declaration:
9103     case tcc_reference:
9104       return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9105 
9106     default:
9107       break;
9108     }
9109 
9110   switch (code)
9111     {
9112     case TRUTH_NOT_EXPR:
9113       return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9114 					      strict_overflow_p);
9115 
9116     case TRUTH_AND_EXPR:
9117     case TRUTH_OR_EXPR:
9118     case TRUTH_XOR_EXPR:
9119       return tree_binary_nonzero_warnv_p (code, type,
9120 					       TREE_OPERAND (t, 0),
9121 					       TREE_OPERAND (t, 1),
9122 					       strict_overflow_p);
9123 
9124     case COND_EXPR:
9125     case CONSTRUCTOR:
9126     case OBJ_TYPE_REF:
9127     case ASSERT_EXPR:
9128     case ADDR_EXPR:
9129     case WITH_SIZE_EXPR:
9130     case SSA_NAME:
9131       return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9132 
9133     case COMPOUND_EXPR:
9134     case MODIFY_EXPR:
9135     case BIND_EXPR:
9136       return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9137 					strict_overflow_p);
9138 
9139     case SAVE_EXPR:
9140       return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9141 					strict_overflow_p);
9142 
9143     case CALL_EXPR:
9144       {
9145 	tree fndecl = get_callee_fndecl (t);
9146 	if (!fndecl) return false;
9147 	if (flag_delete_null_pointer_checks && !flag_check_new
9148 	    && DECL_IS_OPERATOR_NEW (fndecl)
9149 	    && !TREE_NOTHROW (fndecl))
9150 	  return true;
9151 	if (flag_delete_null_pointer_checks
9152 	    && lookup_attribute ("returns_nonnull",
9153 		 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9154 	  return true;
9155 	return alloca_call_p (t);
9156       }
9157 
9158     default:
9159       break;
9160     }
9161   return false;
9162 }
9163 
9164 /* Return true when T is an address and is known to be nonzero.
9165    Handle warnings about undefined signed overflow.  */
9166 
9167 bool
9168 tree_expr_nonzero_p (tree t)
9169 {
9170   bool ret, strict_overflow_p;
9171 
9172   strict_overflow_p = false;
9173   ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9174   if (strict_overflow_p)
9175     fold_overflow_warning (("assuming signed overflow does not occur when "
9176 			    "determining that expression is always "
9177 			    "non-zero"),
9178 			   WARN_STRICT_OVERFLOW_MISC);
9179   return ret;
9180 }
9181 
9182 /* Return true if T is known not to be equal to an integer W.  */
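/* For illustration: if range information shows an SSA name T lies in
   [10, 20], then T is known not to equal 5; likewise, if the nonzero-bits
   information shows the low bit of T is always zero, T cannot equal the
   odd constant 5.  */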
9183 
9184 bool
9185 expr_not_equal_to (tree t, const wide_int &w)
9186 {
9187   wide_int min, max, nz;
9188   value_range_type rtype;
9189   switch (TREE_CODE (t))
9190     {
9191     case INTEGER_CST:
9192       return wi::to_wide (t) != w;
9193 
9194     case SSA_NAME:
9195       if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
9196 	return false;
9197       rtype = get_range_info (t, &min, &max);
9198       if (rtype == VR_RANGE)
9199 	{
9200 	  if (wi::lt_p (max, w, TYPE_SIGN (TREE_TYPE (t))))
9201 	    return true;
9202 	  if (wi::lt_p (w, min, TYPE_SIGN (TREE_TYPE (t))))
9203 	    return true;
9204 	}
9205       else if (rtype == VR_ANTI_RANGE
9206 	       && wi::le_p (min, w, TYPE_SIGN (TREE_TYPE (t)))
9207 	       && wi::le_p (w, max, TYPE_SIGN (TREE_TYPE (t))))
9208 	return true;
9209       /* If T has some known zero bits and W has any of those bits set,
9210 	 then T is known not to be equal to W.  */
9211       if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
9212 			      TYPE_PRECISION (TREE_TYPE (t))), 0))
9213 	return true;
9214       return false;
9215 
9216     default:
9217       return false;
9218     }
9219 }
9220 
9221 /* Fold a binary expression of code CODE and type TYPE with operands
9222    OP0 and OP1.  LOC is the location of the resulting expression.
9223    Return the folded expression if folding is successful.  Otherwise,
9224    return NULL_TREE.  */
9225 
9226 tree
9227 fold_binary_loc (location_t loc, enum tree_code code, tree type,
9228 		 tree op0, tree op1)
9229 {
9230   enum tree_code_class kind = TREE_CODE_CLASS (code);
9231   tree arg0, arg1, tem;
9232   tree t1 = NULL_TREE;
9233   bool strict_overflow_p;
9234   unsigned int prec;
9235 
9236   gcc_assert (IS_EXPR_CODE_CLASS (kind)
9237 	      && TREE_CODE_LENGTH (code) == 2
9238 	      && op0 != NULL_TREE
9239 	      && op1 != NULL_TREE);
9240 
9241   arg0 = op0;
9242   arg1 = op1;
9243 
9244   /* Strip any conversions that don't change the mode.  This is
9245      safe for every expression, except for a comparison expression
9246      because its signedness is derived from its operands.  So, in
9247      the latter case, only strip conversions that don't change the
9248      signedness.  MIN_EXPR/MAX_EXPR also need signedness of arguments
9249      preserved.
9250 
9251      Note that this is done as an internal manipulation within the
9252      constant folder, in order to find the simplest representation
9253      of the arguments so that their form can be studied.  In any
9254      case, the appropriate type conversions should be put back in
9255      the tree that will get out of the constant folder.  */
9256 
9257   if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9258     {
9259       STRIP_SIGN_NOPS (arg0);
9260       STRIP_SIGN_NOPS (arg1);
9261     }
9262   else
9263     {
9264       STRIP_NOPS (arg0);
9265       STRIP_NOPS (arg1);
9266     }
9267 
9268   /* Note that TREE_CONSTANT isn't enough: static var addresses are
9269      constant but we can't do arithmetic on them.  */
9270   if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9271     {
9272       tem = const_binop (code, type, arg0, arg1);
9273       if (tem != NULL_TREE)
9274 	{
9275 	  if (TREE_TYPE (tem) != type)
9276 	    tem = fold_convert_loc (loc, type, tem);
9277 	  return tem;
9278 	}
9279     }
9280 
9281   /* If this is a commutative operation, and ARG0 is a constant, move it
9282      to ARG1 to reduce the number of tests below.  */
9283   if (commutative_tree_code (code)
9284       && tree_swap_operands_p (arg0, arg1))
9285     return fold_build2_loc (loc, code, type, op1, op0);
9286 
9287   /* Likewise if this is a comparison, and ARG0 is a constant, move it
9288      to ARG1 to reduce the number of tests below.  */
9289   if (kind == tcc_comparison
9290       && tree_swap_operands_p (arg0, arg1))
9291     return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9292 
9293   tem = generic_simplify (loc, code, type, op0, op1);
9294   if (tem)
9295     return tem;
9296 
9297   /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9298 
9299      First check for cases where an arithmetic operation is applied to a
9300      compound, conditional, or comparison operation.  Push the arithmetic
9301      operation inside the compound or conditional to see if any folding
9302      can then be done.  Convert comparison to conditional for this purpose.
9303      The also optimizes non-constant cases that used to be done in
9304      This also optimizes non-constant cases that used to be done in
9305 
9306      Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR in
9307      which one of the operands is a comparison and the other is a comparison, a
9308      BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
9309      code below would make the expression more complex.  Change it to a
9310      TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
9311      TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */
9312 
9313   if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9314        || code == EQ_EXPR || code == NE_EXPR)
9315       && !VECTOR_TYPE_P (TREE_TYPE (arg0))
9316       && ((truth_value_p (TREE_CODE (arg0))
9317 	   && (truth_value_p (TREE_CODE (arg1))
9318 	       || (TREE_CODE (arg1) == BIT_AND_EXPR
9319 		   && integer_onep (TREE_OPERAND (arg1, 1)))))
9320 	  || (truth_value_p (TREE_CODE (arg1))
9321 	      && (truth_value_p (TREE_CODE (arg0))
9322 		  || (TREE_CODE (arg0) == BIT_AND_EXPR
9323 		      && integer_onep (TREE_OPERAND (arg0, 1)))))))
9324     {
9325       tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9326 			 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9327 			 : TRUTH_XOR_EXPR,
9328 			 boolean_type_node,
9329 			 fold_convert_loc (loc, boolean_type_node, arg0),
9330 			 fold_convert_loc (loc, boolean_type_node, arg1));
9331 
9332       if (code == EQ_EXPR)
9333 	tem = invert_truthvalue_loc (loc, tem);
9334 
9335       return fold_convert_loc (loc, type, tem);
9336     }
9337 
9338   if (TREE_CODE_CLASS (code) == tcc_binary
9339       || TREE_CODE_CLASS (code) == tcc_comparison)
9340     {
9341       if (TREE_CODE (arg0) == COMPOUND_EXPR)
9342 	{
9343 	  tem = fold_build2_loc (loc, code, type,
9344 			     fold_convert_loc (loc, TREE_TYPE (op0),
9345 					       TREE_OPERAND (arg0, 1)), op1);
9346 	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9347 			     tem);
9348 	}
9349       if (TREE_CODE (arg1) == COMPOUND_EXPR)
9350 	{
9351 	  tem = fold_build2_loc (loc, code, type, op0,
9352 			     fold_convert_loc (loc, TREE_TYPE (op1),
9353 					       TREE_OPERAND (arg1, 1)));
9354 	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9355 			     tem);
9356 	}
9357 
9358       if (TREE_CODE (arg0) == COND_EXPR
9359 	  || TREE_CODE (arg0) == VEC_COND_EXPR
9360 	  || COMPARISON_CLASS_P (arg0))
9361 	{
9362 	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9363 						     arg0, arg1,
9364 						     /*cond_first_p=*/1);
9365 	  if (tem != NULL_TREE)
9366 	    return tem;
9367 	}
9368 
9369       if (TREE_CODE (arg1) == COND_EXPR
9370 	  || TREE_CODE (arg1) == VEC_COND_EXPR
9371 	  || COMPARISON_CLASS_P (arg1))
9372 	{
9373 	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9374 						     arg1, arg0,
9375 					             /*cond_first_p=*/0);
9376 	  if (tem != NULL_TREE)
9377 	    return tem;
9378 	}
9379     }
9380 
9381   switch (code)
9382     {
9383     case MEM_REF:
9384       /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2].  */
9385       if (TREE_CODE (arg0) == ADDR_EXPR
9386 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9387 	{
9388 	  tree iref = TREE_OPERAND (arg0, 0);
9389 	  return fold_build2 (MEM_REF, type,
9390 			      TREE_OPERAND (iref, 0),
9391 			      int_const_binop (PLUS_EXPR, arg1,
9392 					       TREE_OPERAND (iref, 1)));
9393 	}
9394 
9395       /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2].  */
9396       if (TREE_CODE (arg0) == ADDR_EXPR
9397 	  && handled_component_p (TREE_OPERAND (arg0, 0)))
9398 	{
9399 	  tree base;
9400 	  poly_int64 coffset;
9401 	  base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9402 						&coffset);
9403 	  if (!base)
9404 	    return NULL_TREE;
9405 	  return fold_build2 (MEM_REF, type,
9406 			      build_fold_addr_expr (base),
9407 			      int_const_binop (PLUS_EXPR, arg1,
9408 					       size_int (coffset)));
9409 	}
9410 
9411       return NULL_TREE;
9412 
9413     case POINTER_PLUS_EXPR:
9414       /* INT +p INT -> (PTR)(INT + INT).  Stripping types allows for this. */
9415       if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9416 	   && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9417         return fold_convert_loc (loc, type,
9418 				 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9419 					      fold_convert_loc (loc, sizetype,
9420 								arg1),
9421 					      fold_convert_loc (loc, sizetype,
9422 								arg0)));
9423 
9424       return NULL_TREE;
9425 
9426     case PLUS_EXPR:
9427       if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9428 	{
9429 	  /* X + (X / CST) * -CST is X % CST.  */
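	  /* For example, with CST == 8, x + (x / 8) * -8 folds to x % 8;
	     since the division is a TRUNC_DIV_EXPR, the identity also holds
	     for negative x.  */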
9430 	  if (TREE_CODE (arg1) == MULT_EXPR
9431 	      && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9432 	      && operand_equal_p (arg0,
9433 				  TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9434 	    {
9435 	      tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9436 	      tree cst1 = TREE_OPERAND (arg1, 1);
9437 	      tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9438 				      cst1, cst0);
9439 	      if (sum && integer_zerop (sum))
9440 		return fold_convert_loc (loc, type,
9441 					 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9442 						      TREE_TYPE (arg0), arg0,
9443 						      cst0));
9444 	    }
9445 	}
9446 
9447       /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9448 	 one.  Make sure the type is not saturating and has the signedness of
9449 	 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9450 	 ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
9451       if ((TREE_CODE (arg0) == MULT_EXPR
9452 	   || TREE_CODE (arg1) == MULT_EXPR)
9453 	  && !TYPE_SATURATING (type)
9454 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9455 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9456 	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
9457         {
9458 	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9459 	  if (tem)
9460 	    return tem;
9461 	}
9462 
9463       if (! FLOAT_TYPE_P (type))
9464 	{
9465 	  /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9466 	     (plus (plus (mult) (mult)) (foo)) so that we can
9467 	     take advantage of the factoring cases below.  */
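	  /* For example, (a*b + c) + d*e is rewritten as (a*b + d*e) + c,
	     exposing both multiplications to the factoring code.  */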
9468 	  if (ANY_INTEGRAL_TYPE_P (type)
9469 	      && TYPE_OVERFLOW_WRAPS (type)
9470 	      && (((TREE_CODE (arg0) == PLUS_EXPR
9471 		    || TREE_CODE (arg0) == MINUS_EXPR)
9472 		   && TREE_CODE (arg1) == MULT_EXPR)
9473 		  || ((TREE_CODE (arg1) == PLUS_EXPR
9474 		       || TREE_CODE (arg1) == MINUS_EXPR)
9475 		      && TREE_CODE (arg0) == MULT_EXPR)))
9476 	    {
9477 	      tree parg0, parg1, parg, marg;
9478 	      enum tree_code pcode;
9479 
9480 	      if (TREE_CODE (arg1) == MULT_EXPR)
9481 		parg = arg0, marg = arg1;
9482 	      else
9483 		parg = arg1, marg = arg0;
9484 	      pcode = TREE_CODE (parg);
9485 	      parg0 = TREE_OPERAND (parg, 0);
9486 	      parg1 = TREE_OPERAND (parg, 1);
9487 	      STRIP_NOPS (parg0);
9488 	      STRIP_NOPS (parg1);
9489 
9490 	      if (TREE_CODE (parg0) == MULT_EXPR
9491 		  && TREE_CODE (parg1) != MULT_EXPR)
9492 		return fold_build2_loc (loc, pcode, type,
9493 				    fold_build2_loc (loc, PLUS_EXPR, type,
9494 						 fold_convert_loc (loc, type,
9495 								   parg0),
9496 						 fold_convert_loc (loc, type,
9497 								   marg)),
9498 				    fold_convert_loc (loc, type, parg1));
9499 	      if (TREE_CODE (parg0) != MULT_EXPR
9500 		  && TREE_CODE (parg1) == MULT_EXPR)
9501 		return
9502 		  fold_build2_loc (loc, PLUS_EXPR, type,
9503 			       fold_convert_loc (loc, type, parg0),
9504 			       fold_build2_loc (loc, pcode, type,
9505 					    fold_convert_loc (loc, type, marg),
9506 					    fold_convert_loc (loc, type,
9507 							      parg1)));
9508 	    }
9509 	}
9510       else
9511 	{
9512 	  /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9513 	     to __complex__ ( x, y ).  This is not the same for SNaNs or
9514 	     if signed zeros are involved.  */
9515 	  if (!HONOR_SNANS (element_mode (arg0))
9516               && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9517 	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9518 	    {
9519 	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9520 	      tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9521 	      tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9522 	      bool arg0rz = false, arg0iz = false;
9523 	      if ((arg0r && (arg0rz = real_zerop (arg0r)))
9524 		  || (arg0i && (arg0iz = real_zerop (arg0i))))
9525 		{
9526 		  tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9527 		  tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9528 		  if (arg0rz && arg1i && real_zerop (arg1i))
9529 		    {
9530 		      tree rp = arg1r ? arg1r
9531 				  : build1 (REALPART_EXPR, rtype, arg1);
9532 		      tree ip = arg0i ? arg0i
9533 				  : build1 (IMAGPART_EXPR, rtype, arg0);
9534 		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9535 		    }
9536 		  else if (arg0iz && arg1r && real_zerop (arg1r))
9537 		    {
9538 		      tree rp = arg0r ? arg0r
9539 				  : build1 (REALPART_EXPR, rtype, arg0);
9540 		      tree ip = arg1i ? arg1i
9541 				  : build1 (IMAGPART_EXPR, rtype, arg1);
9542 		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9543 		    }
9544 		}
9545 	    }
9546 
9547           /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9548              We associate floats only if the user has specified
9549              -fassociative-math.  */
9550           if (flag_associative_math
9551               && TREE_CODE (arg1) == PLUS_EXPR
9552               && TREE_CODE (arg0) != MULT_EXPR)
9553             {
9554               tree tree10 = TREE_OPERAND (arg1, 0);
9555               tree tree11 = TREE_OPERAND (arg1, 1);
9556               if (TREE_CODE (tree11) == MULT_EXPR
9557 		  && TREE_CODE (tree10) == MULT_EXPR)
9558                 {
9559                   tree tree0;
9560                   tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9561                   return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9562                 }
9563             }
9564           /* Convert (b*c + d*e) + a into b*c + (d*e + a).
9565              We associate floats only if the user has specified
9566              -fassociative-math.  */
9567           if (flag_associative_math
9568               && TREE_CODE (arg0) == PLUS_EXPR
9569               && TREE_CODE (arg1) != MULT_EXPR)
9570             {
9571               tree tree00 = TREE_OPERAND (arg0, 0);
9572               tree tree01 = TREE_OPERAND (arg0, 1);
9573               if (TREE_CODE (tree01) == MULT_EXPR
9574 		  && TREE_CODE (tree00) == MULT_EXPR)
9575                 {
9576                   tree tree0;
9577                   tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9578                   return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9579                 }
9580             }
9581 	}
9582 
9583      bit_rotate:
9584       /* (A << C1) + (A >> C2), if A is unsigned and C1+C2 is the size of A,
9585 	 is a rotate of A by C1 bits.  */
9586       /* (A << B) + (A >> (Z - B)), if A is unsigned and Z is the size of A,
9587 	 is a rotate of A by B bits.
9588 	 Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
9589 	 though in this case CODE must be | and not + or ^, since
9590 	 otherwise the expression does not return A when B is 0.  */
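      /* For example, with a 32-bit unsigned A, (A << 5) + (A >> 27) folds to
	 a rotate of A left by 5 bits, and (A << B) | (A >> (-B & 31)) folds
	 to a rotate of A left by B bits.  */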
9591       {
9592 	enum tree_code code0, code1;
9593 	tree rtype;
9594 	code0 = TREE_CODE (arg0);
9595 	code1 = TREE_CODE (arg1);
9596 	if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9597 	     || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9598 	    && operand_equal_p (TREE_OPERAND (arg0, 0),
9599 			        TREE_OPERAND (arg1, 0), 0)
9600 	    && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9601 	        TYPE_UNSIGNED (rtype))
9602 	    /* Only create rotates in complete modes.  Other cases are not
9603 	       expanded properly.  */
9604 	    && (element_precision (rtype)
9605 		== GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
9606 	  {
9607 	    tree tree01, tree11;
9608 	    tree orig_tree01, orig_tree11;
9609 	    enum tree_code code01, code11;
9610 
9611 	    tree01 = orig_tree01 = TREE_OPERAND (arg0, 1);
9612 	    tree11 = orig_tree11 = TREE_OPERAND (arg1, 1);
9613 	    STRIP_NOPS (tree01);
9614 	    STRIP_NOPS (tree11);
9615 	    code01 = TREE_CODE (tree01);
9616 	    code11 = TREE_CODE (tree11);
9617 	    if (code11 != MINUS_EXPR
9618 		&& (code01 == MINUS_EXPR || code01 == BIT_AND_EXPR))
9619 	      {
9620 		std::swap (code0, code1);
9621 		std::swap (code01, code11);
9622 		std::swap (tree01, tree11);
9623 		std::swap (orig_tree01, orig_tree11);
9624 	      }
9625 	    if (code01 == INTEGER_CST
9626 		&& code11 == INTEGER_CST
9627 		&& (wi::to_widest (tree01) + wi::to_widest (tree11)
9628 		    == element_precision (rtype)))
9629 	      {
9630 		tem = build2_loc (loc, LROTATE_EXPR,
9631 				  rtype, TREE_OPERAND (arg0, 0),
9632 				  code0 == LSHIFT_EXPR
9633 				  ? orig_tree01 : orig_tree11);
9634 		return fold_convert_loc (loc, type, tem);
9635 	      }
9636 	    else if (code11 == MINUS_EXPR)
9637 	      {
9638 		tree tree110, tree111;
9639 		tree110 = TREE_OPERAND (tree11, 0);
9640 		tree111 = TREE_OPERAND (tree11, 1);
9641 		STRIP_NOPS (tree110);
9642 		STRIP_NOPS (tree111);
9643 		if (TREE_CODE (tree110) == INTEGER_CST
9644 		    && compare_tree_int (tree110,
9645 					 element_precision (rtype)) == 0
9646 		    && operand_equal_p (tree01, tree111, 0))
9647 		  {
9648 		    tem = build2_loc (loc, (code0 == LSHIFT_EXPR
9649 					    ? LROTATE_EXPR : RROTATE_EXPR),
9650 				      rtype, TREE_OPERAND (arg0, 0),
9651 				      orig_tree01);
9652 		    return fold_convert_loc (loc, type, tem);
9653 		  }
9654 	      }
9655 	    else if (code == BIT_IOR_EXPR
9656 		     && code11 == BIT_AND_EXPR
9657 		     && pow2p_hwi (element_precision (rtype)))
9658 	      {
9659 		tree tree110, tree111;
9660 		tree110 = TREE_OPERAND (tree11, 0);
9661 		tree111 = TREE_OPERAND (tree11, 1);
9662 		STRIP_NOPS (tree110);
9663 		STRIP_NOPS (tree111);
9664 		if (TREE_CODE (tree110) == NEGATE_EXPR
9665 		    && TREE_CODE (tree111) == INTEGER_CST
9666 		    && compare_tree_int (tree111,
9667 					 element_precision (rtype) - 1) == 0
9668 		    && operand_equal_p (tree01, TREE_OPERAND (tree110, 0), 0))
9669 		  {
9670 		    tem = build2_loc (loc, (code0 == LSHIFT_EXPR
9671 					    ? LROTATE_EXPR : RROTATE_EXPR),
9672 				      rtype, TREE_OPERAND (arg0, 0),
9673 				      orig_tree01);
9674 		    return fold_convert_loc (loc, type, tem);
9675 		  }
9676 	      }
9677 	  }
9678       }
9679 
9680     associate:
9681       /* In most languages, we can't associate operations on floats through
9682 	 parentheses.  Rather than remember where the parentheses were, we
9683 	 don't associate floats at all, unless the user has specified
9684 	 -fassociative-math.
9685 	 We also need to make sure the type is not saturating.  */
9686 
9687       if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9688 	  && !TYPE_SATURATING (type))
9689 	{
9690 	  tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
9691 	  tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
9692 	  tree atype = type;
9693 	  bool ok = true;
9694 
9695 	  /* Split both trees into variables, constants, and literals.  Then
9696 	     associate each group together, the constants with literals,
9697 	     then the result with variables.  This increases the chances of
9698 	     literals being recombined later and of generating relocatable
9699 	     expressions for the sum of a constant and literal.  */
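	  /* For example, (x + 3) + (y + 5) is split into the variables x and
	     y and the literals 3 and 5, which recombine as (x + y) + 8.  */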
9700 	  var0 = split_tree (arg0, type, code,
9701 			     &minus_var0, &con0, &minus_con0,
9702 			     &lit0, &minus_lit0, 0);
9703 	  var1 = split_tree (arg1, type, code,
9704 			     &minus_var1, &con1, &minus_con1,
9705 			     &lit1, &minus_lit1, code == MINUS_EXPR);
9706 
9707 	  /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
9708 	  if (code == MINUS_EXPR)
9709 	    code = PLUS_EXPR;
9710 
9711 	  /* With undefined overflow prefer doing association in a type
9712 	     which wraps on overflow, if that is one of the operand types.  */
9713 	  if ((POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
9714 	      && !TYPE_OVERFLOW_WRAPS (type))
9715 	    {
9716 	      if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9717 		  && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
9718 		atype = TREE_TYPE (arg0);
9719 	      else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9720 		       && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
9721 		atype = TREE_TYPE (arg1);
9722 	      gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
9723 	    }
9724 
9725 	  /* With undefined overflow we can only associate constants with one
9726 	     variable, and constants whose association doesn't overflow.  */
9727 	  if ((POINTER_TYPE_P (atype) || INTEGRAL_TYPE_P (atype))
9728 	      && !TYPE_OVERFLOW_WRAPS (atype))
9729 	    {
9730 	      if ((var0 && var1) || (minus_var0 && minus_var1))
9731 		{
9732 		  /* ???  If split_tree would handle NEGATE_EXPR we could
9733 		     simply reject these cases and the allowed cases would
9734 		     be the var0/minus_var1 ones.  */
9735 		  tree tmp0 = var0 ? var0 : minus_var0;
9736 		  tree tmp1 = var1 ? var1 : minus_var1;
9737 		  bool one_neg = false;
9738 
9739 		  if (TREE_CODE (tmp0) == NEGATE_EXPR)
9740 		    {
9741 		      tmp0 = TREE_OPERAND (tmp0, 0);
9742 		      one_neg = !one_neg;
9743 		    }
9744 		  if (CONVERT_EXPR_P (tmp0)
9745 		      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9746 		      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9747 			  <= TYPE_PRECISION (atype)))
9748 		    tmp0 = TREE_OPERAND (tmp0, 0);
9749 		  if (TREE_CODE (tmp1) == NEGATE_EXPR)
9750 		    {
9751 		      tmp1 = TREE_OPERAND (tmp1, 0);
9752 		      one_neg = !one_neg;
9753 		    }
9754 		  if (CONVERT_EXPR_P (tmp1)
9755 		      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9756 		      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9757 			  <= TYPE_PRECISION (atype)))
9758 		    tmp1 = TREE_OPERAND (tmp1, 0);
9759 		  /* The only case we can still associate with two variables
9760 		     is if they cancel out.  */
9761 		  if (!one_neg
9762 		      || !operand_equal_p (tmp0, tmp1, 0))
9763 		    ok = false;
9764 		}
9765 	      else if ((var0 && minus_var1
9766 			&& ! operand_equal_p (var0, minus_var1, 0))
9767 		       || (minus_var0 && var1
9768 			   && ! operand_equal_p (minus_var0, var1, 0)))
9769 		ok = false;
9770 	    }
9771 
9772 	  /* Only do something if we found more than two objects.  Otherwise,
9773 	     nothing has changed and we risk infinite recursion.  */
9774 	  if (ok
9775 	      && ((var0 != 0) + (var1 != 0)
9776 		  + (minus_var0 != 0) + (minus_var1 != 0)
9777 		  + (con0 != 0) + (con1 != 0)
9778 		  + (minus_con0 != 0) + (minus_con1 != 0)
9779 		  + (lit0 != 0) + (lit1 != 0)
9780 		  + (minus_lit0 != 0) + (minus_lit1 != 0)) > 2)
9781 	    {
9782 	      var0 = associate_trees (loc, var0, var1, code, atype);
9783 	      minus_var0 = associate_trees (loc, minus_var0, minus_var1,
9784 					    code, atype);
9785 	      con0 = associate_trees (loc, con0, con1, code, atype);
9786 	      minus_con0 = associate_trees (loc, minus_con0, minus_con1,
9787 					    code, atype);
9788 	      lit0 = associate_trees (loc, lit0, lit1, code, atype);
9789 	      minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
9790 					    code, atype);
9791 
9792 	      if (minus_var0 && var0)
9793 		{
9794 		  var0 = associate_trees (loc, var0, minus_var0,
9795 					  MINUS_EXPR, atype);
9796 		  minus_var0 = 0;
9797 		}
9798 	      if (minus_con0 && con0)
9799 		{
9800 		  con0 = associate_trees (loc, con0, minus_con0,
9801 					  MINUS_EXPR, atype);
9802 		  minus_con0 = 0;
9803 		}
9804 
9805 	      /* Preserve the MINUS_EXPR if the negative part of the literal is
9806 		 greater than the positive part.  Otherwise, the multiplicative
9807 		 folding code (i.e. extract_muldiv) may be fooled when
9808 		 unsigned constants are subtracted, as in the following
9809 		 example: ((X*2 + 4) - 8U)/2.  */
9810 	      if (minus_lit0 && lit0)
9811 		{
9812 		  if (TREE_CODE (lit0) == INTEGER_CST
9813 		      && TREE_CODE (minus_lit0) == INTEGER_CST
9814 		      && tree_int_cst_lt (lit0, minus_lit0)
9815 		      /* But avoid ending up with only negated parts.  */
9816 		      && (var0 || con0))
9817 		    {
9818 		      minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9819 						    MINUS_EXPR, atype);
9820 		      lit0 = 0;
9821 		    }
9822 		  else
9823 		    {
9824 		      lit0 = associate_trees (loc, lit0, minus_lit0,
9825 					      MINUS_EXPR, atype);
9826 		      minus_lit0 = 0;
9827 		    }
9828 		}
9829 
9830 	      /* Don't introduce overflows through reassociation.  */
9831 	      if ((lit0 && TREE_OVERFLOW_P (lit0))
9832 		  || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
9833 		return NULL_TREE;
9834 
9835 	      /* Fold lit0 and minus_lit0 into con0 and minus_con0.  */
9836 	      con0 = associate_trees (loc, con0, lit0, code, atype);
9837 	      lit0 = 0;
9838 	      minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
9839 					    code, atype);
9840 	      minus_lit0 = 0;
9841 
9842 	      /* Eliminate minus_con0.  */
9843 	      if (minus_con0)
9844 		{
9845 		  if (con0)
9846 		    con0 = associate_trees (loc, con0, minus_con0,
9847 					    MINUS_EXPR, atype);
9848 		  else if (var0)
9849 		    var0 = associate_trees (loc, var0, minus_con0,
9850 					    MINUS_EXPR, atype);
9851 		  else
9852 		    gcc_unreachable ();
9853 		  minus_con0 = 0;
9854 		}
9855 
9856 	      /* Eliminate minus_var0.  */
9857 	      if (minus_var0)
9858 		{
9859 		  if (con0)
9860 		    con0 = associate_trees (loc, con0, minus_var0,
9861 					    MINUS_EXPR, atype);
9862 		  else
9863 		    gcc_unreachable ();
9864 		  minus_var0 = 0;
9865 		}
9866 
9867 	      return
9868 		fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
9869 							      code, atype));
9870 	    }
9871 	}
9872 
9873       return NULL_TREE;
9874 
9875     case POINTER_DIFF_EXPR:
9876     case MINUS_EXPR:
9877       /* Fold &a[i] - &a[j] to i-j.  */
9878       if (TREE_CODE (arg0) == ADDR_EXPR
9879 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9880 	  && TREE_CODE (arg1) == ADDR_EXPR
9881 	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9882         {
9883 	  tree tem = fold_addr_of_array_ref_difference (loc, type,
9884 							TREE_OPERAND (arg0, 0),
9885 							TREE_OPERAND (arg1, 0),
9886 							code
9887 							== POINTER_DIFF_EXPR);
9888 	  if (tem)
9889 	    return tem;
9890 	}
9891 
9892       /* The remaining transformations do not apply to pointers.  */
9893       if (code == POINTER_DIFF_EXPR)
9894 	return NULL_TREE;
9895 
9896       /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
9897       if (TREE_CODE (arg0) == NEGATE_EXPR
9898 	  && negate_expr_p (op1)
9899 	  /* If arg0 is e.g. unsigned int and type is int, then this could
9900 	     introduce UB, because if A is INT_MIN at runtime, the original
9901 	     expression can be well defined while the latter is not.
9902 	     See PR83269.  */
9903 	  && !(ANY_INTEGRAL_TYPE_P (type)
9904 	       && TYPE_OVERFLOW_UNDEFINED (type)
9905 	       && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9906 	       && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
9907 	return fold_build2_loc (loc, MINUS_EXPR, type, negate_expr (op1),
9908 			        fold_convert_loc (loc, type,
9909 						  TREE_OPERAND (arg0, 0)));
9910 
9911       /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9912 	 __complex__ ( x, -y ).  This is not the same for SNaNs or if
9913 	 signed zeros are involved.  */
9914       if (!HONOR_SNANS (element_mode (arg0))
9915 	  && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9916 	  && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9917         {
9918 	  tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9919 	  tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9920 	  tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9921 	  bool arg0rz = false, arg0iz = false;
9922 	  if ((arg0r && (arg0rz = real_zerop (arg0r)))
9923 	      || (arg0i && (arg0iz = real_zerop (arg0i))))
9924 	    {
9925 	      tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9926 	      tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9927 	      if (arg0rz && arg1i && real_zerop (arg1i))
9928 	        {
9929 		  tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9930 					 arg1r ? arg1r
9931 					 : build1 (REALPART_EXPR, rtype, arg1));
9932 		  tree ip = arg0i ? arg0i
9933 		    : build1 (IMAGPART_EXPR, rtype, arg0);
9934 		  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9935 		}
9936 	      else if (arg0iz && arg1r && real_zerop (arg1r))
9937 	        {
9938 		  tree rp = arg0r ? arg0r
9939 		    : build1 (REALPART_EXPR, rtype, arg0);
9940 		  tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9941 					 arg1i ? arg1i
9942 					 : build1 (IMAGPART_EXPR, rtype, arg1));
9943 		  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9944 		}
9945 	    }
9946 	}
9947 
9948       /* A - B -> A + (-B) if B is easily negatable.  */
9949       if (negate_expr_p (op1)
9950 	  && ! TYPE_OVERFLOW_SANITIZED (type)
9951 	  && ((FLOAT_TYPE_P (type)
9952                /* Avoid this transformation if B is a positive REAL_CST.  */
9953 	       && (TREE_CODE (op1) != REAL_CST
9954 		   || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
9955 	      || INTEGRAL_TYPE_P (type)))
9956 	return fold_build2_loc (loc, PLUS_EXPR, type,
9957 				fold_convert_loc (loc, type, arg0),
9958 				negate_expr (op1));
9959 
9960       /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
9961 	 one.  Make sure the type is not saturating and has the signedness of
9962 	 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9963 	 ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
9964       if ((TREE_CODE (arg0) == MULT_EXPR
9965 	   || TREE_CODE (arg1) == MULT_EXPR)
9966 	  && !TYPE_SATURATING (type)
9967 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9968 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9969 	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
9970         {
9971 	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9972 	  if (tem)
9973 	    return tem;
9974 	}
9975 
9976       goto associate;
9977 
9978     case MULT_EXPR:
9979       if (! FLOAT_TYPE_P (type))
9980 	{
9981 	  /* Transform x * -C into -x * C if x is easily negatable.  */
9982 	  if (TREE_CODE (op1) == INTEGER_CST
9983 	      && tree_int_cst_sgn (op1) == -1
9984 	      && negate_expr_p (op0)
9985 	      && negate_expr_p (op1)
9986 	      && (tem = negate_expr (op1)) != op1
9987 	      && ! TREE_OVERFLOW (tem))
9988 	    return fold_build2_loc (loc, MULT_EXPR, type,
9989 				    fold_convert_loc (loc, type,
9990 						      negate_expr (op0)), tem);
9991 
9992 	  strict_overflow_p = false;
9993 	  if (TREE_CODE (arg1) == INTEGER_CST
9994 	      && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
9995 					&strict_overflow_p)) != 0)
9996 	    {
9997 	      if (strict_overflow_p)
9998 		fold_overflow_warning (("assuming signed overflow does not "
9999 					"occur when simplifying "
10000 					"multiplication"),
10001 				       WARN_STRICT_OVERFLOW_MISC);
10002 	      return fold_convert_loc (loc, type, tem);
10003 	    }
10004 
10005 	  /* Optimize z * conj(z) for integer complex numbers.  */
10006 	  if (TREE_CODE (arg0) == CONJ_EXPR
10007 	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10008 	    return fold_mult_zconjz (loc, type, arg1);
10009 	  if (TREE_CODE (arg1) == CONJ_EXPR
10010 	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10011 	    return fold_mult_zconjz (loc, type, arg0);
10012 	}
10013       else
10014 	{
10015 	  /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10016 	     This is not the same for NaNs or if signed zeros are
10017 	     involved.  */
10018 	  if (!HONOR_NANS (arg0)
10019               && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10020 	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10021 	      && TREE_CODE (arg1) == COMPLEX_CST
10022 	      && real_zerop (TREE_REALPART (arg1)))
10023 	    {
10024 	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10025 	      if (real_onep (TREE_IMAGPART (arg1)))
10026 		return
10027 		  fold_build2_loc (loc, COMPLEX_EXPR, type,
10028 			       negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10029 							     rtype, arg0)),
10030 			       fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10031 	      else if (real_minus_onep (TREE_IMAGPART (arg1)))
10032 		return
10033 		  fold_build2_loc (loc, COMPLEX_EXPR, type,
10034 			       fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10035 			       negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10036 							     rtype, arg0)));
10037 	    }
10038 
10039 	  /* Optimize z * conj(z) for floating point complex numbers.
10040 	     Guarded by flag_unsafe_math_optimizations as non-finite
10041 	     imaginary components don't produce scalar results.  */
10042 	  if (flag_unsafe_math_optimizations
10043 	      && TREE_CODE (arg0) == CONJ_EXPR
10044 	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10045 	    return fold_mult_zconjz (loc, type, arg1);
10046 	  if (flag_unsafe_math_optimizations
10047 	      && TREE_CODE (arg1) == CONJ_EXPR
10048 	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10049 	    return fold_mult_zconjz (loc, type, arg0);
10050 	}
10051       goto associate;
10052 
10053     case BIT_IOR_EXPR:
10054       /* Canonicalize (X & C1) | C2.  */
10055       if (TREE_CODE (arg0) == BIT_AND_EXPR
10056 	  && TREE_CODE (arg1) == INTEGER_CST
10057 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10058 	{
10059 	  int width = TYPE_PRECISION (type), w;
10060 	  wide_int c1 = wi::to_wide (TREE_OPERAND (arg0, 1));
10061 	  wide_int c2 = wi::to_wide (arg1);
10062 
10063 	  /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
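	  /* For example, (x & 0x0f) | 0xff folds to 0xff, keeping x only for
	     its side effects.  */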
10064 	  if ((c1 & c2) == c1)
10065 	    return omit_one_operand_loc (loc, type, arg1,
10066 					 TREE_OPERAND (arg0, 0));
10067 
10068 	  wide_int msk = wi::mask (width, false,
10069 				   TYPE_PRECISION (TREE_TYPE (arg1)));
10070 
10071 	  /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
10072 	  if (wi::bit_and_not (msk, c1 | c2) == 0)
10073 	    {
10074 	      tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10075 	      return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
10076 	    }
10077 
10078 	  /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10079 	     unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10080 	     mode which allows further optimizations.  */
10081 	  c1 &= msk;
10082 	  c2 &= msk;
10083 	  wide_int c3 = wi::bit_and_not (c1, c2);
10084 	  for (w = BITS_PER_UNIT; w <= width; w <<= 1)
10085 	    {
10086 	      wide_int mask = wi::mask (w, false,
10087 					TYPE_PRECISION (type));
10088 	      if (((c1 | c2) & mask) == mask
10089 		  && wi::bit_and_not (c1, mask) == 0)
10090 		{
10091 		  c3 = mask;
10092 		  break;
10093 		}
10094 	    }
10095 
10096 	  if (c3 != c1)
10097 	    {
10098 	      tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10099 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
10100 				     wide_int_to_tree (type, c3));
10101 	      return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
10102 	    }
10103 	}
10104 
10105       /* See if this can be simplified into a rotate first.  If that
10106 	 is unsuccessful, continue in the association code.  */
10107       goto bit_rotate;
10108 
10109     case BIT_XOR_EXPR:
10110       /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
10111       if (TREE_CODE (arg0) == BIT_AND_EXPR
10112 	  && INTEGRAL_TYPE_P (type)
10113 	  && integer_onep (TREE_OPERAND (arg0, 1))
10114 	  && integer_onep (arg1))
10115 	return fold_build2_loc (loc, EQ_EXPR, type, arg0,
10116 				build_zero_cst (TREE_TYPE (arg0)));
10117 
10118       /* See if this can be simplified into a rotate first.  If that
10119 	 is unsuccessful, continue in the association code.  */
10120       goto bit_rotate;
10121 
10122     case BIT_AND_EXPR:
10123       /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
10124       if (TREE_CODE (arg0) == BIT_XOR_EXPR
10125 	  && INTEGRAL_TYPE_P (type)
10126 	  && integer_onep (TREE_OPERAND (arg0, 1))
10127 	  && integer_onep (arg1))
10128 	{
10129 	  tree tem2;
10130 	  tem = TREE_OPERAND (arg0, 0);
10131 	  tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10132 	  tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10133 				  tem, tem2);
10134 	  return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10135 				  build_zero_cst (TREE_TYPE (tem)));
10136 	}
10137       /* Fold ~X & 1 as (X & 1) == 0.  */
10138       if (TREE_CODE (arg0) == BIT_NOT_EXPR
10139 	  && INTEGRAL_TYPE_P (type)
10140 	  && integer_onep (arg1))
10141 	{
10142 	  tree tem2;
10143 	  tem = TREE_OPERAND (arg0, 0);
10144 	  tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10145 	  tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10146 				  tem, tem2);
10147 	  return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10148 				  build_zero_cst (TREE_TYPE (tem)));
10149 	}
10150       /* Fold !X & 1 as X == 0.  */
10151       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10152 	  && integer_onep (arg1))
10153 	{
10154 	  tem = TREE_OPERAND (arg0, 0);
10155 	  return fold_build2_loc (loc, EQ_EXPR, type, tem,
10156 				  build_zero_cst (TREE_TYPE (tem)));
10157 	}
10158 
10159       /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
10160          multiple of 1 << CST.  */
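      /* For example, (x * 8) & -4 folds to x * 8, since x * 8 is always a
	 multiple of 4 and the mask therefore clears no bits.  */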
10161       if (TREE_CODE (arg1) == INTEGER_CST)
10162 	{
10163 	  wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
10164 	  wide_int ncst1 = -cst1;
10165 	  if ((cst1 & ncst1) == ncst1
10166 	      && multiple_of_p (type, arg0,
10167 				wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
10168 	    return fold_convert_loc (loc, type, arg0);
10169 	}
10170 
10171       /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
10172          bits from CST2.  */
10173       if (TREE_CODE (arg1) == INTEGER_CST
10174 	  && TREE_CODE (arg0) == MULT_EXPR
10175 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10176 	{
10177 	  wi::tree_to_wide_ref warg1 = wi::to_wide (arg1);
10178 	  wide_int masked
10179 	    = mask_with_tz (type, warg1, wi::to_wide (TREE_OPERAND (arg0, 1)));
10180 
10181 	  if (masked == 0)
10182 	    return omit_two_operands_loc (loc, type, build_zero_cst (type),
10183 	                                  arg0, arg1);
10184 	  else if (masked != warg1)
10185 	    {
10186 	      /* Avoid the transform if arg1 is a mask of some
10187 	         mode which allows further optimizations.  */
10188 	      int pop = wi::popcount (warg1);
10189 	      if (!(pop >= BITS_PER_UNIT
10190 		    && pow2p_hwi (pop)
10191 		    && wi::mask (pop, false, warg1.get_precision ()) == warg1))
10192 		return fold_build2_loc (loc, code, type, op0,
10193 					wide_int_to_tree (type, masked));
10194 	    }
10195 	}
10196 
10197       /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10198 	 ((A & N) + B) & M -> (A + B) & M
10199 	 Similarly if (N & M) == 0,
10200 	 ((A | N) + B) & M -> (A + B) & M
10201 	 and for - instead of + (or unary - instead of +)
10202 	 and/or ^ instead of |.
10203 	 If B is constant and (B & M) == 0, fold into A & M.  */
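      /* For example, with M == 7, ((a & 15) + b) & 7 folds to (a + b) & 7
	 because (15 & 7) == 7, and ((a | 8) + b) & 7 folds to (a + b) & 7
	 because (8 & 7) == 0.  */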
10204       if (TREE_CODE (arg1) == INTEGER_CST)
10205 	{
10206 	  wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
10207 	  if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
10208 	      && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10209 	      && (TREE_CODE (arg0) == PLUS_EXPR
10210 		  || TREE_CODE (arg0) == MINUS_EXPR
10211 		  || TREE_CODE (arg0) == NEGATE_EXPR)
10212 	      && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
10213 		  || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
10214 	    {
10215 	      tree pmop[2];
10216 	      int which = 0;
10217 	      wide_int cst0;
10218 
10219 	      /* Now we know that arg0 is (C + D) or (C - D) or
10220 		 -C and arg1 (M) == (1LL << cst) - 1.
10221 		 Store C into PMOP[0] and D into PMOP[1].  */
10222 	      pmop[0] = TREE_OPERAND (arg0, 0);
10223 	      pmop[1] = NULL;
10224 	      if (TREE_CODE (arg0) != NEGATE_EXPR)
10225 		{
10226 		  pmop[1] = TREE_OPERAND (arg0, 1);
10227 		  which = 1;
10228 		}
10229 
10230 	      if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
10231 		which = -1;
10232 
10233 	      for (; which >= 0; which--)
10234 		switch (TREE_CODE (pmop[which]))
10235 		  {
10236 		  case BIT_AND_EXPR:
10237 		  case BIT_IOR_EXPR:
10238 		  case BIT_XOR_EXPR:
10239 		    if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
10240 			!= INTEGER_CST)
10241 		      break;
10242 		    cst0 = wi::to_wide (TREE_OPERAND (pmop[which], 1)) & cst1;
10243 		    if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
10244 		      {
10245 			if (cst0 != cst1)
10246 			  break;
10247 		      }
10248 		    else if (cst0 != 0)
10249 		      break;
10250 		    /* If C or D is of the form (A & N) where
10251 		       (N & M) == M, or of the form (A | N) or
10252 		       (A ^ N) where (N & M) == 0, replace it with A.  */
10253 		    pmop[which] = TREE_OPERAND (pmop[which], 0);
10254 		    break;
10255 		  case INTEGER_CST:
10256 		    /* If C or D is an N where (N & M) == 0, it can be
10257 		       omitted (assumed 0).  */
10258 		    if ((TREE_CODE (arg0) == PLUS_EXPR
10259 			 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
10260 			&& (cst1 & wi::to_wide (pmop[which])) == 0)
10261 		      pmop[which] = NULL;
10262 		    break;
10263 		  default:
10264 		    break;
10265 		  }
10266 
10267 	      /* Only build anything new if we optimized one or both arguments
10268 		 above.  */
10269 	      if (pmop[0] != TREE_OPERAND (arg0, 0)
10270 		  || (TREE_CODE (arg0) != NEGATE_EXPR
10271 		      && pmop[1] != TREE_OPERAND (arg0, 1)))
10272 		{
10273 		  tree utype = TREE_TYPE (arg0);
10274 		  if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10275 		    {
10276 		      /* Perform the operations in a type that has defined
10277 			 overflow behavior.  */
10278 		      utype = unsigned_type_for (TREE_TYPE (arg0));
10279 		      if (pmop[0] != NULL)
10280 			pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
10281 		      if (pmop[1] != NULL)
10282 			pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
10283 		    }
10284 
10285 		  if (TREE_CODE (arg0) == NEGATE_EXPR)
10286 		    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
10287 		  else if (TREE_CODE (arg0) == PLUS_EXPR)
10288 		    {
10289 		      if (pmop[0] != NULL && pmop[1] != NULL)
10290 			tem = fold_build2_loc (loc, PLUS_EXPR, utype,
10291 					       pmop[0], pmop[1]);
10292 		      else if (pmop[0] != NULL)
10293 			tem = pmop[0];
10294 		      else if (pmop[1] != NULL)
10295 			tem = pmop[1];
10296 		      else
10297 			return build_int_cst (type, 0);
10298 		    }
10299 		  else if (pmop[0] == NULL)
10300 		    tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
10301 		  else
10302 		    tem = fold_build2_loc (loc, MINUS_EXPR, utype,
10303 					   pmop[0], pmop[1]);
10304 		  /* TEM is now the new binary +, - or unary - replacement.  */
10305 		  tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
10306 					 fold_convert_loc (loc, utype, arg1));
10307 		  return fold_convert_loc (loc, type, tem);
10308 		}
10309 	    }
10310 	}
10311 
10312       /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
10313       if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10314 	  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10315 	{
10316 	  prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10317 
10318 	  wide_int mask = wide_int::from (wi::to_wide (arg1), prec, UNSIGNED);
10319 	  if (mask == -1)
10320 	    return
10321 	      fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10322 	}
10323 
10324       goto associate;
10325 
10326     case RDIV_EXPR:
10327       /* Don't touch a floating-point divide by zero unless the mode
10328 	 of the constant can represent infinity.  */
10329       if (TREE_CODE (arg1) == REAL_CST
10330 	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10331 	  && real_zerop (arg1))
10332 	return NULL_TREE;
10333 
10334       /* (-A) / (-B) -> A / B  */
10335       if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10336 	return fold_build2_loc (loc, RDIV_EXPR, type,
10337 			    TREE_OPERAND (arg0, 0),
10338 			    negate_expr (arg1));
10339       if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10340 	return fold_build2_loc (loc, RDIV_EXPR, type,
10341 			    negate_expr (arg0),
10342 			    TREE_OPERAND (arg1, 0));
10343       return NULL_TREE;
10344 
10345     case TRUNC_DIV_EXPR:
10346       /* Fall through */
10347 
10348     case FLOOR_DIV_EXPR:
10349       /* Simplify A / (B << N) where A and B are positive and B is
10350 	 a power of 2, to A >> (N + log2(B)).  */
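      /* For example, with unsigned A, A / (4 << N) folds to A >> (N + 2).  */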
10351       strict_overflow_p = false;
10352       if (TREE_CODE (arg1) == LSHIFT_EXPR
10353 	  && (TYPE_UNSIGNED (type)
10354 	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
10355 	{
10356 	  tree sval = TREE_OPERAND (arg1, 0);
10357 	  if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10358 	    {
10359 	      tree sh_cnt = TREE_OPERAND (arg1, 1);
10360 	      tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
10361 					 wi::exact_log2 (wi::to_wide (sval)));
10362 
10363 	      if (strict_overflow_p)
10364 		fold_overflow_warning (("assuming signed overflow does not "
10365 					"occur when simplifying A / (B << N)"),
10366 				       WARN_STRICT_OVERFLOW_MISC);
10367 
10368 	      sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
10369 					sh_cnt, pow2);
10370 	      return fold_build2_loc (loc, RSHIFT_EXPR, type,
10371 				      fold_convert_loc (loc, type, arg0), sh_cnt);
10372 	    }
10373 	}
10374 
10375       /* Fall through */
10376 
10377     case ROUND_DIV_EXPR:
10378     case CEIL_DIV_EXPR:
10379     case EXACT_DIV_EXPR:
10380       if (integer_zerop (arg1))
10381 	return NULL_TREE;
10382 
10383       /* Convert -A / -B to A / B when the type is signed and overflow is
10384 	 undefined.  */
10385       if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10386 	  && TREE_CODE (op0) == NEGATE_EXPR
10387 	  && negate_expr_p (op1))
10388 	{
10389 	  if (INTEGRAL_TYPE_P (type))
10390 	    fold_overflow_warning (("assuming signed overflow does not occur "
10391 				    "when distributing negation across "
10392 				    "division"),
10393 				   WARN_STRICT_OVERFLOW_MISC);
10394 	  return fold_build2_loc (loc, code, type,
10395 				  fold_convert_loc (loc, type,
10396 						    TREE_OPERAND (arg0, 0)),
10397 				  negate_expr (op1));
10398 	}
10399       if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10400 	  && TREE_CODE (arg1) == NEGATE_EXPR
10401 	  && negate_expr_p (op0))
10402 	{
10403 	  if (INTEGRAL_TYPE_P (type))
10404 	    fold_overflow_warning (("assuming signed overflow does not occur "
10405 				    "when distributing negation across "
10406 				    "division"),
10407 				   WARN_STRICT_OVERFLOW_MISC);
10408 	  return fold_build2_loc (loc, code, type,
10409 				  negate_expr (op0),
10410 				  fold_convert_loc (loc, type,
10411 						    TREE_OPERAND (arg1, 0)));
10412 	}
10413 
10414       /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10415 	 operation, EXACT_DIV_EXPR.
10416 
10417 	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10418 	 At one time the others generated faster code, but it's not clear whether
10419 	 they still do after the last round of changes to the DIV code in expmed.c.  */
10420       if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10421 	  && multiple_of_p (type, arg0, arg1))
10422 	return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
10423 				fold_convert (type, arg0),
10424 				fold_convert (type, arg1));
10425 
10426       strict_overflow_p = false;
10427       if (TREE_CODE (arg1) == INTEGER_CST
10428 	  && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10429 				    &strict_overflow_p)) != 0)
10430 	{
10431 	  if (strict_overflow_p)
10432 	    fold_overflow_warning (("assuming signed overflow does not occur "
10433 				    "when simplifying division"),
10434 				   WARN_STRICT_OVERFLOW_MISC);
10435 	  return fold_convert_loc (loc, type, tem);
10436 	}
10437 
10438       return NULL_TREE;
10439 
10440     case CEIL_MOD_EXPR:
10441     case FLOOR_MOD_EXPR:
10442     case ROUND_MOD_EXPR:
10443     case TRUNC_MOD_EXPR:
10444       strict_overflow_p = false;
10445       if (TREE_CODE (arg1) == INTEGER_CST
10446 	  && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10447 				    &strict_overflow_p)) != 0)
10448 	{
10449 	  if (strict_overflow_p)
10450 	    fold_overflow_warning (("assuming signed overflow does not occur "
10451 				    "when simplifying modulus"),
10452 				   WARN_STRICT_OVERFLOW_MISC);
10453 	  return fold_convert_loc (loc, type, tem);
10454 	}
10455 
10456       return NULL_TREE;
10457 
10458     case LROTATE_EXPR:
10459     case RROTATE_EXPR:
10460     case RSHIFT_EXPR:
10461     case LSHIFT_EXPR:
10462       /* Since a negative shift count is not well-defined,
10463 	 don't try to compute it in the compiler.  */
10464       if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10465 	return NULL_TREE;
10466 
10467       prec = element_precision (type);
10468 
10469       /* If we have a rotate of a bit operation with the rotate count and
10470 	 the second operand of the bit operation both constant,
10471 	 permute the two operations.  */
10472       if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10473 	  && (TREE_CODE (arg0) == BIT_AND_EXPR
10474 	      || TREE_CODE (arg0) == BIT_IOR_EXPR
10475 	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
10476 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10477 	{
10478 	  tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10479 	  tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10480 	  return fold_build2_loc (loc, TREE_CODE (arg0), type,
10481 				  fold_build2_loc (loc, code, type,
10482 						   arg00, arg1),
10483 				  fold_build2_loc (loc, code, type,
10484 						   arg01, arg1));
10485 	}
10486 
10487       /* Two consecutive rotates adding up to some integer
10488 	 multiple of the precision of the type can be ignored.  */
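      /* For example, in a 32-bit type, rotating right by 10 and then by 22
	 yields the original value, since 10 + 22 == 32.  */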
10489       if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10490 	  && TREE_CODE (arg0) == RROTATE_EXPR
10491 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10492 	  && wi::umod_trunc (wi::to_wide (arg1)
10493 			     + wi::to_wide (TREE_OPERAND (arg0, 1)),
10494 			     prec) == 0)
10495 	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10496 
10497       return NULL_TREE;
10498 
10499     case MIN_EXPR:
10500     case MAX_EXPR:
10501       goto associate;
10502 
10503     case TRUTH_ANDIF_EXPR:
10504       /* Note that the operands of this must be ints
10505 	 and their values must be 0 or 1.
10506 	 ("true" is a fixed value perhaps depending on the language.)  */
10507       /* If first arg is constant zero, return it.  */
10508       if (integer_zerop (arg0))
10509 	return fold_convert_loc (loc, type, arg0);
10510       /* FALLTHRU */
10511     case TRUTH_AND_EXPR:
10512       /* If either arg is constant true, drop it.  */
10513       if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10514 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10515       if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10516 	  /* Preserve sequence points.  */
10517 	  && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10518 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10519       /* If second arg is constant zero, result is zero, but first arg
10520 	 must be evaluated.  */
10521       if (integer_zerop (arg1))
10522 	return omit_one_operand_loc (loc, type, arg1, arg0);
10523       /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10524 	 case will be handled here.  */
10525       if (integer_zerop (arg0))
10526 	return omit_one_operand_loc (loc, type, arg0, arg1);
10527 
10528       /* !X && X is always false.  */
10529       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10530 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10531 	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10532       /* X && !X is always false.  */
10533       if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10534 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10535 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10536 
10537       /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
10538 	 means A >= Y && A != MAX, but in this case we know that
10539 	 A < X <= MAX.  */
10540 
10541       if (!TREE_SIDE_EFFECTS (arg0)
10542 	  && !TREE_SIDE_EFFECTS (arg1))
10543 	{
10544 	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
10545 	  if (tem && !operand_equal_p (tem, arg0, 0))
10546 	    return fold_build2_loc (loc, code, type, tem, arg1);
10547 
10548 	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
10549 	  if (tem && !operand_equal_p (tem, arg1, 0))
10550 	    return fold_build2_loc (loc, code, type, arg0, tem);
10551 	}
10552 
10553       if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10554           != NULL_TREE)
10555         return tem;
10556 
10557       return NULL_TREE;
10558 
10559     case TRUTH_ORIF_EXPR:
10560       /* Note that the operands of this must be ints
10561 	 and their values must be 0 or true.
10562 	 ("true" is a fixed value perhaps depending on the language.)  */
10563       /* If first arg is constant true, return it.  */
10564       if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10565 	return fold_convert_loc (loc, type, arg0);
10566       /* FALLTHRU */
10567     case TRUTH_OR_EXPR:
10568       /* If either arg is constant zero, drop it.  */
10569       if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10570 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10571       if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10572 	  /* Preserve sequence points.  */
10573 	  && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10574 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10575       /* If second arg is constant true, result is true, but we must
10576 	 evaluate first arg.  */
10577       if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10578 	return omit_one_operand_loc (loc, type, arg1, arg0);
10579       /* Likewise for first arg, but note this only occurs here for
10580 	 TRUTH_OR_EXPR.  */
10581       if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10582 	return omit_one_operand_loc (loc, type, arg0, arg1);
10583 
10584       /* !X || X is always true.  */
10585       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10586 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10587 	return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10588       /* X || !X is always true.  */
10589       if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10590 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10591 	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10592 
10593       /* (X && !Y) || (!X && Y) is X ^ Y */
10594       if (TREE_CODE (arg0) == TRUTH_AND_EXPR
10595 	  && TREE_CODE (arg1) == TRUTH_AND_EXPR)
10596         {
10597 	  tree a0, a1, l0, l1, n0, n1;
10598 
10599 	  a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10600 	  a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10601 
10602 	  l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10603 	  l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10604 
10605 	  n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
10606 	  n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
10607 
10608 	  if ((operand_equal_p (n0, a0, 0)
10609 	       && operand_equal_p (n1, a1, 0))
10610 	      || (operand_equal_p (n0, a1, 0)
10611 		  && operand_equal_p (n1, a0, 0)))
10612 	    return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
10613 	}
10614 
10615       if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10616           != NULL_TREE)
10617         return tem;
10618 
10619       return NULL_TREE;
10620 
10621     case TRUTH_XOR_EXPR:
10622       /* If the second arg is constant zero, drop it.  */
10623       if (integer_zerop (arg1))
10624 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10625       /* If the second arg is constant true, this is a logical inversion.  */
10626       if (integer_onep (arg1))
10627 	{
10628 	  tem = invert_truthvalue_loc (loc, arg0);
10629 	  return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
10630 	}
10631       /* Identical arguments cancel to zero.  */
10632       if (operand_equal_p (arg0, arg1, 0))
10633 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10634 
10635       /* !X ^ X is always true.  */
10636       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10637 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10638 	return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10639 
10640       /* X ^ !X is always true.  */
10641       if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10642 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10643 	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10644 
10645       return NULL_TREE;
10646 
10647     case EQ_EXPR:
10648     case NE_EXPR:
10649       STRIP_NOPS (arg0);
10650       STRIP_NOPS (arg1);
10651 
10652       tem = fold_comparison (loc, code, type, op0, op1);
10653       if (tem != NULL_TREE)
10654 	return tem;
10655 
10656       /* bool_var != 1 becomes !bool_var. */
10657       if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10658           && code == NE_EXPR)
10659         return fold_convert_loc (loc, type,
10660 				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10661 						  TREE_TYPE (arg0), arg0));
10662 
10663       /* bool_var == 0 becomes !bool_var. */
10664       if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10665           && code == EQ_EXPR)
10666         return fold_convert_loc (loc, type,
10667 				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10668 						  TREE_TYPE (arg0), arg0));
10669 
10670       /* !exp != 0 becomes !exp */
10671       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
10672 	  && code == NE_EXPR)
10673         return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10674 
10675       /* If this is an EQ or NE comparison with zero and ARG0 is
10676 	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
10677 	 two operations, but the latter can be done in one less insn
10678 	 on machines that have only two-operand insns or on which a
10679 	 constant cannot be the first operand.  */
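      /* For example, ((1 << n) & flags) != 0 becomes ((flags >> n) & 1) != 0.  */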
10680       if (TREE_CODE (arg0) == BIT_AND_EXPR
10681 	  && integer_zerop (arg1))
10682 	{
10683 	  tree arg00 = TREE_OPERAND (arg0, 0);
10684 	  tree arg01 = TREE_OPERAND (arg0, 1);
10685 	  if (TREE_CODE (arg00) == LSHIFT_EXPR
10686 	      && integer_onep (TREE_OPERAND (arg00, 0)))
10687 	    {
10688 	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
10689 				      arg01, TREE_OPERAND (arg00, 1));
10690 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10691 				 build_int_cst (TREE_TYPE (arg0), 1));
10692 	      return fold_build2_loc (loc, code, type,
10693 				  fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10694 				  arg1);
10695 	    }
10696 	  else if (TREE_CODE (arg01) == LSHIFT_EXPR
10697 		   && integer_onep (TREE_OPERAND (arg01, 0)))
10698 	    {
10699 	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
10700 				      arg00, TREE_OPERAND (arg01, 1));
10701 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10702 				 build_int_cst (TREE_TYPE (arg0), 1));
10703 	      return fold_build2_loc (loc, code, type,
10704 				  fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10705 				  arg1);
10706 	    }
10707 	}
10708 
10709       /* If this is an NE or EQ comparison of zero against the result of a
10710 	 signed MOD operation whose second operand is a power of 2, make
10711 	 the MOD operation unsigned since it is simpler and equivalent.  */
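      /* For example, with signed X, X % 8 == 0 is computed as
	 (unsigned) X % 8 == 0, which is equivalent when testing against zero
	 and can later be implemented as a simple mask.  */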
10712       if (integer_zerop (arg1)
10713 	  && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10714 	  && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10715 	      || TREE_CODE (arg0) == CEIL_MOD_EXPR
10716 	      || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10717 	      || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10718 	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
10719 	{
10720 	  tree newtype = unsigned_type_for (TREE_TYPE (arg0));
10721 	  tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
10722 				     fold_convert_loc (loc, newtype,
10723 						       TREE_OPERAND (arg0, 0)),
10724 				     fold_convert_loc (loc, newtype,
10725 						       TREE_OPERAND (arg0, 1)));
10726 
10727 	  return fold_build2_loc (loc, code, type, newmod,
10728 			      fold_convert_loc (loc, newtype, arg1));
10729 	}
10730 
10731       /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10732 	 C1 is a valid shift constant, and C2 is a power of two, i.e.
10733 	 a single bit.  */
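      /* For example, ((X >> 3) & 4) != 0 becomes (X & 32) != 0, since
	 4 << 3 == 32 does not overflow.  */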
10734       if (TREE_CODE (arg0) == BIT_AND_EXPR
10735 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10736 	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10737 	     == INTEGER_CST
10738 	  && integer_pow2p (TREE_OPERAND (arg0, 1))
10739 	  && integer_zerop (arg1))
10740 	{
10741 	  tree itype = TREE_TYPE (arg0);
10742 	  tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10743 	  prec = TYPE_PRECISION (itype);
10744 
10745 	  /* Check for a valid shift count.  */
10746 	  if (wi::ltu_p (wi::to_wide (arg001), prec))
10747 	    {
10748 	      tree arg01 = TREE_OPERAND (arg0, 1);
10749 	      tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10750 	      unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10751 	      /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10752 		 can be rewritten as (X & (C2 << C1)) != 0.  */
10753 	      if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10754 		{
10755 		  tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
10756 		  tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
10757 		  return fold_build2_loc (loc, code, type, tem,
10758 					  fold_convert_loc (loc, itype, arg1));
10759 		}
10760 	      /* Otherwise, for signed (arithmetic) shifts,
10761 		 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10762 		 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
10763 	      else if (!TYPE_UNSIGNED (itype))
10764 		return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10765 				    arg000, build_int_cst (itype, 0));
10766 	      /* Otherwise, for unsigned (logical) shifts,
10767 		 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10768 		 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
10769 	      else
10770 		return omit_one_operand_loc (loc, type,
10771 					 code == EQ_EXPR ? integer_one_node
10772 							 : integer_zero_node,
10773 					 arg000);
10774 	    }
10775 	}
10776 
10777       /* If this is a comparison of a field, we may be able to simplify it.  */
10778       if ((TREE_CODE (arg0) == COMPONENT_REF
10779 	   || TREE_CODE (arg0) == BIT_FIELD_REF)
10780 	  /* Handle the constant case even without -O
10781 	     to make sure the warnings are given.  */
10782 	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10783 	{
10784 	  t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
10785 	  if (t1)
10786 	    return t1;
10787 	}
10788 
10789       /* Optimize comparisons of strlen vs zero to a compare of the
10790 	 first character of the string vs zero.  To wit,
10791 		strlen(ptr) == 0   =>  *ptr == 0
10792 		strlen(ptr) != 0   =>  *ptr != 0
10793 	 Other cases should reduce to one of these two (or a constant)
10794 	 due to the return value of strlen being unsigned.  */
10795       if (TREE_CODE (arg0) == CALL_EXPR
10796 	  && integer_zerop (arg1))
10797 	{
10798 	  tree fndecl = get_callee_fndecl (arg0);
10799 
10800 	  if (fndecl
10801 	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10802 	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10803 	      && call_expr_nargs (arg0) == 1
10804 	      && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
10805 	    {
10806 	      tree iref = build_fold_indirect_ref_loc (loc,
10807 						   CALL_EXPR_ARG (arg0, 0));
10808 	      return fold_build2_loc (loc, code, type, iref,
10809 				  build_int_cst (TREE_TYPE (iref), 0));
10810 	    }
10811 	}
10812 
10813       /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10814 	 of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
10815       if (TREE_CODE (arg0) == RSHIFT_EXPR
10816 	  && integer_zerop (arg1)
10817 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10818 	{
10819 	  tree arg00 = TREE_OPERAND (arg0, 0);
10820 	  tree arg01 = TREE_OPERAND (arg0, 1);
10821 	  tree itype = TREE_TYPE (arg00);
10822 	  if (wi::to_wide (arg01) == element_precision (itype) - 1)
10823 	    {
10824 	      if (TYPE_UNSIGNED (itype))
10825 		{
10826 		  itype = signed_type_for (itype);
10827 		  arg00 = fold_convert_loc (loc, itype, arg00);
10828 		}
10829 	      return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10830 				  type, arg00, build_zero_cst (itype));
10831 	    }
10832 	}
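      /* E.g. (illustrative only): for a 32-bit int X, (X >> 31) != 0
	 folds to X < 0; for unsigned X the operand is first converted to
	 the corresponding signed type so the sign-bit test is still
	 meaningful.  */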
10833 
10834       /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10835 	 (X & C) == 0 when C is a single bit.  */
10836       if (TREE_CODE (arg0) == BIT_AND_EXPR
10837 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10838 	  && integer_zerop (arg1)
10839 	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
10840 	{
10841 	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10842 				 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10843 				 TREE_OPERAND (arg0, 1));
10844 	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10845 				  type, tem,
10846 				  fold_convert_loc (loc, TREE_TYPE (arg0),
10847 						    arg1));
10848 	}
10849 
10850       /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10851 	 constant C is a power of two, i.e. a single bit.  */
10852       if (TREE_CODE (arg0) == BIT_XOR_EXPR
10853 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10854 	  && integer_zerop (arg1)
10855 	  && integer_pow2p (TREE_OPERAND (arg0, 1))
10856 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10857 			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10858 	{
10859 	  tree arg00 = TREE_OPERAND (arg0, 0);
10860 	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10861 			      arg00, build_int_cst (TREE_TYPE (arg00), 0));
10862 	}
10863 
10864       /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10865 	 when C is a power of two, i.e. a single bit.  */
10866       if (TREE_CODE (arg0) == BIT_AND_EXPR
10867 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10868 	  && integer_zerop (arg1)
10869 	  && integer_pow2p (TREE_OPERAND (arg0, 1))
10870 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10871 			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10872 	{
10873 	  tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10874 	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
10875 			     arg000, TREE_OPERAND (arg0, 1));
10876 	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10877 			      tem, build_int_cst (TREE_TYPE (tem), 0));
10878 	}
10879 
10880       if (integer_zerop (arg1)
10881 	  && tree_expr_nonzero_p (arg0))
10882         {
10883 	  tree res = constant_boolean_node (code == NE_EXPR, type);
10884 	  return omit_one_operand_loc (loc, type, res, arg0);
10885 	}
10886 
10887       /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries.  */
10888       if (TREE_CODE (arg0) == BIT_AND_EXPR
10889 	  && TREE_CODE (arg1) == BIT_AND_EXPR)
10890 	{
10891 	  tree arg00 = TREE_OPERAND (arg0, 0);
10892 	  tree arg01 = TREE_OPERAND (arg0, 1);
10893 	  tree arg10 = TREE_OPERAND (arg1, 0);
10894 	  tree arg11 = TREE_OPERAND (arg1, 1);
10895 	  tree itype = TREE_TYPE (arg0);
10896 
10897 	  if (operand_equal_p (arg01, arg11, 0))
10898 	    {
10899 	      tem = fold_convert_loc (loc, itype, arg10);
10900 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10901 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
10902 	      return fold_build2_loc (loc, code, type, tem,
10903 				      build_zero_cst (itype));
10904 	    }
10905 	  if (operand_equal_p (arg01, arg10, 0))
10906 	    {
10907 	      tem = fold_convert_loc (loc, itype, arg11);
10908 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10909 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
10910 	      return fold_build2_loc (loc, code, type, tem,
10911 				      build_zero_cst (itype));
10912 	    }
10913 	  if (operand_equal_p (arg00, arg11, 0))
10914 	    {
10915 	      tem = fold_convert_loc (loc, itype, arg10);
10916 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
10917 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
10918 	      return fold_build2_loc (loc, code, type, tem,
10919 				      build_zero_cst (itype));
10920 	    }
10921 	  if (operand_equal_p (arg00, arg10, 0))
10922 	    {
10923 	      tem = fold_convert_loc (loc, itype, arg11);
10924 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
10925 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
10926 	      return fold_build2_loc (loc, code, type, tem,
10927 				      build_zero_cst (itype));
10928 	    }
10929 	}
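      /* Sketch of the first rewrite above (illustrative only):
	     (X & 0xFF) == (Y & 0xFF)
	 becomes
	     ((X ^ Y) & 0xFF) == 0,
	 i.e. "X and Y agree on the low byte", which uses the constant
	 once and exposes further bitwise simplifications.  */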
10930 
10931       if (TREE_CODE (arg0) == BIT_XOR_EXPR
10932 	  && TREE_CODE (arg1) == BIT_XOR_EXPR)
10933 	{
10934 	  tree arg00 = TREE_OPERAND (arg0, 0);
10935 	  tree arg01 = TREE_OPERAND (arg0, 1);
10936 	  tree arg10 = TREE_OPERAND (arg1, 0);
10937 	  tree arg11 = TREE_OPERAND (arg1, 1);
10938 	  tree itype = TREE_TYPE (arg0);
10939 
10940 	  /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
10941 	     operand_equal_p guarantees no side-effects so we don't need
10942 	     to use omit_one_operand on Z.  */
10943 	  if (operand_equal_p (arg01, arg11, 0))
10944 	    return fold_build2_loc (loc, code, type, arg00,
10945 				    fold_convert_loc (loc, TREE_TYPE (arg00),
10946 						      arg10));
10947 	  if (operand_equal_p (arg01, arg10, 0))
10948 	    return fold_build2_loc (loc, code, type, arg00,
10949 				    fold_convert_loc (loc, TREE_TYPE (arg00),
10950 						      arg11));
10951 	  if (operand_equal_p (arg00, arg11, 0))
10952 	    return fold_build2_loc (loc, code, type, arg01,
10953 				    fold_convert_loc (loc, TREE_TYPE (arg01),
10954 						      arg10));
10955 	  if (operand_equal_p (arg00, arg10, 0))
10956 	    return fold_build2_loc (loc, code, type, arg01,
10957 				    fold_convert_loc (loc, TREE_TYPE (arg01),
10958 						      arg11));
10959 
10960 	  /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y.  */
10961 	  if (TREE_CODE (arg01) == INTEGER_CST
10962 	      && TREE_CODE (arg11) == INTEGER_CST)
10963 	    {
10964 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
10965 				     fold_convert_loc (loc, itype, arg11));
10966 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10967 	      return fold_build2_loc (loc, code, type, tem,
10968 				      fold_convert_loc (loc, itype, arg10));
10969 	    }
10970 	}
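      /* Worked constant case for the last rewrite (illustrative only):
	     (X ^ 5) == (Y ^ 3)
	 becomes
	     (X ^ (5 ^ 3)) == Y,  i.e.  (X ^ 6) == Y.  */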
10971 
10972       /* Attempt to simplify equality/inequality comparisons of complex
10973 	 values.  Only lower the comparison if the result is known or
10974 	 can be simplified to a single scalar comparison.  */
10975       if ((TREE_CODE (arg0) == COMPLEX_EXPR
10976 	   || TREE_CODE (arg0) == COMPLEX_CST)
10977 	  && (TREE_CODE (arg1) == COMPLEX_EXPR
10978 	      || TREE_CODE (arg1) == COMPLEX_CST))
10979 	{
10980 	  tree real0, imag0, real1, imag1;
10981 	  tree rcond, icond;
10982 
10983 	  if (TREE_CODE (arg0) == COMPLEX_EXPR)
10984 	    {
10985 	      real0 = TREE_OPERAND (arg0, 0);
10986 	      imag0 = TREE_OPERAND (arg0, 1);
10987 	    }
10988 	  else
10989 	    {
10990 	      real0 = TREE_REALPART (arg0);
10991 	      imag0 = TREE_IMAGPART (arg0);
10992 	    }
10993 
10994 	  if (TREE_CODE (arg1) == COMPLEX_EXPR)
10995 	    {
10996 	      real1 = TREE_OPERAND (arg1, 0);
10997 	      imag1 = TREE_OPERAND (arg1, 1);
10998 	    }
10999 	  else
11000 	    {
11001 	      real1 = TREE_REALPART (arg1);
11002 	      imag1 = TREE_IMAGPART (arg1);
11003 	    }
11004 
11005 	  rcond = fold_binary_loc (loc, code, type, real0, real1);
11006 	  if (rcond && TREE_CODE (rcond) == INTEGER_CST)
11007 	    {
11008 	      if (integer_zerop (rcond))
11009 		{
11010 		  if (code == EQ_EXPR)
11011 		    return omit_two_operands_loc (loc, type, boolean_false_node,
11012 					      imag0, imag1);
11013 		  return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
11014 		}
11015 	      else
11016 		{
11017 		  if (code == NE_EXPR)
11018 		    return omit_two_operands_loc (loc, type, boolean_true_node,
11019 					      imag0, imag1);
11020 		  return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
11021 		}
11022 	    }
11023 
11024 	  icond = fold_binary_loc (loc, code, type, imag0, imag1);
11025 	  if (icond && TREE_CODE (icond) == INTEGER_CST)
11026 	    {
11027 	      if (integer_zerop (icond))
11028 		{
11029 		  if (code == EQ_EXPR)
11030 		    return omit_two_operands_loc (loc, type, boolean_false_node,
11031 					      real0, real1);
11032 		  return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
11033 		}
11034 	      else
11035 		{
11036 		  if (code == NE_EXPR)
11037 		    return omit_two_operands_loc (loc, type, boolean_true_node,
11038 					      real0, real1);
11039 		  return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
11040 		}
11041 	    }
11042 	}
11043 
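      /* For example (illustrative only): with integral components,
	 COMPLEX_EXPR <a, b> == COMPLEX_EXPR <a, c> folds the real-part
	 comparison a == a to a constant true, so the whole test lowers
	 to the single scalar comparison b == c.  */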
11044       return NULL_TREE;
11045 
11046     case LT_EXPR:
11047     case GT_EXPR:
11048     case LE_EXPR:
11049     case GE_EXPR:
11050       tem = fold_comparison (loc, code, type, op0, op1);
11051       if (tem != NULL_TREE)
11052 	return tem;
11053 
11054       /* Transform comparisons of the form X +- C CMP X.  */
11055       if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11056 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11057 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11058 	  && !HONOR_SNANS (arg0))
11059 	{
11060 	  tree arg01 = TREE_OPERAND (arg0, 1);
11061 	  enum tree_code code0 = TREE_CODE (arg0);
11062 	  int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11063 
11064 	  /* (X - c) > X becomes false.  */
11065 	  if (code == GT_EXPR
11066 	      && ((code0 == MINUS_EXPR && is_positive >= 0)
11067 		  || (code0 == PLUS_EXPR && is_positive <= 0)))
11068 	    return constant_boolean_node (0, type);
11069 
11070 	  /* Likewise (X + c) < X becomes false.  */
11071 	  if (code == LT_EXPR
11072 	      && ((code0 == PLUS_EXPR && is_positive >= 0)
11073 		  || (code0 == MINUS_EXPR && is_positive <= 0)))
11074 	    return constant_boolean_node (0, type);
11075 
11076 	  /* Convert (X - c) <= X to true.  */
11077 	  if (!HONOR_NANS (arg1)
11078 	      && code == LE_EXPR
11079 	      && ((code0 == MINUS_EXPR && is_positive >= 0)
11080 		  || (code0 == PLUS_EXPR && is_positive <= 0)))
11081 	    return constant_boolean_node (1, type);
11082 
11083 	  /* Convert (X + c) >= X to true.  */
11084 	  if (!HONOR_NANS (arg1)
11085 	      && code == GE_EXPR
11086 	      && ((code0 == PLUS_EXPR && is_positive >= 0)
11087 		  || (code0 == MINUS_EXPR && is_positive <= 0)))
11088 	    return constant_boolean_node (1, type);
11089 	}
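      /* Rationale for the asymmetry above (comment added for exposition):
	 with IEEE NaNs, (X - 1.0) > X is false even when X is a NaN,
	 because every ordered comparison involving a NaN is false; but
	 (X - 1.0) <= X would wrongly become true for a NaN, hence the
	 extra !HONOR_NANS guard on the LE and GE cases only.  */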
11090 
11091       /* If we are comparing an ABS_EXPR with a constant, we can
11092 	 convert all the cases into explicit comparisons, but they may
11093 	 well not be faster than doing the ABS and one comparison.
11094 	 But ABS (X) <= C is a range comparison, which becomes a subtraction
11095 	 and a comparison, and is probably faster.  */
11096       if (code == LE_EXPR
11097 	  && TREE_CODE (arg1) == INTEGER_CST
11098 	  && TREE_CODE (arg0) == ABS_EXPR
11099 	  && ! TREE_SIDE_EFFECTS (arg0)
11100 	  && (tem = negate_expr (arg1)) != 0
11101 	  && TREE_CODE (tem) == INTEGER_CST
11102 	  && !TREE_OVERFLOW (tem))
11103 	return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
11104 			    build2 (GE_EXPR, type,
11105 				    TREE_OPERAND (arg0, 0), tem),
11106 			    build2 (LE_EXPR, type,
11107 				    TREE_OPERAND (arg0, 0), arg1));
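      /* E.g. (illustrative only): ABS_EXPR <X> <= 5 becomes the range
	 test X >= -5 && X <= 5, built with TRUTH_ANDIF_EXPR as above.  */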
11108 
11109       /* Convert ABS_EXPR<x> >= 0 to true.  */
11110       strict_overflow_p = false;
11111       if (code == GE_EXPR
11112 	  && (integer_zerop (arg1)
11113 	      || (! HONOR_NANS (arg0)
11114 		  && real_zerop (arg1)))
11115 	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11116 	{
11117 	  if (strict_overflow_p)
11118 	    fold_overflow_warning (("assuming signed overflow does not occur "
11119 				    "when simplifying comparison of "
11120 				    "absolute value and zero"),
11121 				   WARN_STRICT_OVERFLOW_CONDITIONAL);
11122 	  return omit_one_operand_loc (loc, type,
11123 				       constant_boolean_node (true, type),
11124 				       arg0);
11125 	}
11126 
11127       /* Convert ABS_EXPR<x> < 0 to false.  */
11128       strict_overflow_p = false;
11129       if (code == LT_EXPR
11130 	  && (integer_zerop (arg1) || real_zerop (arg1))
11131 	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11132 	{
11133 	  if (strict_overflow_p)
11134 	    fold_overflow_warning (("assuming signed overflow does not occur "
11135 				    "when simplifying comparison of "
11136 				    "absolute value and zero"),
11137 				   WARN_STRICT_OVERFLOW_CONDITIONAL);
11138 	  return omit_one_operand_loc (loc, type,
11139 				       constant_boolean_node (false, type),
11140 				       arg0);
11141 	}
11142 
11143       /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11144 	 and similarly for >= into !=.  */
11145       if ((code == LT_EXPR || code == GE_EXPR)
11146 	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
11147 	  && TREE_CODE (arg1) == LSHIFT_EXPR
11148 	  && integer_onep (TREE_OPERAND (arg1, 0)))
11149 	return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11150 			   build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11151 				   TREE_OPERAND (arg1, 1)),
11152 			   build_zero_cst (TREE_TYPE (arg0)));
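      /* Rationale (comment added for exposition): for unsigned X,
	 X < (1 << Y) holds exactly when no bit of X at position Y or
	 above is set, i.e. when X >> Y == 0; X >= (1 << Y) is the
	 negation, X >> Y != 0.  */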
11153 
11154       /* Similarly for X < (cast) (1 << Y).  But the cast can't be
11155 	 narrowing, otherwise Y might be >= the number of bits in X's type
11156 	 and thus e.g. (unsigned char) (1 << Y) for Y == 15 might be 0.
11157 	 If the cast is widening, then 1 << Y should have unsigned type,
11158 	 otherwise if Y is the number of bits in the signed shift type minus
11159 	 1, we can't optimize this.  E.g. (unsigned long long) (1 << Y) for
11160 	 Y == 31 might be 0xffffffff80000000.  */
11161       if ((code == LT_EXPR || code == GE_EXPR)
11162 	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
11163 	  && CONVERT_EXPR_P (arg1)
11164 	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11165 	  && (element_precision (TREE_TYPE (arg1))
11166 	      >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
11167 	  && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
11168 	      || (element_precision (TREE_TYPE (arg1))
11169 		  == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
11170 	  && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11171 	{
11172 	  tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11173 			TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
11174 	  return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11175 			     fold_convert_loc (loc, TREE_TYPE (arg0), tem),
11176 			     build_zero_cst (TREE_TYPE (arg0)));
11177 	}
11178 
11179       return NULL_TREE;
11180 
11181     case UNORDERED_EXPR:
11182     case ORDERED_EXPR:
11183     case UNLT_EXPR:
11184     case UNLE_EXPR:
11185     case UNGT_EXPR:
11186     case UNGE_EXPR:
11187     case UNEQ_EXPR:
11188     case LTGT_EXPR:
11189       /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
11190       {
11191 	tree targ0 = strip_float_extensions (arg0);
11192 	tree targ1 = strip_float_extensions (arg1);
11193 	tree newtype = TREE_TYPE (targ0);
11194 
11195 	if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11196 	  newtype = TREE_TYPE (targ1);
11197 
11198 	if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11199 	  return fold_build2_loc (loc, code, type,
11200 			      fold_convert_loc (loc, newtype, targ0),
11201 			      fold_convert_loc (loc, newtype, targ1));
11202       }
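      /* E.g. (illustrative only): for floats F1 and F2, the test
	 UNLT_EXPR <(double) F1, (double) F2> can be done directly as
	 UNLT_EXPR <F1, F2>; widening both operands is exact and so
	 cannot change the result of the comparison.  */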
11203 
11204       return NULL_TREE;
11205 
11206     case COMPOUND_EXPR:
11207       /* When pedantic, a compound expression can be neither an lvalue
11208 	 nor an integer constant expression.  */
11209       if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11210 	return NULL_TREE;
11211       /* Don't let (0, 0) be a null pointer constant.  */
11212       tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11213 				 : fold_convert_loc (loc, type, arg1);
11214       return pedantic_non_lvalue_loc (loc, tem);
11215 
11216     case ASSERT_EXPR:
11217       /* An ASSERT_EXPR should never be passed to fold_binary.  */
11218       gcc_unreachable ();
11219 
11220     default:
11221       return NULL_TREE;
11222     } /* switch (code) */
11223 }
11224 
11225 /* Used by contains_label_p and contains_label_1.  */
11226 
11227 struct contains_label_data
11228 {
11229   hash_set<tree> *pset;
11230   bool inside_switch_p;
11231 };
11232 
11233 /* Callback for walk_tree, looking for a label.  Return *TP if it is a
11234    LABEL_EXPR, or a CASE_LABEL_EXPR that is not inside a nested
11235    SWITCH_EXPR; otherwise return NULL_TREE.  Do not check the subtrees
11235    of GOTO_EXPR.  */
11236 
11237 static tree
11238 contains_label_1 (tree *tp, int *walk_subtrees, void *data)
11239 {
11240   contains_label_data *d = (contains_label_data *) data;
11241   switch (TREE_CODE (*tp))
11242     {
11243     case LABEL_EXPR:
11244       return *tp;
11245 
11246     case CASE_LABEL_EXPR:
11247       if (!d->inside_switch_p)
11248 	return *tp;
11249       return NULL_TREE;
11250 
11251     case SWITCH_EXPR:
11252       if (!d->inside_switch_p)
11253 	{
11254 	  if (walk_tree (&SWITCH_COND (*tp), contains_label_1, data, d->pset))
11255 	    return *tp;
11256 	  d->inside_switch_p = true;
11257 	  if (walk_tree (&SWITCH_BODY (*tp), contains_label_1, data, d->pset))
11258 	    return *tp;
11259 	  d->inside_switch_p = false;
11260 	  *walk_subtrees = 0;
11261 	}
11262       return NULL_TREE;
11263 
11264     case GOTO_EXPR:
11265       *walk_subtrees = 0;
11266       return NULL_TREE;
11267 
11268     default:
11269       return NULL_TREE;
11270     }
11271 }
11272 
11273 /* Return whether the sub-tree ST contains a label which is accessible from
11274    outside the sub-tree.  */
11275 
11276 static bool
11277 contains_label_p (tree st)
11278 {
11279   hash_set<tree> pset;
11280   contains_label_data data = { &pset, false };
11281   return walk_tree (&st, contains_label_1, &data, &pset) != NULL_TREE;
11282 }
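/* Illustrative note (not in the original source): the COND_EXPR folding
   below uses this to avoid discarding the dead arm of a constant
   condition when that arm contains a label, since a jump from elsewhere
   in the function may still target it.  */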
11283 
11284 /* Fold a ternary expression of code CODE and type TYPE with operands
11285    OP0, OP1, and OP2.  Return the folded expression if folding is
11286    successful.  Otherwise, return NULL_TREE.  */
11287 
11288 tree
11289 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
11290 		  tree op0, tree op1, tree op2)
11291 {
11292   tree tem;
11293   tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
11294   enum tree_code_class kind = TREE_CODE_CLASS (code);
11295 
11296   gcc_assert (IS_EXPR_CODE_CLASS (kind)
11297 	      && TREE_CODE_LENGTH (code) == 3);
11298 
11299   /* If this is a commutative operation, and OP0 is a constant, move it
11300      to OP1 to reduce the number of tests below.  */
11301   if (commutative_ternary_tree_code (code)
11302       && tree_swap_operands_p (op0, op1))
11303     return fold_build3_loc (loc, code, type, op1, op0, op2);
11304 
11305   tem = generic_simplify (loc, code, type, op0, op1, op2);
11306   if (tem)
11307     return tem;
11308 
11309   /* Strip any conversions that don't change the mode.  This is safe
11310      for every expression, except for a comparison expression because
11311      its signedness is derived from its operands.  So, in the latter
11312      case, only strip conversions that don't change the signedness.
11313 
11314      Note that this is done as an internal manipulation within the
11315      constant folder, in order to find the simplest representation of
11316      the arguments so that their form can be studied.  In any case,
11317      the appropriate type conversions should be put back in the tree
11318      that will get out of the constant folder.  */
11319   if (op0)
11320     {
11321       arg0 = op0;
11322       STRIP_NOPS (arg0);
11323     }
11324 
11325   if (op1)
11326     {
11327       arg1 = op1;
11328       STRIP_NOPS (arg1);
11329     }
11330 
11331   if (op2)
11332     {
11333       arg2 = op2;
11334       STRIP_NOPS (arg2);
11335     }
11336 
11337   switch (code)
11338     {
11339     case COMPONENT_REF:
11340       if (TREE_CODE (arg0) == CONSTRUCTOR
11341 	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11342 	{
11343 	  unsigned HOST_WIDE_INT idx;
11344 	  tree field, value;
11345 	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11346 	    if (field == arg1)
11347 	      return value;
11348 	}
11349       return NULL_TREE;
11350 
11351     case COND_EXPR:
11352     case VEC_COND_EXPR:
11353       /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11354 	 so all simple results must be passed through pedantic_non_lvalue.  */
11355       if (TREE_CODE (arg0) == INTEGER_CST)
11356 	{
11357 	  tree unused_op = integer_zerop (arg0) ? op1 : op2;
11358 	  tem = integer_zerop (arg0) ? op2 : op1;
11359 	  /* Only optimize constant conditions when the selected branch
11360 	     has the same type as the COND_EXPR.  This avoids optimizing
11361 	     away "c ? x : throw", where the throw has a void type.
11362 	     Avoid throwing away the operand that contains a label.  */
11363 	  if ((!TREE_SIDE_EFFECTS (unused_op)
11364 	       || !contains_label_p (unused_op))
11365 	      && (! VOID_TYPE_P (TREE_TYPE (tem))
11366 		  || VOID_TYPE_P (type)))
11367 	    return pedantic_non_lvalue_loc (loc, tem);
11368 	  return NULL_TREE;
11369 	}
11370       else if (TREE_CODE (arg0) == VECTOR_CST)
11371 	{
11372 	  unsigned HOST_WIDE_INT nelts;
11373 	  if ((TREE_CODE (arg1) == VECTOR_CST
11374 	       || TREE_CODE (arg1) == CONSTRUCTOR)
11375 	      && (TREE_CODE (arg2) == VECTOR_CST
11376 		  || TREE_CODE (arg2) == CONSTRUCTOR)
11377 	      && TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
11378 	    {
11379 	      vec_perm_builder sel (nelts, nelts, 1);
11380 	      for (unsigned int i = 0; i < nelts; i++)
11381 		{
11382 		  tree val = VECTOR_CST_ELT (arg0, i);
11383 		  if (integer_all_onesp (val))
11384 		    sel.quick_push (i);
11385 		  else if (integer_zerop (val))
11386 		    sel.quick_push (nelts + i);
11387 		  else /* Currently unreachable.  */
11388 		    return NULL_TREE;
11389 		}
11390 	      vec_perm_indices indices (sel, 2, nelts);
11391 	      tree t = fold_vec_perm (type, arg1, arg2, indices);
11392 	      if (t != NULL_TREE)
11393 		return t;
11394 	    }
11395 	}
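      /* E.g. (illustrative only): a VEC_COND_EXPR on V4SI with the
	 constant mask {-1, 0, -1, 0} is rewritten as a permutation with
	 selector {0, 5, 2, 7}, taking elements 0 and 2 from the THEN
	 operand and elements 1 and 3 from the ELSE operand.  */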
11396 
11397       /* If we have A op B ? A : C, we may be able to convert this to a
11398 	 simpler expression, depending on the operation and the values
11399 	 of B and C.  Signed zeros prevent all of these transformations,
11400 	 for reasons given above each one.
11401 
11402          Also try swapping the arguments and inverting the conditional.  */
11403       if (COMPARISON_CLASS_P (arg0)
11404 	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op1)
11405 	  && !HONOR_SIGNED_ZEROS (element_mode (op1)))
11406 	{
11407 	  tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
11408 	  if (tem)
11409 	    return tem;
11410 	}
11411 
11412       if (COMPARISON_CLASS_P (arg0)
11413 	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op2)
11414 	  && !HONOR_SIGNED_ZEROS (element_mode (op2)))
11415 	{
11416 	  location_t loc0 = expr_location_or (arg0, loc);
11417 	  tem = fold_invert_truthvalue (loc0, arg0);
11418 	  if (tem && COMPARISON_CLASS_P (tem))
11419 	    {
11420 	      tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
11421 	      if (tem)
11422 		return tem;
11423 	    }
11424 	}
11425 
11426       /* If the second operand is simpler than the third, swap them
11427 	 since that produces better jump optimization results.  */
11428       if (truth_value_p (TREE_CODE (arg0))
11429 	  && tree_swap_operands_p (op1, op2))
11430 	{
11431 	  location_t loc0 = expr_location_or (arg0, loc);
11432 	  /* See if this can be inverted.  If it can't, possibly because
11433 	     it was a floating-point inequality comparison, don't do
11434 	     anything.  */
11435 	  tem = fold_invert_truthvalue (loc0, arg0);
11436 	  if (tem)
11437 	    return fold_build3_loc (loc, code, type, tem, op2, op1);
11438 	}
11439 
11440       /* Convert A ? 1 : 0 to simply A.  */
11441       if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
11442 				 : (integer_onep (op1)
11443 				    && !VECTOR_TYPE_P (type)))
11444 	  && integer_zerop (op2)
11445 	  /* If we try to convert OP0 to our type, the
11446 	     call to fold will try to move the conversion inside
11447 	     a COND, which will recurse.  In that case, the COND_EXPR
11448 	     is probably the best choice, so leave it alone.  */
11449 	  && type == TREE_TYPE (arg0))
11450 	return pedantic_non_lvalue_loc (loc, arg0);
11451 
11452       /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
11453 	 over COND_EXPR in cases such as floating point comparisons.  */
11454       if (integer_zerop (op1)
11455 	  && code == COND_EXPR
11456 	  && integer_onep (op2)
11457 	  && !VECTOR_TYPE_P (type)
11458 	  && truth_value_p (TREE_CODE (arg0)))
11459 	return pedantic_non_lvalue_loc (loc,
11460 				    fold_convert_loc (loc, type,
11461 					      invert_truthvalue_loc (loc,
11462 								     arg0)));
11463 
11464       /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
11465       if (TREE_CODE (arg0) == LT_EXPR
11466 	  && integer_zerop (TREE_OPERAND (arg0, 1))
11467 	  && integer_zerop (op2)
11468 	  && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11469 	{
11470 	  /* sign_bit_p looks through both zero and sign extensions,
11471 	     but for this optimization only sign extensions are
11472 	     usable.  */
11473 	  tree tem2 = TREE_OPERAND (arg0, 0);
11474 	  while (tem != tem2)
11475 	    {
11476 	      if (TREE_CODE (tem2) != NOP_EXPR
11477 		  || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
11478 		{
11479 		  tem = NULL_TREE;
11480 		  break;
11481 		}
11482 	      tem2 = TREE_OPERAND (tem2, 0);
11483 	    }
11484 	  /* sign_bit_p only checks ARG1 bits within A's precision.
11485 	     If <sign bit of A> has wider type than A, bits outside
11486 	     of A's precision in <sign bit of A> need to be checked.
11487 	     If they are all 0, this optimization needs to be done
11488 	     in unsigned A's type, if they are all 1 in signed A's type,
11489 	     otherwise this can't be done.  */
11490 	  if (tem
11491 	      && TYPE_PRECISION (TREE_TYPE (tem))
11492 		 < TYPE_PRECISION (TREE_TYPE (arg1))
11493 	      && TYPE_PRECISION (TREE_TYPE (tem))
11494 		 < TYPE_PRECISION (type))
11495 	    {
11496 	      int inner_width, outer_width;
11497 	      tree tem_type;
11498 
11499 	      inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11500 	      outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11501 	      if (outer_width > TYPE_PRECISION (type))
11502 		outer_width = TYPE_PRECISION (type);
11503 
11504 	      wide_int mask = wi::shifted_mask
11505 		(inner_width, outer_width - inner_width, false,
11506 		 TYPE_PRECISION (TREE_TYPE (arg1)));
11507 
11508 	      wide_int common = mask & wi::to_wide (arg1);
11509 	      if (common == mask)
11510 		{
11511 		  tem_type = signed_type_for (TREE_TYPE (tem));
11512 		  tem = fold_convert_loc (loc, tem_type, tem);
11513 		}
11514 	      else if (common == 0)
11515 		{
11516 		  tem_type = unsigned_type_for (TREE_TYPE (tem));
11517 		  tem = fold_convert_loc (loc, tem_type, tem);
11518 		}
11519 	      else
11520 		tem = NULL;
11521 	    }
11522 
11523 	  if (tem)
11524 	    return
11525 	      fold_convert_loc (loc, type,
11526 				fold_build2_loc (loc, BIT_AND_EXPR,
11527 					     TREE_TYPE (tem), tem,
11528 					     fold_convert_loc (loc,
11529 							       TREE_TYPE (tem),
11530 							       arg1)));
11531 	}
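	  /* Concrete instance (illustrative only), with a 32-bit int X:
		 X < 0 ? INT_MIN : 0
	     folds to
		 X & INT_MIN,
	     because the selected constant is exactly the sign bit of X.  */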
11532 
11533       /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
11534 	 already handled above.  */
11535       if (TREE_CODE (arg0) == BIT_AND_EXPR
11536 	  && integer_onep (TREE_OPERAND (arg0, 1))
11537 	  && integer_zerop (op2)
11538 	  && integer_pow2p (arg1))
11539 	{
11540 	  tree tem = TREE_OPERAND (arg0, 0);
11541 	  STRIP_NOPS (tem);
11542 	  if (TREE_CODE (tem) == RSHIFT_EXPR
11543 	      && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
11544 	      && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
11545 		 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
11546 	    return fold_build2_loc (loc, BIT_AND_EXPR, type,
11547 				    fold_convert_loc (loc, type,
11548 						      TREE_OPERAND (tem, 0)),
11549 				    op1);
11550 	}
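	  /* E.g. (illustrative only): ((X >> 3) & 1) ? 8 : 0 folds to
	     X & 8, since the tested bit and the selected power of two
	     name the same bit position N == 3.  */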
11551 
11552       /* A & N ? N : 0 is simply A & N if N is a power of two.  This
11553 	 is probably obsolete because the first operand should be a
11554 	 truth value (that's why we have the two cases above), but let's
11555 	 leave it in until we can confirm this for all front-ends.  */
11556       if (integer_zerop (op2)
11557 	  && TREE_CODE (arg0) == NE_EXPR
11558 	  && integer_zerop (TREE_OPERAND (arg0, 1))
11559 	  && integer_pow2p (arg1)
11560 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11561 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11562 			      arg1, OEP_ONLY_CONST))
11563 	return pedantic_non_lvalue_loc (loc,
11564 				    fold_convert_loc (loc, type,
11565 						      TREE_OPERAND (arg0, 0)));
11566 
11567       /* Disable the transformations below for vectors, since
11568 	 fold_binary_op_with_conditional_arg may undo them immediately,
11569 	 yielding an infinite loop.  */
11570       if (code == VEC_COND_EXPR)
11571 	return NULL_TREE;
11572 
11573       /* Convert A ? B : 0 into A && B if A and B are truth values.  */
11574       if (integer_zerop (op2)
11575 	  && truth_value_p (TREE_CODE (arg0))
11576 	  && truth_value_p (TREE_CODE (arg1))
11577 	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11578 	return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
11579 							   : TRUTH_ANDIF_EXPR,
11580 				type, fold_convert_loc (loc, type, arg0), op1);
11581 
11582       /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
11583       if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
11584 	  && truth_value_p (TREE_CODE (arg0))
11585 	  && truth_value_p (TREE_CODE (arg1))
11586 	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11587 	{
11588 	  location_t loc0 = expr_location_or (arg0, loc);
11589 	  /* Only perform transformation if ARG0 is easily inverted.  */
11590 	  tem = fold_invert_truthvalue (loc0, arg0);
11591 	  if (tem)
11592 	    return fold_build2_loc (loc, code == VEC_COND_EXPR
11593 					 ? BIT_IOR_EXPR
11594 					 : TRUTH_ORIF_EXPR,
11595 				    type, fold_convert_loc (loc, type, tem),
11596 				    op1);
11597 	}
11598 
11599       /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
11600       if (integer_zerop (arg1)
11601 	  && truth_value_p (TREE_CODE (arg0))
11602 	  && truth_value_p (TREE_CODE (op2))
11603 	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11604 	{
11605 	  location_t loc0 = expr_location_or (arg0, loc);
11606 	  /* Only perform transformation if ARG0 is easily inverted.  */
11607 	  tem = fold_invert_truthvalue (loc0, arg0);
11608 	  if (tem)
11609 	    return fold_build2_loc (loc, code == VEC_COND_EXPR
11610 					 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
11611 				    type, fold_convert_loc (loc, type, tem),
11612 				    op2);
11613 	}
11614 
11615       /* Convert A ? 1 : B into A || B if A and B are truth values.  */
11616       if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
11617 	  && truth_value_p (TREE_CODE (arg0))
11618 	  && truth_value_p (TREE_CODE (op2))
11619 	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11620 	return fold_build2_loc (loc, code == VEC_COND_EXPR
11621 				     ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
11622 				type, fold_convert_loc (loc, type, arg0), op2);
11623 
11624       return NULL_TREE;
11625 
11626     case CALL_EXPR:
11627       /* CALL_EXPRs used to be ternary exprs.  Catch any mistaken uses
11628 	 of fold_ternary on them.  */
11629       gcc_unreachable ();
11630 
11631     case BIT_FIELD_REF:
11632       if (TREE_CODE (arg0) == VECTOR_CST
11633 	  && (type == TREE_TYPE (TREE_TYPE (arg0))
11634 	      || (VECTOR_TYPE_P (type)
11635 		  && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0))))
11636 	  && tree_fits_uhwi_p (op1)
11637 	  && tree_fits_uhwi_p (op2))
11638 	{
11639 	  tree eltype = TREE_TYPE (TREE_TYPE (arg0));
11640 	  unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
11641 	  unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
11642 	  unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
11643 
11644 	  if (n != 0
11645 	      && (idx % width) == 0
11646 	      && (n % width) == 0
11647 	      && known_le ((idx + n) / width,
11648 			   TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))))
11649 	    {
11650 	      idx = idx / width;
11651 	      n = n / width;
11652 
11653 	      if (TREE_CODE (arg0) == VECTOR_CST)
11654 		{
11655 		  if (n == 1)
11656 		    {
11657 		      tem = VECTOR_CST_ELT (arg0, idx);
11658 		      if (VECTOR_TYPE_P (type))
11659 			tem = fold_build1 (VIEW_CONVERT_EXPR, type, tem);
11660 		      return tem;
11661 		    }
11662 
11663 		  tree_vector_builder vals (type, n, 1);
11664 		  for (unsigned i = 0; i < n; ++i)
11665 		    vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
11666 		  return vals.build ();
11667 		}
11668 	    }
11669 	}
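      /* For example (illustrative only): a 32-bit BIT_FIELD_REF at bit
	 offset 64 into the V4SI constant {1, 2, 3, 4} selects element
	 64 / 32 == 2, i.e. the constant 3; a 64-bit reference at offset 0
	 would instead build the two-element constant vector {1, 2}.  */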
11670 
11671       /* On constants we can use native encode/interpret to constant
11672          fold (nearly) all BIT_FIELD_REFs.  */
11673       if (CONSTANT_CLASS_P (arg0)
11674 	  && can_native_interpret_type_p (type)
11675 	  && BITS_PER_UNIT == 8
11676 	  && tree_fits_uhwi_p (op1)
11677 	  && tree_fits_uhwi_p (op2))
11678 	{
11679 	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11680 	  unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
11681 	  /* Limit us to a reasonable amount of work.  To relax the
11682 	     other limitations we need bit-shifting of the buffer
11683 	     and rounding up the size.  */
11684 	  if (bitpos % BITS_PER_UNIT == 0
11685 	      && bitsize % BITS_PER_UNIT == 0
11686 	      && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
11687 	    {
11688 	      unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
11689 	      unsigned HOST_WIDE_INT len
11690 		= native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
11691 				      bitpos / BITS_PER_UNIT);
11692 	      if (len > 0
11693 		  && len * BITS_PER_UNIT >= bitsize)
11694 		{
11695 		  tree v = native_interpret_expr (type, b,
11696 						  bitsize / BITS_PER_UNIT);
11697 		  if (v)
11698 		    return v;
11699 		}
11700 	    }
11701 	}
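      /* Sketch of the path above (illustrative only): a BIT_FIELD_REF of
	 bits 0..31 of a double constant is folded by serializing the
	 constant's target byte image into B with native_encode_expr and
	 re-reading the first four bytes as TYPE with
	 native_interpret_expr.  */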
11702 
11703       return NULL_TREE;
11704 
11705     case FMA_EXPR:
11706       /* For integers we can decompose the FMA if possible.  */
11707       if (TREE_CODE (arg0) == INTEGER_CST
11708 	  && TREE_CODE (arg1) == INTEGER_CST)
11709 	return fold_build2_loc (loc, PLUS_EXPR, type,
11710 				const_binop (MULT_EXPR, arg0, arg1), arg2);
11711       if (integer_zerop (arg2))
11712 	return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11713 
11714       return fold_fma (loc, type, arg0, arg1, arg2);
11715 
11716     case VEC_PERM_EXPR:
11717       if (TREE_CODE (arg2) == VECTOR_CST)
11718 	{
11719 	  /* Build a vector of integers from the tree mask.  */
11720 	  vec_perm_builder builder;
11721 	  if (!tree_to_vec_perm_builder (&builder, arg2))
11722 	    return NULL_TREE;
11723 
11724 	  /* Create a vec_perm_indices for the integer vector.  */
11725 	  poly_uint64 nelts = TYPE_VECTOR_SUBPARTS (type);
11726 	  bool single_arg = (op0 == op1);
11727 	  vec_perm_indices sel (builder, single_arg ? 1 : 2, nelts);
11728 
11729 	  /* Check for cases that fold to OP0 or OP1 in their original
11730 	     element order.  */
11731 	  if (sel.series_p (0, 1, 0, 1))
11732 	    return op0;
11733 	  if (sel.series_p (0, 1, nelts, 1))
11734 	    return op1;
11735 
11736 	  if (!single_arg)
11737 	    {
11738 	      if (sel.all_from_input_p (0))
11739 		op1 = op0;
11740 	      else if (sel.all_from_input_p (1))
11741 		{
11742 		  op0 = op1;
11743 		  sel.rotate_inputs (1);
11744 		}
11745 	    }
11746 
11747 	  if ((TREE_CODE (op0) == VECTOR_CST
11748 	       || TREE_CODE (op0) == CONSTRUCTOR)
11749 	      && (TREE_CODE (op1) == VECTOR_CST
11750 		  || TREE_CODE (op1) == CONSTRUCTOR))
11751 	    {
11752 	      tree t = fold_vec_perm (type, op0, op1, sel);
11753 	      if (t != NULL_TREE)
11754 		return t;
11755 	    }
11756 
11757 	  bool changed = (op0 == op1 && !single_arg);
11758 
11759 	  /* Generate a canonical form of the selector.  */
11760 	  if (arg2 == op2 && sel.encoding () != builder)
11761 	    {
11762 	      /* Some targets are deficient and fail to expand a
11763 		 single-argument permutation while still allowing an
11764 		 equivalent two-argument version.  */
11765 	      if (sel.ninputs () == 2
11766 		  || can_vec_perm_const_p (TYPE_MODE (type), sel, false))
11767 		op2 = vec_perm_indices_to_tree (TREE_TYPE (arg2), sel);
11768 	      else
11769 		{
11770 		  vec_perm_indices sel2 (builder, 2, nelts);
11771 		  if (can_vec_perm_const_p (TYPE_MODE (type), sel2, false))
11772 		    op2 = vec_perm_indices_to_tree (TREE_TYPE (arg2), sel2);
11773 		  else
11774 		    /* Not directly supported with either encoding,
11775 		       so use the preferred form.  */
11776 		    op2 = vec_perm_indices_to_tree (TREE_TYPE (arg2), sel);
11777 		}
11778 	      changed = true;
11779 	    }
11780 
11781 	  if (changed)
11782 	    return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
11783 	}
11784       return NULL_TREE;
11785 
11786     case BIT_INSERT_EXPR:
11787       /* Perform (partial) constant folding of BIT_INSERT_EXPR.  */
11788       if (TREE_CODE (arg0) == INTEGER_CST
11789 	  && TREE_CODE (arg1) == INTEGER_CST)
11790 	{
11791 	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11792 	  unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
11793 	  wide_int tem = (wi::to_wide (arg0)
11794 			  & wi::shifted_mask (bitpos, bitsize, true,
11795 					      TYPE_PRECISION (type)));
11796 	  wide_int tem2
11797 	    = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
11798 				    bitsize), bitpos);
11799 	  return wide_int_to_tree (type, wi::bit_or (tem, tem2));
11800 	}
11801       else if (TREE_CODE (arg0) == VECTOR_CST
11802 	       && CONSTANT_CLASS_P (arg1)
11803 	       && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
11804 				      TREE_TYPE (arg1)))
11805 	{
11806 	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11807 	  unsigned HOST_WIDE_INT elsize
11808 	    = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
11809 	  if (bitpos % elsize == 0)
11810 	    {
11811 	      unsigned k = bitpos / elsize;
11812 	      unsigned HOST_WIDE_INT nelts;
11813 	      if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
11814 		return arg0;
11815 	      else if (VECTOR_CST_NELTS (arg0).is_constant (&nelts))
11816 		{
11817 		  tree_vector_builder elts (type, nelts, 1);
11818 		  elts.quick_grow (nelts);
11819 		  for (unsigned HOST_WIDE_INT i = 0; i < nelts; ++i)
11820 		    elts[i] = (i == k ? arg1 : VECTOR_CST_ELT (arg0, i));
11821 		  return elts.build ();
11822 		}
11823 	    }
11824 	}
11825       return NULL_TREE;
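      /* Worked example for the integer branch above (illustrative only):
	 inserting the 8-bit value 0xAB at bit position 8 of the 32-bit
	 constant 0x11223344 masks out the old field (giving 0x11220044)
	 and ORs in the zero-extended, shifted replacement 0xAB00,
	 yielding 0x1122AB44.  */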
11826 
11827     default:
11828       return NULL_TREE;
11829     } /* switch (code) */
11830 }
11831 
11832 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
11833    of an array (or vector).  */
11834 
11835 tree
11836 get_array_ctor_element_at_index (tree ctor, offset_int access_index)
11837 {
11838   tree index_type = NULL_TREE;
11839   offset_int low_bound = 0;
11840 
11841   if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
11842     {
11843       tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
11844       if (domain_type && TYPE_MIN_VALUE (domain_type))
11845 	{
11846 	  /* Static constructors for variably sized objects make no sense.  */
11847 	  gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
11848 	  index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
11849 	  low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
11850 	}
11851     }
11852 
11853   if (index_type)
11854     access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
11855 			    TYPE_SIGN (index_type));
11856 
11857   offset_int index = low_bound - 1;
11858   if (index_type)
11859     index = wi::ext (index, TYPE_PRECISION (index_type),
11860 		     TYPE_SIGN (index_type));
11861 
11862   offset_int max_index;
11863   unsigned HOST_WIDE_INT cnt;
11864   tree cfield, cval;
11865 
11866   FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
11867     {
11868       /* An array constructor might explicitly set the index, specify a
11869 	 range, or leave the index NULL, meaning it is the next index after
11870 	 the previous one.  */
11871       if (cfield)
11872 	{
11873 	  if (TREE_CODE (cfield) == INTEGER_CST)
11874 	    max_index = index = wi::to_offset (cfield);
11875 	  else
11876 	    {
11877 	      gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
11878 	      index = wi::to_offset (TREE_OPERAND (cfield, 0));
11879 	      max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
11880 	    }
11881 	}
11882       else
11883 	{
11884 	  index += 1;
11885 	  if (index_type)
11886 	    index = wi::ext (index, TYPE_PRECISION (index_type),
11887 			     TYPE_SIGN (index_type));
11888 	  max_index = index;
11889 	}
11890 
11891     /* Do we have match?  */
11892       /* Do we have a match?  */
11893       if (wi::cmpu (access_index, index) >= 0
11894 	  && wi::cmpu (access_index, max_index) <= 0)
11895 	return cval;
11896     }
11897 }
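/* Usage sketch (illustrative, not in the original source): for the GNU C
   initializer

       int a[6] = { [1] = 10, [3 ... 5] = 7 };

   whose CONSTRUCTOR records an INTEGER_CST index for element 1 and a
   RANGE_EXPR for elements 3..5, calling
   get_array_ctor_element_at_index (ctor, 4) returns the INTEGER_CST 7.  */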
11898 
11899 /* Perform constant folding and related simplification of EXPR.
11900    The related simplifications include x*1 => x, x*0 => 0, etc.,
11901    and application of the associative law.
11902    NOP_EXPR conversions may be removed freely (as long as we
11903    are careful not to change the type of the overall expression).
11904    We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11905    but we can constant-fold them if they have constant operands.  */
11906 
11907 #ifdef ENABLE_FOLD_CHECKING
11908 # define fold(x) fold_1 (x)
11909 static tree fold_1 (tree);
11910 static
11911 #endif
11912 tree
11913 fold (tree expr)
11914 {
11915   const tree t = expr;
11916   enum tree_code code = TREE_CODE (t);
11917   enum tree_code_class kind = TREE_CODE_CLASS (code);
11918   tree tem;
11919   location_t loc = EXPR_LOCATION (expr);
11920 
11921   /* Return right away if a constant.  */
11922   if (kind == tcc_constant)
11923     return t;
11924 
11925   /* CALL_EXPR-like objects with variable numbers of operands are
11926      treated specially.  */
11927   if (kind == tcc_vl_exp)
11928     {
11929       if (code == CALL_EXPR)
11930 	{
11931 	  tem = fold_call_expr (loc, expr, false);
11932 	  return tem ? tem : expr;
11933 	}
11934       return expr;
11935     }
11936 
11937   if (IS_EXPR_CODE_CLASS (kind))
11938     {
11939       tree type = TREE_TYPE (t);
11940       tree op0, op1, op2;
11941 
11942       switch (TREE_CODE_LENGTH (code))
11943 	{
11944 	case 1:
11945 	  op0 = TREE_OPERAND (t, 0);
11946 	  tem = fold_unary_loc (loc, code, type, op0);
11947 	  return tem ? tem : expr;
11948 	case 2:
11949 	  op0 = TREE_OPERAND (t, 0);
11950 	  op1 = TREE_OPERAND (t, 1);
11951 	  tem = fold_binary_loc (loc, code, type, op0, op1);
11952 	  return tem ? tem : expr;
11953 	case 3:
11954 	  op0 = TREE_OPERAND (t, 0);
11955 	  op1 = TREE_OPERAND (t, 1);
11956 	  op2 = TREE_OPERAND (t, 2);
11957 	  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
11958 	  return tem ? tem : expr;
11959 	default:
11960 	  break;
11961 	}
11962     }
11963 
11964   switch (code)
11965     {
11966     case ARRAY_REF:
11967       {
11968 	tree op0 = TREE_OPERAND (t, 0);
11969 	tree op1 = TREE_OPERAND (t, 1);
11970 
11971 	if (TREE_CODE (op1) == INTEGER_CST
11972 	    && TREE_CODE (op0) == CONSTRUCTOR
11973 	    && ! type_contains_placeholder_p (TREE_TYPE (op0)))
11974 	  {
11975 	    tree val = get_array_ctor_element_at_index (op0,
11976 							wi::to_offset (op1));
11977 	    if (val)
11978 	      return val;
11979 	  }
11980 
11981 	return t;
11982       }
11983 
11984       /* Return a VECTOR_CST if possible.  */
11985     case CONSTRUCTOR:
11986       {
11987 	tree type = TREE_TYPE (t);
11988 	if (TREE_CODE (type) != VECTOR_TYPE)
11989 	  return t;
11990 
11991 	unsigned i;
11992 	tree val;
11993 	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
11994 	  if (! CONSTANT_CLASS_P (val))
11995 	    return t;
11996 
11997 	return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
11998       }
11999 
12000     case CONST_DECL:
12001       return fold (DECL_INITIAL (t));
12002 
12003     default:
12004       return t;
12005     } /* switch (code) */
12006 }
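/* Minimal usage sketch (illustrative, not in the original source):

       tree sum = fold (build2 (PLUS_EXPR, integer_type_node,
				build_int_cst (integer_type_node, 2),
				build_int_cst (integer_type_node, 3)));

   hands the PLUS_EXPR to fold_binary_loc, which returns the
   INTEGER_CST 5.  */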
12007 
12008 #ifdef ENABLE_FOLD_CHECKING
12009 #undef fold
12010 
12011 static void fold_checksum_tree (const_tree, struct md5_ctx *,
12012 				hash_table<nofree_ptr_hash<const tree_node> > *);
12013 static void fold_check_failed (const_tree, const_tree);
12014 void print_fold_checksum (const_tree);
12015 
12016 /* When configured with --enable-checking=fold, compute a digest of EXPR
12017    before and after the actual fold call, to verify that fold did not
12018    accidentally change the original expression.  */
12019 
12020 tree
12021 fold (tree expr)
12022 {
12023   tree ret;
12024   struct md5_ctx ctx;
12025   unsigned char checksum_before[16], checksum_after[16];
12026   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12027 
12028   md5_init_ctx (&ctx);
12029   fold_checksum_tree (expr, &ctx, &ht);
12030   md5_finish_ctx (&ctx, checksum_before);
12031   ht.empty ();
12032 
12033   ret = fold_1 (expr);
12034 
12035   md5_init_ctx (&ctx);
12036   fold_checksum_tree (expr, &ctx, &ht);
12037   md5_finish_ctx (&ctx, checksum_after);
12038 
12039   if (memcmp (checksum_before, checksum_after, 16))
12040     fold_check_failed (expr, ret);
12041 
12042   return ret;
12043 }
12044 
12045 void
12046 print_fold_checksum (const_tree expr)
12047 {
12048   struct md5_ctx ctx;
12049   unsigned char checksum[16], cnt;
12050   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12051 
12052   md5_init_ctx (&ctx);
12053   fold_checksum_tree (expr, &ctx, &ht);
12054   md5_finish_ctx (&ctx, checksum);
12055   for (cnt = 0; cnt < 16; ++cnt)
12056     fprintf (stderr, "%02x", checksum[cnt]);
12057   putc ('\n', stderr);
12058 }
12059 
12060 static void
12061 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
12062 {
12063   internal_error ("fold check: original tree changed by fold");
12064 }
12065 
12066 static void
12067 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
12068 		    hash_table<nofree_ptr_hash <const tree_node> > *ht)
12069 {
12070   const tree_node **slot;
12071   enum tree_code code;
12072   union tree_node buf;
12073   int i, len;
12074 
12075  recursive_label:
12076   if (expr == NULL)
12077     return;
12078   slot = ht->find_slot (expr, INSERT);
12079   if (*slot != NULL)
12080     return;
12081   *slot = expr;
12082   code = TREE_CODE (expr);
12083   if (TREE_CODE_CLASS (code) == tcc_declaration
12084       && HAS_DECL_ASSEMBLER_NAME_P (expr))
12085     {
12086       /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified.  */
12087       memcpy ((char *) &buf, expr, tree_size (expr));
12088       SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
12089       buf.decl_with_vis.symtab_node = NULL;
12090       expr = (tree) &buf;
12091     }
12092   else if (TREE_CODE_CLASS (code) == tcc_type
12093 	   && (TYPE_POINTER_TO (expr)
12094 	       || TYPE_REFERENCE_TO (expr)
12095 	       || TYPE_CACHED_VALUES_P (expr)
12096 	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
12097 	       || TYPE_NEXT_VARIANT (expr)
12098 	       || TYPE_ALIAS_SET_KNOWN_P (expr)))
12099     {
12100       /* Allow these fields to be modified.  */
12101       tree tmp;
12102       memcpy ((char *) &buf, expr, tree_size (expr));
12103       expr = tmp = (tree) &buf;
12104       TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
12105       TYPE_POINTER_TO (tmp) = NULL;
12106       TYPE_REFERENCE_TO (tmp) = NULL;
12107       TYPE_NEXT_VARIANT (tmp) = NULL;
12108       TYPE_ALIAS_SET (tmp) = -1;
12109       if (TYPE_CACHED_VALUES_P (tmp))
12110 	{
12111 	  TYPE_CACHED_VALUES_P (tmp) = 0;
12112 	  TYPE_CACHED_VALUES (tmp) = NULL;
12113 	}
12114     }
12115   md5_process_bytes (expr, tree_size (expr), ctx);
12116   if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
12117     fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12118   if (TREE_CODE_CLASS (code) != tcc_type
12119       && TREE_CODE_CLASS (code) != tcc_declaration
12120       && code != TREE_LIST
12121       && code != SSA_NAME
12122       && CODE_CONTAINS_STRUCT (code, TS_COMMON))
12123     fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12124   switch (TREE_CODE_CLASS (code))
12125     {
12126     case tcc_constant:
12127       switch (code)
12128 	{
12129 	case STRING_CST:
12130 	  md5_process_bytes (TREE_STRING_POINTER (expr),
12131 			     TREE_STRING_LENGTH (expr), ctx);
12132 	  break;
12133 	case COMPLEX_CST:
12134 	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12135 	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12136 	  break;
12137 	case VECTOR_CST:
12138 	  len = vector_cst_encoded_nelts (expr);
12139 	  for (i = 0; i < len; ++i)
12140 	    fold_checksum_tree (VECTOR_CST_ENCODED_ELT (expr, i), ctx, ht);
12141 	  break;
12142 	default:
12143 	  break;
12144 	}
12145       break;
12146     case tcc_exceptional:
12147       switch (code)
12148 	{
12149 	case TREE_LIST:
12150 	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12151 	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12152 	  expr = TREE_CHAIN (expr);
12153 	  goto recursive_label;
12154 	  break;
12155 	case TREE_VEC:
12156 	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12157 	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12158 	  break;
12159 	default:
12160 	  break;
12161 	}
12162       break;
12163     case tcc_expression:
12164     case tcc_reference:
12165     case tcc_comparison:
12166     case tcc_unary:
12167     case tcc_binary:
12168     case tcc_statement:
12169     case tcc_vl_exp:
12170       len = TREE_OPERAND_LENGTH (expr);
12171       for (i = 0; i < len; ++i)
12172 	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12173       break;
12174     case tcc_declaration:
12175       fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12176       fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12177       if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12178 	{
12179 	  fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12180 	  fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12181 	  fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12182 	  fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12183 	  fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12184 	}
12185 
12186       if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12187 	{
12188 	  if (TREE_CODE (expr) == FUNCTION_DECL)
12189 	    {
12190 	      fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12191 	      fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
12192 	    }
12193 	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12194 	}
12195       break;
12196     case tcc_type:
12197       if (TREE_CODE (expr) == ENUMERAL_TYPE)
12198         fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12199       fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12200       fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12201       fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12202       fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12203       if (INTEGRAL_TYPE_P (expr)
12204           || SCALAR_FLOAT_TYPE_P (expr))
12205 	{
12206 	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12207 	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12208 	}
12209       fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12210       if (TREE_CODE (expr) == RECORD_TYPE
12211 	  || TREE_CODE (expr) == UNION_TYPE
12212 	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
12213 	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12214       fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12215       break;
12216     default:
12217       break;
12218     }
12219 }
12220 
12221 /* Helper function for outputting the checksum of a tree T.  When
12222    debugging with gdb, you can "define mynext" to be "next" followed
12223    by "call debug_fold_checksum (op0)", then just trace down until the
12224    outputs differ.  */
12225 
12226 DEBUG_FUNCTION void
12227 debug_fold_checksum (const_tree t)
12228 {
12229   int i;
12230   unsigned char checksum[16];
12231   struct md5_ctx ctx;
12232   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12233 
12234   md5_init_ctx (&ctx);
12235   fold_checksum_tree (t, &ctx, &ht);
12236   md5_finish_ctx (&ctx, checksum);
12237   ht.empty ();
12238 
12239   for (i = 0; i < 16; i++)
12240     fprintf (stderr, "%d ", checksum[i]);
12241 
12242   fprintf (stderr, "\n");
12243 }
12244 
12245 #endif
12246 
12247 /* Fold a unary tree expression with code CODE of type TYPE with an
12248    operand OP0.  LOC is the location of the resulting expression.
12249    Return a folded expression if successful.  Otherwise, return a tree
12250    expression with code CODE of type TYPE with an operand OP0.  */
12251 
12252 tree
12253 fold_build1_loc (location_t loc,
12254 		 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12255 {
12256   tree tem;
12257 #ifdef ENABLE_FOLD_CHECKING
12258   unsigned char checksum_before[16], checksum_after[16];
12259   struct md5_ctx ctx;
12260   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12261 
12262   md5_init_ctx (&ctx);
12263   fold_checksum_tree (op0, &ctx, &ht);
12264   md5_finish_ctx (&ctx, checksum_before);
12265   ht.empty ();
12266 #endif
12267 
12268   tem = fold_unary_loc (loc, code, type, op0);
12269   if (!tem)
12270     tem = build1_loc (loc, code, type, op0 PASS_MEM_STAT);
12271 
12272 #ifdef ENABLE_FOLD_CHECKING
12273   md5_init_ctx (&ctx);
12274   fold_checksum_tree (op0, &ctx, &ht);
12275   md5_finish_ctx (&ctx, checksum_after);
12276 
12277   if (memcmp (checksum_before, checksum_after, 16))
12278     fold_check_failed (op0, tem);
12279 #endif
12280   return tem;
12281 }
12282 
12283 /* Fold a binary tree expression with code CODE of type TYPE with
12284    operands OP0 and OP1.  LOC is the location of the resulting
12285    expression.  Return a folded expression if successful.  Otherwise,
12286    return a tree expression with code CODE of type TYPE with operands
12287    OP0 and OP1.  */
12288 
12289 tree
12290 fold_build2_loc (location_t loc,
12291 		      enum tree_code code, tree type, tree op0, tree op1
12292 		      MEM_STAT_DECL)
12293 {
12294   tree tem;
12295 #ifdef ENABLE_FOLD_CHECKING
12296   unsigned char checksum_before_op0[16],
12297                 checksum_before_op1[16],
12298 		checksum_after_op0[16],
12299 		checksum_after_op1[16];
12300   struct md5_ctx ctx;
12301   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12302 
12303   md5_init_ctx (&ctx);
12304   fold_checksum_tree (op0, &ctx, &ht);
12305   md5_finish_ctx (&ctx, checksum_before_op0);
12306   ht.empty ();
12307 
12308   md5_init_ctx (&ctx);
12309   fold_checksum_tree (op1, &ctx, &ht);
12310   md5_finish_ctx (&ctx, checksum_before_op1);
12311   ht.empty ();
12312 #endif
12313 
12314   tem = fold_binary_loc (loc, code, type, op0, op1);
12315   if (!tem)
12316     tem = build2_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
12317 
12318 #ifdef ENABLE_FOLD_CHECKING
12319   md5_init_ctx (&ctx);
12320   fold_checksum_tree (op0, &ctx, &ht);
12321   md5_finish_ctx (&ctx, checksum_after_op0);
12322   ht.empty ();
12323 
12324   if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12325     fold_check_failed (op0, tem);
12326 
12327   md5_init_ctx (&ctx);
12328   fold_checksum_tree (op1, &ctx, &ht);
12329   md5_finish_ctx (&ctx, checksum_after_op1);
12330 
12331   if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12332     fold_check_failed (op1, tem);
12333 #endif
12334   return tem;
12335 }
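
/* Illustrative sketch (not part of the original file): folding a binary
   operation with constant operands, e.g.

     tree sum = fold_build2_loc (loc, PLUS_EXPR, integer_type_node,
				 build_int_cst (integer_type_node, 2),
				 build_int_cst (integer_type_node, 40));

   yields the INTEGER_CST 42 via fold_binary_loc; with non-constant
   operands an ordinary PLUS_EXPR node is built instead.  */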
12336 
12337 /* Fold a ternary tree expression with code CODE of type TYPE with
12338    operands OP0, OP1, and OP2.  Return a folded expression if
12339    successful.  Otherwise, return a tree expression with code CODE of
12340    type TYPE with operands OP0, OP1, and OP2.  */
12341 
12342 tree
12343 fold_build3_loc (location_t loc, enum tree_code code, tree type,
12344 		      tree op0, tree op1, tree op2 MEM_STAT_DECL)
12345 {
12346   tree tem;
12347 #ifdef ENABLE_FOLD_CHECKING
12348   unsigned char checksum_before_op0[16],
12349                 checksum_before_op1[16],
12350                 checksum_before_op2[16],
12351 		checksum_after_op0[16],
12352 		checksum_after_op1[16],
12353 		checksum_after_op2[16];
12354   struct md5_ctx ctx;
12355   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12356 
12357   md5_init_ctx (&ctx);
12358   fold_checksum_tree (op0, &ctx, &ht);
12359   md5_finish_ctx (&ctx, checksum_before_op0);
12360   ht.empty ();
12361 
12362   md5_init_ctx (&ctx);
12363   fold_checksum_tree (op1, &ctx, &ht);
12364   md5_finish_ctx (&ctx, checksum_before_op1);
12365   ht.empty ();
12366 
12367   md5_init_ctx (&ctx);
12368   fold_checksum_tree (op2, &ctx, &ht);
12369   md5_finish_ctx (&ctx, checksum_before_op2);
12370   ht.empty ();
12371 #endif
12372 
12373   gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12374   tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12375   if (!tem)
12376     tem = build3_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
12377 
12378 #ifdef ENABLE_FOLD_CHECKING
12379   md5_init_ctx (&ctx);
12380   fold_checksum_tree (op0, &ctx, &ht);
12381   md5_finish_ctx (&ctx, checksum_after_op0);
12382   ht.empty ();
12383 
12384   if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12385     fold_check_failed (op0, tem);
12386 
12387   md5_init_ctx (&ctx);
12388   fold_checksum_tree (op1, &ctx, &ht);
12389   md5_finish_ctx (&ctx, checksum_after_op1);
12390   ht.empty ();
12391 
12392   if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12393     fold_check_failed (op1, tem);
12394 
12395   md5_init_ctx (&ctx);
12396   fold_checksum_tree (op2, &ctx, &ht);
12397   md5_finish_ctx (&ctx, checksum_after_op2);
12398 
12399   if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12400     fold_check_failed (op2, tem);
12401 #endif
12402   return tem;
12403 }
12404 
12405 /* Fold a CALL_EXPR of type TYPE calling FN with the NARGS arguments
12406    given in ARGARRAY and a null static chain.
12407    Return a folded expression if successful.  Otherwise, return a CALL_EXPR
12408    of type TYPE from the given operands as constructed by build_call_array.  */
12409 
12410 tree
12411 fold_build_call_array_loc (location_t loc, tree type, tree fn,
12412 			   int nargs, tree *argarray)
12413 {
12414   tree tem;
12415 #ifdef ENABLE_FOLD_CHECKING
12416   unsigned char checksum_before_fn[16],
12417                 checksum_before_arglist[16],
12418 		checksum_after_fn[16],
12419 		checksum_after_arglist[16];
12420   struct md5_ctx ctx;
12421   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12422   int i;
12423 
12424   md5_init_ctx (&ctx);
12425   fold_checksum_tree (fn, &ctx, &ht);
12426   md5_finish_ctx (&ctx, checksum_before_fn);
12427   ht.empty ();
12428 
12429   md5_init_ctx (&ctx);
12430   for (i = 0; i < nargs; i++)
12431     fold_checksum_tree (argarray[i], &ctx, &ht);
12432   md5_finish_ctx (&ctx, checksum_before_arglist);
12433   ht.empty ();
12434 #endif
12435 
12436   tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
12437   if (!tem)
12438     tem = build_call_array_loc (loc, type, fn, nargs, argarray);
12439 
12440 #ifdef ENABLE_FOLD_CHECKING
12441   md5_init_ctx (&ctx);
12442   fold_checksum_tree (fn, &ctx, &ht);
12443   md5_finish_ctx (&ctx, checksum_after_fn);
12444   ht.empty ();
12445 
12446   if (memcmp (checksum_before_fn, checksum_after_fn, 16))
12447     fold_check_failed (fn, tem);
12448 
12449   md5_init_ctx (&ctx);
12450   for (i = 0; i < nargs; i++)
12451     fold_checksum_tree (argarray[i], &ctx, &ht);
12452   md5_finish_ctx (&ctx, checksum_after_arglist);
12453 
12454   if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
12455     fold_check_failed (NULL_TREE, tem);
12456 #endif
12457   return tem;
12458 }
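
/* Illustrative sketch (hypothetical caller, not part of the original
   file): folding a call to a built-in at tree-building time.

     tree fndecl = builtin_decl_explicit (BUILT_IN_FABS);
     tree arg = build_real (double_type_node, dconstm1);
     tree call = fold_build_call_array_loc (loc, double_type_node,
					    build_fold_addr_expr (fndecl),
					    1, &arg);

   Here fold_builtin_call_array can reduce fabs (-1.0) to the REAL_CST
   1.0; if no folding applies, an ordinary CALL_EXPR is built by
   build_call_array_loc.  */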
12459 
12460 /* Perform constant folding and related simplification of initializer
12461    expression EXPR.  These behave identically to "fold_buildN" but ignore
12462    potential run-time traps and exceptions that fold must preserve.  */
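
/* Illustrative sketch (hypothetical front-end use, not part of the
   original file): for a static initializer such as

     static double d = 1.0 / 3.0;

   a caller can use the *_initializer variant so the division is folded
   even when -frounding-math or -ftrapping-math is in effect:

     tree val = fold_build2_initializer_loc (loc, RDIV_EXPR,
					     double_type_node,
					     build_real (double_type_node,
							 dconst1),
					     three);

   where THREE is assumed to be a REAL_CST for 3.0.  The saved flags are
   restored afterwards by END_FOLD_INIT.  */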
12463 
12464 #define START_FOLD_INIT \
12465   int saved_signaling_nans = flag_signaling_nans;\
12466   int saved_trapping_math = flag_trapping_math;\
12467   int saved_rounding_math = flag_rounding_math;\
12468   int saved_trapv = flag_trapv;\
12469   int saved_folding_initializer = folding_initializer;\
12470   flag_signaling_nans = 0;\
12471   flag_trapping_math = 0;\
12472   flag_rounding_math = 0;\
12473   flag_trapv = 0;\
12474   folding_initializer = 1;
12475 
12476 #define END_FOLD_INIT \
12477   flag_signaling_nans = saved_signaling_nans;\
12478   flag_trapping_math = saved_trapping_math;\
12479   flag_rounding_math = saved_rounding_math;\
12480   flag_trapv = saved_trapv;\
12481   folding_initializer = saved_folding_initializer;
12482 
12483 tree
12484 fold_build1_initializer_loc (location_t loc, enum tree_code code,
12485 			     tree type, tree op)
12486 {
12487   tree result;
12488   START_FOLD_INIT;
12489 
12490   result = fold_build1_loc (loc, code, type, op);
12491 
12492   END_FOLD_INIT;
12493   return result;
12494 }
12495 
12496 tree
12497 fold_build2_initializer_loc (location_t loc, enum tree_code code,
12498 			     tree type, tree op0, tree op1)
12499 {
12500   tree result;
12501   START_FOLD_INIT;
12502 
12503   result = fold_build2_loc (loc, code, type, op0, op1);
12504 
12505   END_FOLD_INIT;
12506   return result;
12507 }
12508 
12509 tree
12510 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
12511 				       int nargs, tree *argarray)
12512 {
12513   tree result;
12514   START_FOLD_INIT;
12515 
12516   result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
12517 
12518   END_FOLD_INIT;
12519   return result;
12520 }
12521 
12522 #undef START_FOLD_INIT
12523 #undef END_FOLD_INIT
12524 
12525 /* Determine whether the first argument is a multiple of the second.  Return 0
12526    if it is not, or if we cannot easily determine it to be.
12527 
12528    An example of the sort of thing we care about (at this point; this routine
12529    could surely be made more general, and expanded to do what the *_DIV_EXPR's
12530    fold cases do now) is discovering that
12531 
12532      SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12533 
12534    is a multiple of
12535 
12536      SAVE_EXPR (J * 8)
12537 
12538    when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12539 
12540    This code also handles discovering that
12541 
12542      SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12543 
12544    is a multiple of 8 so we don't have to worry about dealing with a
12545    possible remainder.
12546 
12547    Note that we *look* inside a SAVE_EXPR only to determine how it was
12548    calculated; it is not safe for fold to do much of anything else with the
12549    internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12550    at run time.  For example, the latter example above *cannot* be implemented
12551    as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12552    evaluation time of the original SAVE_EXPR is not necessarily the same at
12553    the time the new expression is evaluated.  The only optimization of this
12554    sort that would be valid is changing
12555 
12556      SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12557 
12558    divided by 8 to
12559 
12560      SAVE_EXPR (I) * SAVE_EXPR (J)
12561 
12562    (where the same SAVE_EXPR (J) is used in the original and the
12563    transformed version).  */
12564 
12565 int
12566 multiple_of_p (tree type, const_tree top, const_tree bottom)
12567 {
12568   gimple *stmt;
12569   tree t1, op1, op2;
12570 
12571   if (operand_equal_p (top, bottom, 0))
12572     return 1;
12573 
12574   if (TREE_CODE (type) != INTEGER_TYPE)
12575     return 0;
12576 
12577   switch (TREE_CODE (top))
12578     {
12579     case BIT_AND_EXPR:
12580       /* Bitwise and provides a power of two multiple.  If the mask is
12581 	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
12582       if (!integer_pow2p (bottom))
12583 	return 0;
12584       return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12585 	      || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12586 
12587     case MULT_EXPR:
12588       if (TREE_CODE (bottom) == INTEGER_CST)
12589 	{
12590 	  op1 = TREE_OPERAND (top, 0);
12591 	  op2 = TREE_OPERAND (top, 1);
12592 	  if (TREE_CODE (op1) == INTEGER_CST)
12593 	    std::swap (op1, op2);
12594 	  if (TREE_CODE (op2) == INTEGER_CST)
12595 	    {
12596 	      if (multiple_of_p (type, op2, bottom))
12597 		return 1;
12598 	      /* Handle multiple_of_p ((x * 2 + 2) * 4, 8).  */
12599 	      if (multiple_of_p (type, bottom, op2))
12600 		{
12601 		  widest_int w = wi::sdiv_trunc (wi::to_widest (bottom),
12602 						 wi::to_widest (op2));
12603 		  if (wi::fits_to_tree_p (w, TREE_TYPE (bottom)))
12604 		    {
12605 		      op2 = wide_int_to_tree (TREE_TYPE (bottom), w);
12606 		      return multiple_of_p (type, op1, op2);
12607 		    }
12608 		}
12609 	      return multiple_of_p (type, op1, bottom);
12610 	    }
12611 	}
12612       return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12613 	      || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12614 
12615     case MINUS_EXPR:
12616       /* It is impossible to prove precisely whether op0 - op1 is a multiple
12617 	 of bottom, so be conservative here and check whether both op0 and op1
12618 	 are multiples of bottom.  Note we check the second operand first
12619 	 since it's usually simpler.  */
12620       return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12621 	      && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12622 
12623     case PLUS_EXPR:
12624       /* The same as MINUS_EXPR, but handle cases like op0 + 0xfffffffd
12625 	 as op0 - 3 if the expression has unsigned type.  For example,
12626 	 (X / 3) + 0xfffffffd is a multiple of 3, but 0xfffffffd is not.  */
12627       op1 = TREE_OPERAND (top, 1);
12628       if (TYPE_UNSIGNED (type)
12629 	  && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
12630 	op1 = fold_build1 (NEGATE_EXPR, type, op1);
12631       return (multiple_of_p (type, op1, bottom)
12632 	      && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
12633 
12634     case LSHIFT_EXPR:
12635       if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12636 	{
12637 	  op1 = TREE_OPERAND (top, 1);
12638 	  /* const_binop may not detect overflow correctly,
12639 	     so check for it explicitly here.  */
12640 	  if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
12641 			 wi::to_wide (op1))
12642 	      && (t1 = fold_convert (type,
12643 				     const_binop (LSHIFT_EXPR, size_one_node,
12644 						  op1))) != 0
12645 	      && !TREE_OVERFLOW (t1))
12646 	    return multiple_of_p (type, t1, bottom);
12647 	}
12648       return 0;
12649 
12650     case NOP_EXPR:
12651       /* Can't handle conversions from non-integral or wider integral type.  */
12652       if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12653 	  || (TYPE_PRECISION (type)
12654 	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12655 	return 0;
12656 
12657       /* fall through */
12658 
12659     case SAVE_EXPR:
12660       return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12661 
12662     case COND_EXPR:
12663       return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12664 	      && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
12665 
12666     case INTEGER_CST:
12667       if (TREE_CODE (bottom) != INTEGER_CST
12668 	  || integer_zerop (bottom)
12669 	  || (TYPE_UNSIGNED (type)
12670 	      && (tree_int_cst_sgn (top) < 0
12671 		  || tree_int_cst_sgn (bottom) < 0)))
12672 	return 0;
12673       return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
12674 				SIGNED);
12675 
12676     case SSA_NAME:
12677       if (TREE_CODE (bottom) == INTEGER_CST
12678 	  && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
12679 	  && gimple_code (stmt) == GIMPLE_ASSIGN)
12680 	{
12681 	  enum tree_code code = gimple_assign_rhs_code (stmt);
12682 
12683 	  /* Check for special cases to see if top is defined as multiple
12684 	     of bottom:
12685 
12686 	       top = (X & ~(bottom - 1) ; bottom is power of 2
12687 	       top = X & ~(bottom - 1)   ; bottom is a power of 2
12688 	     or
12689 
12690 	       Y = X % bottom
12691 	       top = X - Y.  */
12692 	  if (code == BIT_AND_EXPR
12693 	      && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
12694 	      && TREE_CODE (op2) == INTEGER_CST
12695 	      && integer_pow2p (bottom)
12696 	      && wi::multiple_of_p (wi::to_widest (op2),
12697 				    wi::to_widest (bottom), UNSIGNED))
12698 	    return 1;
12699 
12700 	  op1 = gimple_assign_rhs1 (stmt);
12701 	  if (code == MINUS_EXPR
12702 	      && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
12703 	      && TREE_CODE (op2) == SSA_NAME
12704 	      && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
12705 	      && gimple_code (stmt) == GIMPLE_ASSIGN
12706 	      && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
12707 	      && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
12708 	      && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
12709 	    return 1;
12710 	}
12711 
12712       /* fall through */
12713 
12714     default:
12715       if (POLY_INT_CST_P (top) && poly_int_tree_p (bottom))
12716 	return multiple_p (wi::to_poly_widest (top),
12717 			   wi::to_poly_widest (bottom));
12718 
12719       return 0;
12720     }
12721 }
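
/* Illustrative sketch (not part of the original file): given a tree SIZE
   of the form N * 8, i.e. a MULT_EXPR whose second operand is the
   INTEGER_CST 8, the query

     multiple_of_p (sizetype, size, size_int (8))

   returns 1 through the MULT_EXPR case above, which spots the constant
   factor 8.  Asking about size_int (16) instead returns 0 unless N itself
   can be shown to be even.  */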
12722 
12723 #define tree_expr_nonnegative_warnv_p(X, Y) \
12724   _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
12725 
12726 #define RECURSE(X) \
12727   ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
12728 
12729 /* Return true if CODE together with TYPE guarantees a non-negative value.  */
12730 
12731 static bool
12732 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
12733 {
12734   if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
12735       && truth_value_p (code))
12736     /* Truth values evaluate to 0 or 1, which is nonnegative unless we
12737        have a signed:1 type (whose values are -1 and 0).  */
12738     return true;
12739   return false;
12740 }
12741 
12742 /* Return true if (CODE OP0) is known to be non-negative.  If the return
12743    value is based on the assumption that signed overflow is undefined,
12744    set *STRICT_OVERFLOW_P to true; otherwise, don't change
12745    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
12746 
12747 bool
12748 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12749 				bool *strict_overflow_p, int depth)
12750 {
12751   if (TYPE_UNSIGNED (type))
12752     return true;
12753 
12754   switch (code)
12755     {
12756     case ABS_EXPR:
12757       /* We can't return 1 if flag_wrapv is set because
12758 	 ABS_EXPR<INT_MIN> = INT_MIN.  */
12759       if (!ANY_INTEGRAL_TYPE_P (type))
12760 	return true;
12761       if (TYPE_OVERFLOW_UNDEFINED (type))
12762 	{
12763 	  *strict_overflow_p = true;
12764 	  return true;
12765 	}
12766       break;
12767 
12768     case NON_LVALUE_EXPR:
12769     case FLOAT_EXPR:
12770     case FIX_TRUNC_EXPR:
12771       return RECURSE (op0);
12772 
12773     CASE_CONVERT:
12774       {
12775 	tree inner_type = TREE_TYPE (op0);
12776 	tree outer_type = type;
12777 
12778 	if (TREE_CODE (outer_type) == REAL_TYPE)
12779 	  {
12780 	    if (TREE_CODE (inner_type) == REAL_TYPE)
12781 	      return RECURSE (op0);
12782 	    if (INTEGRAL_TYPE_P (inner_type))
12783 	      {
12784 		if (TYPE_UNSIGNED (inner_type))
12785 		  return true;
12786 		return RECURSE (op0);
12787 	      }
12788 	  }
12789 	else if (INTEGRAL_TYPE_P (outer_type))
12790 	  {
12791 	    if (TREE_CODE (inner_type) == REAL_TYPE)
12792 	      return RECURSE (op0);
12793 	    if (INTEGRAL_TYPE_P (inner_type))
12794 	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12795 		      && TYPE_UNSIGNED (inner_type);
12796 	  }
12797       }
12798       break;
12799 
12800     default:
12801       return tree_simple_nonnegative_warnv_p (code, type);
12802     }
12803 
12804   /* We don't know sign of `t', so be conservative and return false.  */
12805   return false;
12806 }
12807 
12808 /* Return true if (CODE OP0 OP1) is known to be non-negative.  If the return
12809    value is based on the assumption that signed overflow is undefined,
12810    set *STRICT_OVERFLOW_P to true; otherwise, don't change
12811    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
12812 
12813 bool
12814 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12815 				 tree op1, bool *strict_overflow_p,
12816 				 int depth)
12817 {
12818   if (TYPE_UNSIGNED (type))
12819     return true;
12820 
12821   switch (code)
12822     {
12823     case POINTER_PLUS_EXPR:
12824     case PLUS_EXPR:
12825       if (FLOAT_TYPE_P (type))
12826 	return RECURSE (op0) && RECURSE (op1);
12827 
12828       /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12829 	 both unsigned and at least 2 bits shorter than the result.  */
12830       if (TREE_CODE (type) == INTEGER_TYPE
12831 	  && TREE_CODE (op0) == NOP_EXPR
12832 	  && TREE_CODE (op1) == NOP_EXPR)
12833 	{
12834 	  tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
12835 	  tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
12836 	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12837 	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12838 	    {
12839 	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
12840 				       TYPE_PRECISION (inner2)) + 1;
12841 	      return prec < TYPE_PRECISION (type);
12842 	    }
12843 	}
12844       break;
12845 
12846     case MULT_EXPR:
12847       if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12848 	{
12849 	  /* x * x is always non-negative for floating point x
12850 	     or without overflow.  */
12851 	  if (operand_equal_p (op0, op1, 0)
12852 	      || (RECURSE (op0) && RECURSE (op1)))
12853 	    {
12854 	      if (ANY_INTEGRAL_TYPE_P (type)
12855 		  && TYPE_OVERFLOW_UNDEFINED (type))
12856 		*strict_overflow_p = true;
12857 	      return true;
12858 	    }
12859 	}
12860 
12861       /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12862 	 both unsigned and their combined precision is less than that of the result.  */
12863       if (TREE_CODE (type) == INTEGER_TYPE
12864 	  && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
12865 	  && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
12866 	{
12867 	  tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
12868 	    ? TREE_TYPE (TREE_OPERAND (op0, 0))
12869 	    : TREE_TYPE (op0);
12870 	  tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
12871 	    ? TREE_TYPE (TREE_OPERAND (op1, 0))
12872 	    : TREE_TYPE (op1);
12873 
12874 	  bool unsigned0 = TYPE_UNSIGNED (inner0);
12875 	  bool unsigned1 = TYPE_UNSIGNED (inner1);
12876 
12877 	  if (TREE_CODE (op0) == INTEGER_CST)
12878 	    unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
12879 
12880 	  if (TREE_CODE (op1) == INTEGER_CST)
12881 	    unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
12882 
12883 	  if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
12884 	      && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
12885 	    {
12886 	      unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
12887 		? tree_int_cst_min_precision (op0, UNSIGNED)
12888 		: TYPE_PRECISION (inner0);
12889 
12890 	      unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
12891 		? tree_int_cst_min_precision (op1, UNSIGNED)
12892 		: TYPE_PRECISION (inner1);
12893 
12894 	      return precision0 + precision1 < TYPE_PRECISION (type);
12895 	    }
12896 	}
12897       return false;
12898 
12899     case BIT_AND_EXPR:
12900     case MAX_EXPR:
12901       return RECURSE (op0) || RECURSE (op1);
12902 
12903     case BIT_IOR_EXPR:
12904     case BIT_XOR_EXPR:
12905     case MIN_EXPR:
12906     case RDIV_EXPR:
12907     case TRUNC_DIV_EXPR:
12908     case CEIL_DIV_EXPR:
12909     case FLOOR_DIV_EXPR:
12910     case ROUND_DIV_EXPR:
12911       return RECURSE (op0) && RECURSE (op1);
12912 
12913     case TRUNC_MOD_EXPR:
12914       return RECURSE (op0);
12915 
12916     case FLOOR_MOD_EXPR:
12917       return RECURSE (op1);
12918 
12919     case CEIL_MOD_EXPR:
12920     case ROUND_MOD_EXPR:
12921     default:
12922       return tree_simple_nonnegative_warnv_p (code, type);
12923     }
12924 
12925   /* We don't know sign of `t', so be conservative and return false.  */
12926   return false;
12927 }
12928 
12929 /* Return true if T is known to be non-negative.  If the return
12930    value is based on the assumption that signed overflow is undefined,
12931    set *STRICT_OVERFLOW_P to true; otherwise, don't change
12932    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
12933 
12934 bool
12935 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
12936 {
12937   if (TYPE_UNSIGNED (TREE_TYPE (t)))
12938     return true;
12939 
12940   switch (TREE_CODE (t))
12941     {
12942     case INTEGER_CST:
12943       return tree_int_cst_sgn (t) >= 0;
12944 
12945     case REAL_CST:
12946       return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12947 
12948     case FIXED_CST:
12949       return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
12950 
12951     case COND_EXPR:
12952       return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
12953 
12954     case SSA_NAME:
12955       /* Limit the depth of recursion to avoid quadratic behavior.
12956 	 This is expected to catch almost all occurrences in practice.
12957 	 If this code misses important cases that unbounded recursion
12958 	 would not, passes that need this information could be revised
12959 	 to provide it through dataflow propagation.  */
12960       return (!name_registered_for_update_p (t)
12961 	      && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
12962 	      && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
12963 						  strict_overflow_p, depth));
12964 
12965     default:
12966       return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
12967     }
12968 }
12969 
12970 /* Return true if a call to FN with result type TYPE and arguments ARG0
12971    and ARG1 is known to be non-negative.  If the return value is based on
12972    the assumption that signed overflow is undefined, set *STRICT_OVERFLOW_P
12973    to true; otherwise, don't change it.  DEPTH is the current nesting depth.  */
12974 
12975 bool
12976 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
12977 			       bool *strict_overflow_p, int depth)
12978 {
12979   switch (fn)
12980     {
12981     CASE_CFN_ACOS:
12982     CASE_CFN_ACOSH:
12983     CASE_CFN_CABS:
12984     CASE_CFN_COSH:
12985     CASE_CFN_ERFC:
12986     CASE_CFN_EXP:
12987     CASE_CFN_EXP10:
12988     CASE_CFN_EXP2:
12989     CASE_CFN_FABS:
12990     CASE_CFN_FDIM:
12991     CASE_CFN_HYPOT:
12992     CASE_CFN_POW10:
12993     CASE_CFN_FFS:
12994     CASE_CFN_PARITY:
12995     CASE_CFN_POPCOUNT:
12996     CASE_CFN_CLZ:
12997     CASE_CFN_CLRSB:
12998     case CFN_BUILT_IN_BSWAP32:
12999     case CFN_BUILT_IN_BSWAP64:
13000       /* Always true.  */
13001       return true;
13002 
13003     CASE_CFN_SQRT:
13004     CASE_CFN_SQRT_FN:
13005       /* sqrt(-0.0) is -0.0.  */
13006       if (!HONOR_SIGNED_ZEROS (element_mode (type)))
13007 	return true;
13008       return RECURSE (arg0);
13009 
13010     CASE_CFN_ASINH:
13011     CASE_CFN_ATAN:
13012     CASE_CFN_ATANH:
13013     CASE_CFN_CBRT:
13014     CASE_CFN_CEIL:
13015     CASE_CFN_CEIL_FN:
13016     CASE_CFN_ERF:
13017     CASE_CFN_EXPM1:
13018     CASE_CFN_FLOOR:
13019     CASE_CFN_FLOOR_FN:
13020     CASE_CFN_FMOD:
13021     CASE_CFN_FREXP:
13022     CASE_CFN_ICEIL:
13023     CASE_CFN_IFLOOR:
13024     CASE_CFN_IRINT:
13025     CASE_CFN_IROUND:
13026     CASE_CFN_LCEIL:
13027     CASE_CFN_LDEXP:
13028     CASE_CFN_LFLOOR:
13029     CASE_CFN_LLCEIL:
13030     CASE_CFN_LLFLOOR:
13031     CASE_CFN_LLRINT:
13032     CASE_CFN_LLROUND:
13033     CASE_CFN_LRINT:
13034     CASE_CFN_LROUND:
13035     CASE_CFN_MODF:
13036     CASE_CFN_NEARBYINT:
13037     CASE_CFN_NEARBYINT_FN:
13038     CASE_CFN_RINT:
13039     CASE_CFN_RINT_FN:
13040     CASE_CFN_ROUND:
13041     CASE_CFN_ROUND_FN:
13042     CASE_CFN_SCALB:
13043     CASE_CFN_SCALBLN:
13044     CASE_CFN_SCALBN:
13045     CASE_CFN_SIGNBIT:
13046     CASE_CFN_SIGNIFICAND:
13047     CASE_CFN_SINH:
13048     CASE_CFN_TANH:
13049     CASE_CFN_TRUNC:
13050     CASE_CFN_TRUNC_FN:
13051       /* True if the 1st argument is nonnegative.  */
13052       return RECURSE (arg0);
13053 
13054     CASE_CFN_FMAX:
13055     CASE_CFN_FMAX_FN:
13056       /* True if the 1st OR 2nd arguments are nonnegative.  */
13057       return RECURSE (arg0) || RECURSE (arg1);
13058 
13059     CASE_CFN_FMIN:
13060     CASE_CFN_FMIN_FN:
13061       /* True if the 1st AND 2nd arguments are nonnegative.  */
13062       return RECURSE (arg0) && RECURSE (arg1);
13063 
13064     CASE_CFN_COPYSIGN:
13065     CASE_CFN_COPYSIGN_FN:
13066       /* True if the 2nd argument is nonnegative.  */
13067       return RECURSE (arg1);
13068 
13069     CASE_CFN_POWI:
13070       /* True if the 1st argument is nonnegative or the second
13071 	 argument is an even integer.  */
13072       if (TREE_CODE (arg1) == INTEGER_CST
13073 	  && (TREE_INT_CST_LOW (arg1) & 1) == 0)
13074 	return true;
13075       return RECURSE (arg0);
13076 
13077     CASE_CFN_POW:
13078       /* True if the 1st argument is nonnegative or the second
13079 	 argument is an even integer valued real.  */
13080       if (TREE_CODE (arg1) == REAL_CST)
13081 	{
13082 	  REAL_VALUE_TYPE c;
13083 	  HOST_WIDE_INT n;
13084 
13085 	  c = TREE_REAL_CST (arg1);
13086 	  n = real_to_integer (&c);
13087 	  if ((n & 1) == 0)
13088 	    {
13089 	      REAL_VALUE_TYPE cint;
13090 	      real_from_integer (&cint, VOIDmode, n, SIGNED);
13091 	      if (real_identical (&c, &cint))
13092 		return true;
13093 	    }
13094 	}
13095       return RECURSE (arg0);
13096 
13097     default:
13098       break;
13099     }
13100   return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
13101 }
13102 
13103 /* Return true if T is known to be non-negative.  If the return
13104    value is based on the assumption that signed overflow is undefined,
13105    set *STRICT_OVERFLOW_P to true; otherwise, don't change
13106    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
13107 
13108 static bool
13109 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13110 {
13111   enum tree_code code = TREE_CODE (t);
13112   if (TYPE_UNSIGNED (TREE_TYPE (t)))
13113     return true;
13114 
13115   switch (code)
13116     {
13117     case TARGET_EXPR:
13118       {
13119 	tree temp = TARGET_EXPR_SLOT (t);
13120 	t = TARGET_EXPR_INITIAL (t);
13121 
13122 	/* If the initializer is non-void, then it's a normal expression
13123 	   that will be assigned to the slot.  */
13124 	if (!VOID_TYPE_P (t))
13125 	  return RECURSE (t);
13126 
13127 	/* Otherwise, the initializer sets the slot in some way.  One common
13128 	   way is an assignment statement at the end of the initializer.  */
13129 	while (1)
13130 	  {
13131 	    if (TREE_CODE (t) == BIND_EXPR)
13132 	      t = expr_last (BIND_EXPR_BODY (t));
13133 	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13134 		     || TREE_CODE (t) == TRY_CATCH_EXPR)
13135 	      t = expr_last (TREE_OPERAND (t, 0));
13136 	    else if (TREE_CODE (t) == STATEMENT_LIST)
13137 	      t = expr_last (t);
13138 	    else
13139 	      break;
13140 	  }
13141 	if (TREE_CODE (t) == MODIFY_EXPR
13142 	    && TREE_OPERAND (t, 0) == temp)
13143 	  return RECURSE (TREE_OPERAND (t, 1));
13144 
13145 	return false;
13146       }
13147 
13148     case CALL_EXPR:
13149       {
13150 	tree arg0 = call_expr_nargs (t) > 0 ?  CALL_EXPR_ARG (t, 0) : NULL_TREE;
13151 	tree arg1 = call_expr_nargs (t) > 1 ?  CALL_EXPR_ARG (t, 1) : NULL_TREE;
13152 
13153 	return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
13154 					      get_call_combined_fn (t),
13155 					      arg0,
13156 					      arg1,
13157 					      strict_overflow_p, depth);
13158       }
13159     case COMPOUND_EXPR:
13160     case MODIFY_EXPR:
13161       return RECURSE (TREE_OPERAND (t, 1));
13162 
13163     case BIND_EXPR:
13164       return RECURSE (expr_last (TREE_OPERAND (t, 1)));
13165 
13166     case SAVE_EXPR:
13167       return RECURSE (TREE_OPERAND (t, 0));
13168 
13169     default:
13170       return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13171     }
13172 }
13173 
13174 #undef RECURSE
13175 #undef tree_expr_nonnegative_warnv_p
13176 
13177 /* Return true if T is known to be non-negative.  If the return
13178    value is based on the assumption that signed overflow is undefined,
13179    set *STRICT_OVERFLOW_P to true; otherwise, don't change
13180    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
13181 
13182 bool
13183 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13184 {
13185   enum tree_code code;
13186   if (t == error_mark_node)
13187     return false;
13188 
13189   code = TREE_CODE (t);
13190   switch (TREE_CODE_CLASS (code))
13191     {
13192     case tcc_binary:
13193     case tcc_comparison:
13194       return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13195 					      TREE_TYPE (t),
13196 					      TREE_OPERAND (t, 0),
13197 					      TREE_OPERAND (t, 1),
13198 					      strict_overflow_p, depth);
13199 
13200     case tcc_unary:
13201       return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13202 					     TREE_TYPE (t),
13203 					     TREE_OPERAND (t, 0),
13204 					     strict_overflow_p, depth);
13205 
13206     case tcc_constant:
13207     case tcc_declaration:
13208     case tcc_reference:
13209       return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13210 
13211     default:
13212       break;
13213     }
13214 
13215   switch (code)
13216     {
13217     case TRUTH_AND_EXPR:
13218     case TRUTH_OR_EXPR:
13219     case TRUTH_XOR_EXPR:
13220       return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13221 					      TREE_TYPE (t),
13222 					      TREE_OPERAND (t, 0),
13223 					      TREE_OPERAND (t, 1),
13224 					      strict_overflow_p, depth);
13225     case TRUTH_NOT_EXPR:
13226       return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13227 					     TREE_TYPE (t),
13228 					     TREE_OPERAND (t, 0),
13229 					     strict_overflow_p, depth);
13230 
13231     case COND_EXPR:
13232     case CONSTRUCTOR:
13233     case OBJ_TYPE_REF:
13234     case ASSERT_EXPR:
13235     case ADDR_EXPR:
13236     case WITH_SIZE_EXPR:
13237     case SSA_NAME:
13238       return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13239 
13240     default:
13241       return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
13242     }
13243 }
13244 
13245 /* Return true if `t' is known to be non-negative.  Handle warnings
13246    about undefined signed overflow.  */
13247 
13248 bool
13249 tree_expr_nonnegative_p (tree t)
13250 {
13251   bool ret, strict_overflow_p;
13252 
13253   strict_overflow_p = false;
13254   ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13255   if (strict_overflow_p)
13256     fold_overflow_warning (("assuming signed overflow does not occur when "
13257 			    "determining that expression is always "
13258 			    "non-negative"),
13259 			   WARN_STRICT_OVERFLOW_MISC);
13260   return ret;
13261 }
13262 
13263 
13264 /* Return true when (CODE OP0) is an address and is known to be nonzero.
13265    For floating point we further ensure that T is not denormal.
13266    Similar logic is present in nonzero_address in rtlanal.h.
13267 
13268    If the return value is based on the assumption that signed overflow
13269    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13270    change *STRICT_OVERFLOW_P.  */
13271 
13272 bool
13273 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
13274 				 bool *strict_overflow_p)
13275 {
13276   switch (code)
13277     {
13278     case ABS_EXPR:
13279       return tree_expr_nonzero_warnv_p (op0,
13280 					strict_overflow_p);
13281 
13282     case NOP_EXPR:
13283       {
13284 	tree inner_type = TREE_TYPE (op0);
13285 	tree outer_type = type;
13286 
13287 	return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13288 		&& tree_expr_nonzero_warnv_p (op0,
13289 					      strict_overflow_p));
13290       }
13291       break;
13292 
13293     case NON_LVALUE_EXPR:
13294       return tree_expr_nonzero_warnv_p (op0,
13295 					strict_overflow_p);
13296 
13297     default:
13298       break;
13299   }
13300 
13301   return false;
13302 }
13303 
13304 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
13305    For floating point we further ensure that T is not denormal.
13306    Similar logic is present in nonzero_address in rtlanal.h.
13307 
13308    If the return value is based on the assumption that signed overflow
13309    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13310    change *STRICT_OVERFLOW_P.  */
13311 
13312 bool
13313 tree_binary_nonzero_warnv_p (enum tree_code code,
13314 			     tree type,
13315 			     tree op0,
13316 			     tree op1, bool *strict_overflow_p)
13317 {
13318   bool sub_strict_overflow_p;
13319   switch (code)
13320     {
13321     case POINTER_PLUS_EXPR:
13322     case PLUS_EXPR:
13323       if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
13324 	{
13325 	  /* In the presence of negative values it is hard
13326 	     to say anything.  */
13327 	  sub_strict_overflow_p = false;
13328 	  if (!tree_expr_nonnegative_warnv_p (op0,
13329 					      &sub_strict_overflow_p)
13330 	      || !tree_expr_nonnegative_warnv_p (op1,
13331 						 &sub_strict_overflow_p))
13332 	    return false;
13333 	  /* One of the operands must be positive and the other non-negative.  */
13334 	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
13335 	     overflows, on a twos-complement machine the sum of two
13336 	     nonnegative numbers can never be zero.  */
13337 	  return (tree_expr_nonzero_warnv_p (op0,
13338 					     strict_overflow_p)
13339 		  || tree_expr_nonzero_warnv_p (op1,
13340 						strict_overflow_p));
13341 	}
13342       break;
13343 
13344     case MULT_EXPR:
13345       if (TYPE_OVERFLOW_UNDEFINED (type))
13346 	{
13347 	  if (tree_expr_nonzero_warnv_p (op0,
13348 					 strict_overflow_p)
13349 	      && tree_expr_nonzero_warnv_p (op1,
13350 					    strict_overflow_p))
13351 	    {
13352 	      *strict_overflow_p = true;
13353 	      return true;
13354 	    }
13355 	}
13356       break;
13357 
13358     case MIN_EXPR:
13359       sub_strict_overflow_p = false;
13360       if (tree_expr_nonzero_warnv_p (op0,
13361 				     &sub_strict_overflow_p)
13362 	  && tree_expr_nonzero_warnv_p (op1,
13363 					&sub_strict_overflow_p))
13364 	{
13365 	  if (sub_strict_overflow_p)
13366 	    *strict_overflow_p = true;
13367 	}
13368       break;
13369 
13370     case MAX_EXPR:
13371       sub_strict_overflow_p = false;
13372       if (tree_expr_nonzero_warnv_p (op0,
13373 				     &sub_strict_overflow_p))
13374 	{
13375 	  if (sub_strict_overflow_p)
13376 	    *strict_overflow_p = true;
13377 
13378 	  /* When both operands are nonzero, MAX must be too.  */
13379 	  if (tree_expr_nonzero_warnv_p (op1,
13380 					 strict_overflow_p))
13381 	    return true;
13382 
13383 	  /* MAX where operand 0 is positive is positive.  */
13384 	  return tree_expr_nonnegative_warnv_p (op0,
13385 					       strict_overflow_p);
13386 	}
13387       /* MAX where operand 1 is positive is positive.  */
13388       else if (tree_expr_nonzero_warnv_p (op1,
13389 					  &sub_strict_overflow_p)
13390 	       && tree_expr_nonnegative_warnv_p (op1,
13391 						 &sub_strict_overflow_p))
13392 	{
13393 	  if (sub_strict_overflow_p)
13394 	    *strict_overflow_p = true;
13395 	  return true;
13396 	}
13397       break;
13398 
13399     case BIT_IOR_EXPR:
13400       return (tree_expr_nonzero_warnv_p (op1,
13401 					 strict_overflow_p)
13402 	      || tree_expr_nonzero_warnv_p (op0,
13403 					    strict_overflow_p));
13404 
13405     default:
13406       break;
13407   }
13408 
13409   return false;
13410 }
13411 
13412 /* Return true when T is an address and is known to be nonzero.
13413    For floating point we further ensure that T is not denormal.
13414    Similar logic is present in nonzero_address in rtlanal.h.
13415 
13416    If the return value is based on the assumption that signed overflow
13417    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13418    change *STRICT_OVERFLOW_P.  */
13419 
13420 bool
13421 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13422 {
13423   bool sub_strict_overflow_p;
13424   switch (TREE_CODE (t))
13425     {
13426     case INTEGER_CST:
13427       return !integer_zerop (t);
13428 
13429     case ADDR_EXPR:
13430       {
13431 	tree base = TREE_OPERAND (t, 0);
13432 
13433 	if (!DECL_P (base))
13434 	  base = get_base_address (base);
13435 
13436 	if (base && TREE_CODE (base) == TARGET_EXPR)
13437 	  base = TARGET_EXPR_SLOT (base);
13438 
13439 	if (!base)
13440 	  return false;
13441 
13442 	/* For objects in the symbol table, check if we know they are non-zero.
13443 	   Don't do anything for variables and functions before symtab is built;
13444 	   it is quite possible that they will be declared weak later.  */
13445 	int nonzero_addr = maybe_nonzero_address (base);
13446 	if (nonzero_addr >= 0)
13447 	  return nonzero_addr;
13448 
13449 	/* Constants are never weak.  */
13450 	if (CONSTANT_CLASS_P (base))
13451 	  return true;
13452 
13453 	return false;
13454       }
13455 
13456     case COND_EXPR:
13457       sub_strict_overflow_p = false;
13458       if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13459 				     &sub_strict_overflow_p)
13460 	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13461 					&sub_strict_overflow_p))
13462 	{
13463 	  if (sub_strict_overflow_p)
13464 	    *strict_overflow_p = true;
13465 	  return true;
13466 	}
13467       break;
13468 
13469     case SSA_NAME:
13470       if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
13471 	break;
13472       return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
13473 
13474     default:
13475       break;
13476     }
13477   return false;
13478 }
13479 
13480 #define integer_valued_real_p(X) \
13481   _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13482 
13483 #define RECURSE(X) \
13484   ((integer_valued_real_p) (X, depth + 1))
13485 
13486 /* Return true if the floating point result of (CODE OP0) has an
13487    integer value.  We also allow +Inf, -Inf and NaN to be considered
13488    integer values. Return false for signaling NaN.
13489 
13490    DEPTH is the current nesting depth of the query.  */
13491 
13492 bool
13493 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
13494 {
13495   switch (code)
13496     {
13497     case FLOAT_EXPR:
13498       return true;
13499 
13500     case ABS_EXPR:
13501       return RECURSE (op0);
13502 
13503     CASE_CONVERT:
13504       {
13505 	tree type = TREE_TYPE (op0);
13506 	if (TREE_CODE (type) == INTEGER_TYPE)
13507 	  return true;
13508 	if (TREE_CODE (type) == REAL_TYPE)
13509 	  return RECURSE (op0);
13510 	break;
13511       }
13512 
13513     default:
13514       break;
13515     }
13516   return false;
13517 }
13518 
13519 /* Return true if the floating point result of (CODE OP0 OP1) has an
13520    integer value.  We also allow +Inf, -Inf and NaN to be considered
13521    integer values. Return false for signaling NaN.
13522 
13523    DEPTH is the current nesting depth of the query.  */
13524 
13525 bool
13526 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
13527 {
13528   switch (code)
13529     {
13530     case PLUS_EXPR:
13531     case MINUS_EXPR:
13532     case MULT_EXPR:
13533     case MIN_EXPR:
13534     case MAX_EXPR:
13535       return RECURSE (op0) && RECURSE (op1);
13536 
13537     default:
13538       break;
13539     }
13540   return false;
13541 }
13542 
13543 /* Return true if the floating point result of calling FN with arguments
13544    ARG0 and ARG1 has an integer value.  We also allow +Inf, -Inf and NaN to be
13545    considered integer values.  Return false for signaling NaN.  If FN
13546    takes fewer than 2 arguments, the remaining ARGn are null.
13547 
13548    DEPTH is the current nesting depth of the query.  */
13549 
13550 bool
13551 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
13552 {
13553   switch (fn)
13554     {
13555     CASE_CFN_CEIL:
13556     CASE_CFN_CEIL_FN:
13557     CASE_CFN_FLOOR:
13558     CASE_CFN_FLOOR_FN:
13559     CASE_CFN_NEARBYINT:
13560     CASE_CFN_NEARBYINT_FN:
13561     CASE_CFN_RINT:
13562     CASE_CFN_RINT_FN:
13563     CASE_CFN_ROUND:
13564     CASE_CFN_ROUND_FN:
13565     CASE_CFN_TRUNC:
13566     CASE_CFN_TRUNC_FN:
13567       return true;
13568 
13569     CASE_CFN_FMIN:
13570     CASE_CFN_FMIN_FN:
13571     CASE_CFN_FMAX:
13572     CASE_CFN_FMAX_FN:
13573       return RECURSE (arg0) && RECURSE (arg1);
13574 
13575     default:
13576       break;
13577     }
13578   return false;
13579 }
13580 
13581 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
13582    has an integer value.  We also allow +Inf, -Inf and NaN to be
13583    considered integer values. Return false for signaling NaN.
13584 
13585    DEPTH is the current nesting depth of the query.  */
13586 
13587 bool
13588 integer_valued_real_single_p (tree t, int depth)
13589 {
13590   switch (TREE_CODE (t))
13591     {
13592     case REAL_CST:
13593       return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
13594 
13595     case COND_EXPR:
13596       return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13597 
13598     case SSA_NAME:
13599       /* Limit the depth of recursion to avoid quadratic behavior.
13600 	 This is expected to catch almost all occurrences in practice.
13601 	 If this code misses important cases that unbounded recursion
13602 	 would not, passes that need this information could be revised
13603 	 to provide it through dataflow propagation.  */
13604       return (!name_registered_for_update_p (t)
13605 	      && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
13606 	      && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
13607 						    depth));
13608 
13609     default:
13610       break;
13611     }
13612   return false;
13613 }
13614 
13615 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
13616    has an integer value.  We also allow +Inf, -Inf and NaN to be
13617    considered integer values. Return false for signaling NaN.
13618 
13619    DEPTH is the current nesting depth of the query.  */
13620 
13621 static bool
13622 integer_valued_real_invalid_p (tree t, int depth)
13623 {
13624   switch (TREE_CODE (t))
13625     {
13626     case COMPOUND_EXPR:
13627     case MODIFY_EXPR:
13628     case BIND_EXPR:
13629       return RECURSE (TREE_OPERAND (t, 1));
13630 
13631     case SAVE_EXPR:
13632       return RECURSE (TREE_OPERAND (t, 0));
13633 
13634     default:
13635       break;
13636     }
13637   return false;
13638 }
13639 
13640 #undef RECURSE
13641 #undef integer_valued_real_p
13642 
13643 /* Return true if the floating point expression T has an integer value.
13644    We also allow +Inf, -Inf and NaN to be considered integer values.
13645    Return false for signaling NaN.
13646 
13647    DEPTH is the current nesting depth of the query.  */
13648 
13649 bool
13650 integer_valued_real_p (tree t, int depth)
13651 {
13652   if (t == error_mark_node)
13653     return false;
13654 
13655   tree_code code = TREE_CODE (t);
13656   switch (TREE_CODE_CLASS (code))
13657     {
13658     case tcc_binary:
13659     case tcc_comparison:
13660       return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
13661 					   TREE_OPERAND (t, 1), depth);
13662 
13663     case tcc_unary:
13664       return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
13665 
13666     case tcc_constant:
13667     case tcc_declaration:
13668     case tcc_reference:
13669       return integer_valued_real_single_p (t, depth);
13670 
13671     default:
13672       break;
13673     }
13674 
13675   switch (code)
13676     {
13677     case COND_EXPR:
13678     case SSA_NAME:
13679       return integer_valued_real_single_p (t, depth);
13680 
13681     case CALL_EXPR:
13682       {
13683 	tree arg0 = (call_expr_nargs (t) > 0
13684 		     ? CALL_EXPR_ARG (t, 0)
13685 		     : NULL_TREE);
13686 	tree arg1 = (call_expr_nargs (t) > 1
13687 		     ? CALL_EXPR_ARG (t, 1)
13688 		     : NULL_TREE);
13689 	return integer_valued_real_call_p (get_call_combined_fn (t),
13690 					   arg0, arg1, depth);
13691       }
13692 
13693     default:
13694       return integer_valued_real_invalid_p (t, depth);
13695     }
13696 }
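
/* Illustrative sketch (not part of the original file): a simplification
   that removes a redundant rounding call might test

     if (integer_valued_real_p (arg, 0))
       return arg;

   For ARG built as a FLOAT_EXPR converting an integer to double, the
   FLOAT_EXPR case above returns true, so a call such as trunc (arg) can
   safely be replaced by ARG.  */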
13697 
13698 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13699    attempt to fold the expression to a constant without modifying TYPE,
13700    OP0 or OP1.
13701 
13702    If the expression could be simplified to a constant, then return
13703    the constant.  If the expression would not be simplified to a
13704    constant, then return NULL_TREE.  */
13705 
13706 tree
13707 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13708 {
13709   tree tem = fold_binary (code, type, op0, op1);
13710   return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13711 }
13712 
13713 /* Given the components of a unary expression CODE, TYPE and OP0,
13714    attempt to fold the expression to a constant without modifying
13715    TYPE or OP0.
13716 
13717    If the expression could be simplified to a constant, then return
13718    the constant.  If the expression would not be simplified to a
13719    constant, then return NULL_TREE.  */
13720 
13721 tree
13722 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13723 {
13724   tree tem = fold_unary (code, type, op0);
13725   return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13726 }
13727 
13728 /* If EXP represents referencing an element in a constant string
13729    (either via pointer arithmetic or array indexing), return the
13730    tree representing the value accessed, otherwise return NULL.  */
13731 
13732 tree
13733 fold_read_from_constant_string (tree exp)
13734 {
13735   if ((TREE_CODE (exp) == INDIRECT_REF
13736        || TREE_CODE (exp) == ARRAY_REF)
13737       && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13738     {
13739       tree exp1 = TREE_OPERAND (exp, 0);
13740       tree index;
13741       tree string;
13742       location_t loc = EXPR_LOCATION (exp);
13743 
13744       if (TREE_CODE (exp) == INDIRECT_REF)
13745 	string = string_constant (exp1, &index);
13746       else
13747 	{
13748 	  tree low_bound = array_ref_low_bound (exp);
13749 	  index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
13750 
13751 	  /* Optimize the special-case of a zero lower bound.
13752 
13753 	     We convert the low_bound to sizetype to avoid some problems
13754 	     with constant folding.  (E.g. suppose the lower bound is 1,
13755 	     and its mode is QI.  Without the conversion, (ARRAY
13756 	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13757 	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
13758 	  if (! integer_zerop (low_bound))
13759 	    index = size_diffop_loc (loc, index,
13760 				 fold_convert_loc (loc, sizetype, low_bound));
13761 
13762 	  string = exp1;
13763 	}
13764 
13765       scalar_int_mode char_mode;
13766       if (string
13767 	  && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13768 	  && TREE_CODE (string) == STRING_CST
13769 	  && TREE_CODE (index) == INTEGER_CST
13770 	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13771 	  && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))),
13772 			  &char_mode)
13773 	  && GET_MODE_SIZE (char_mode) == 1)
13774 	return build_int_cst_type (TREE_TYPE (exp),
13775 				   (TREE_STRING_POINTER (string)
13776 				    [TREE_INT_CST_LOW (index)]));
13777     }
13778   return NULL;
13779 }
13780 
13781 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13782    an integer constant, real, or fixed-point constant.
13783 
13784    TYPE is the type of the result.  */
13785 
13786 static tree
13787 fold_negate_const (tree arg0, tree type)
13788 {
13789   tree t = NULL_TREE;
13790 
13791   switch (TREE_CODE (arg0))
13792     {
13793     case REAL_CST:
13794       t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13795       break;
13796 
13797     case FIXED_CST:
13798       {
13799         FIXED_VALUE_TYPE f;
13800         bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
13801 					    &(TREE_FIXED_CST (arg0)), NULL,
13802 					    TYPE_SATURATING (type));
13803 	t = build_fixed (type, f);
13804 	/* Propagate overflow flags.  */
13805 	if (overflow_p | TREE_OVERFLOW (arg0))
13806 	  TREE_OVERFLOW (t) = 1;
13807 	break;
13808       }
13809 
13810     default:
13811       if (poly_int_tree_p (arg0))
13812 	{
13813 	  bool overflow;
13814 	  poly_wide_int res = wi::neg (wi::to_poly_wide (arg0), &overflow);
13815 	  t = force_fit_type (type, res, 1,
13816 			      (overflow && ! TYPE_UNSIGNED (type))
13817 			      || TREE_OVERFLOW (arg0));
13818 	  break;
13819 	}
13820 
13821       gcc_unreachable ();
13822     }
13823 
13824   return t;
13825 }
13826 
13827 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13828    an integer constant or real constant.
13829 
13830    TYPE is the type of the result.  */
13831 
13832 tree
13833 fold_abs_const (tree arg0, tree type)
13834 {
13835   tree t = NULL_TREE;
13836 
13837   switch (TREE_CODE (arg0))
13838     {
13839     case INTEGER_CST:
13840       {
13841         /* If the value is unsigned or non-negative, then the absolute value
13842 	   is the same as the ordinary value.  */
13843 	if (!wi::neg_p (wi::to_wide (arg0), TYPE_SIGN (type)))
13844 	  t = arg0;
13845 
13846 	/* If the value is negative, then the absolute value is
13847 	   its negation.  */
13848 	else
13849 	  {
13850 	    bool overflow;
13851 	    wide_int val = wi::neg (wi::to_wide (arg0), &overflow);
13852 	    t = force_fit_type (type, val, -1,
13853 				overflow | TREE_OVERFLOW (arg0));
13854 	  }
13855       }
13856       break;
13857 
13858     case REAL_CST:
13859       if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13860 	t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13861       else
13862 	t =  arg0;
13863 	t = arg0;
13864 
13865     default:
13866       gcc_unreachable ();
13867     }
13868 
13869   return t;
13870 }
13871 
13872 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13873    constant.  TYPE is the type of the result.  */
13874 
13875 static tree
13876 fold_not_const (const_tree arg0, tree type)
13877 {
13878   gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13879 
13880   return force_fit_type (type, ~wi::to_wide (arg0), 0, TREE_OVERFLOW (arg0));
13881 }
13882 
13883 /* Given CODE, a relational operator, the target type, TYPE and two
13884    constant operands OP0 and OP1, return the result of the
13885    relational operation.  If the result is not a compile time
13886    constant, then return NULL_TREE.  */
13887 
13888 static tree
13889 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13890 {
13891   int result, invert;
13892 
13893   /* From here on, the only cases we handle are when the result is
13894      known to be a constant.  */
13895 
13896   if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13897     {
13898       const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13899       const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13900 
13901       /* Handle the cases where either operand is a NaN.  */
13902       if (real_isnan (c0) || real_isnan (c1))
13903 	{
13904 	  switch (code)
13905 	    {
13906 	    case EQ_EXPR:
13907 	    case ORDERED_EXPR:
13908 	      result = 0;
13909 	      break;
13910 
13911 	    case NE_EXPR:
13912 	    case UNORDERED_EXPR:
13913 	    case UNLT_EXPR:
13914 	    case UNLE_EXPR:
13915 	    case UNGT_EXPR:
13916 	    case UNGE_EXPR:
13917 	    case UNEQ_EXPR:
13918               result = 1;
13919 	      break;
13920 
13921 	    case LT_EXPR:
13922 	    case LE_EXPR:
13923 	    case GT_EXPR:
13924 	    case GE_EXPR:
13925 	    case LTGT_EXPR:
13926 	      if (flag_trapping_math)
13927 		return NULL_TREE;
13928 	      result = 0;
13929 	      break;
13930 
13931 	    default:
13932 	      gcc_unreachable ();
13933 	    }
13934 
13935 	  return constant_boolean_node (result, type);
13936 	}
13937 
13938       return constant_boolean_node (real_compare (code, c0, c1), type);
13939     }
13940 
13941   if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
13942     {
13943       const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
13944       const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
13945       return constant_boolean_node (fixed_compare (code, c0, c1), type);
13946     }
13947 
13948   /* Handle equality/inequality of complex constants.  */
13949   if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
13950     {
13951       tree rcond = fold_relational_const (code, type,
13952 					  TREE_REALPART (op0),
13953 					  TREE_REALPART (op1));
13954       tree icond = fold_relational_const (code, type,
13955 					  TREE_IMAGPART (op0),
13956 					  TREE_IMAGPART (op1));
13957       if (code == EQ_EXPR)
13958 	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
13959       else if (code == NE_EXPR)
13960 	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
13961       else
13962 	return NULL_TREE;
13963     }
13964 
13965   if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
13966     {
13967       if (!VECTOR_TYPE_P (type))
13968 	{
13969 	  /* Have vector comparison with scalar boolean result.  */
13970 	  gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
13971 		      && known_eq (VECTOR_CST_NELTS (op0),
13972 				   VECTOR_CST_NELTS (op1)));
13973 	  unsigned HOST_WIDE_INT nunits;
13974 	  if (!VECTOR_CST_NELTS (op0).is_constant (&nunits))
13975 	    return NULL_TREE;
13976 	  for (unsigned i = 0; i < nunits; i++)
13977 	    {
13978 	      tree elem0 = VECTOR_CST_ELT (op0, i);
13979 	      tree elem1 = VECTOR_CST_ELT (op1, i);
13980 	      tree tmp = fold_relational_const (code, type, elem0, elem1);
13981 	      if (tmp == NULL_TREE)
13982 		return NULL_TREE;
13983 	      if (integer_zerop (tmp))
13984 		return constant_boolean_node (false, type);
13985 	    }
13986 	  return constant_boolean_node (true, type);
13987 	}
13988       tree_vector_builder elts;
13989       if (!elts.new_binary_operation (type, op0, op1, false))
13990 	return NULL_TREE;
13991       unsigned int count = elts.encoded_nelts ();
13992       for (unsigned i = 0; i < count; i++)
13993 	{
13994 	  tree elem_type = TREE_TYPE (type);
13995 	  tree elem0 = VECTOR_CST_ELT (op0, i);
13996 	  tree elem1 = VECTOR_CST_ELT (op1, i);
13997 
13998 	  tree tem = fold_relational_const (code, elem_type,
13999 					    elem0, elem1);
14000 
14001 	  if (tem == NULL_TREE)
14002 	    return NULL_TREE;
14003 
14004 	  elts.quick_push (build_int_cst (elem_type,
14005 					  integer_zerop (tem) ? 0 : -1));
14006 	}
14007 
14008       return elts.build ();
14009     }
14010 
14011   /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14012 
14013      To compute GT, swap the arguments and do LT.
14014      To compute GE, do LT and invert the result.
14015      To compute LE, swap the arguments, do LT and invert the result.
14016      To compute NE, do EQ and invert the result.
14017 
14018      Therefore, the code below must handle only EQ and LT.  */
14019 
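  /* For example, to fold 2 > 1 we rewrite it below as 1 < 2, and to
     fold 2 >= 1 we compute 2 < 1 and invert the result.  */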
14020   if (code == LE_EXPR || code == GT_EXPR)
14021     {
14022       std::swap (op0, op1);
14023       code = swap_tree_comparison (code);
14024     }
14025 
14026   /* Note that it is safe to invert for real values here because we
14027      have already handled the one case where it matters.  */
14028 
14029   invert = 0;
14030   if (code == NE_EXPR || code == GE_EXPR)
14031     {
14032       invert = 1;
14033       code = invert_tree_comparison (code, false);
14034     }
14035 
14036   /* Compute a result for LT or EQ if args permit;
14037      otherwise return NULL_TREE.  */
14038   if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
14039     {
14040       if (code == EQ_EXPR)
14041 	result = tree_int_cst_equal (op0, op1);
14042       else
14043 	result = tree_int_cst_lt (op0, op1);
14044     }
14045   else
14046     return NULL_TREE;
14047 
14048   if (invert)
14049     result ^= 1;
14050   return constant_boolean_node (result, type);
14051 }
14052 
14053 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
14054    indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
14055    itself.  */
14056 
14057 tree
14058 fold_build_cleanup_point_expr (tree type, tree expr)
14059 {
14060   /* If the expression does not have side effects then we don't have to wrap
14061      it with a cleanup point expression.  */
14062   if (!TREE_SIDE_EFFECTS (expr))
14063     return expr;
14064 
14065   /* If the expression is a return, check whether the expression inside
14066      the return, or the right-hand side of its modify expression, has no
14067      side effects.  If either has none, we need not wrap the expression in
14068      a cleanup point expression.  Note that we don't check the left-hand
14069      side of the modify because it should always be a result decl.  */
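  /* E.g. "return x;" needs no cleanup point: its MODIFY_EXPR has side
     effects (the store), but the right-hand side "x" does not.  */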
14070   if (TREE_CODE (expr) == RETURN_EXPR)
14071     {
14072       tree op = TREE_OPERAND (expr, 0);
14073       if (!op || !TREE_SIDE_EFFECTS (op))
14074         return expr;
14075       op = TREE_OPERAND (op, 1);
14076       if (!TREE_SIDE_EFFECTS (op))
14077         return expr;
14078     }
14079 
14080   return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
14081 }
14082 
14083 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14084    of an indirection through OP0, or NULL_TREE if no simplification is
14085    possible.  */
14086 
14087 tree
14088 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
14089 {
14090   tree sub = op0;
14091   tree subtype;
14092   poly_uint64 const_op01;
14093 
14094   STRIP_NOPS (sub);
14095   subtype = TREE_TYPE (sub);
14096   if (!POINTER_TYPE_P (subtype)
14097       || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
14098     return NULL_TREE;
14099 
14100   if (TREE_CODE (sub) == ADDR_EXPR)
14101     {
14102       tree op = TREE_OPERAND (sub, 0);
14103       tree optype = TREE_TYPE (op);
14104 
14105       /* *&CONST_DECL -> the value of the const decl.  */
14106       if (TREE_CODE (op) == CONST_DECL)
14107 	return DECL_INITIAL (op);
14108       /* *&p => p;  make sure to handle *&"str"[cst] here.  */
14109       if (type == optype)
14110 	{
14111 	  tree fop = fold_read_from_constant_string (op);
14112 	  if (fop)
14113 	    return fop;
14114 	  else
14115 	    return op;
14116 	}
14117       /* *(foo *)&fooarray => fooarray[0] */
14118       else if (TREE_CODE (optype) == ARRAY_TYPE
14119 	       && type == TREE_TYPE (optype)
14120 	       && (!in_gimple_form
14121 		   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14122 	{
14123 	  tree type_domain = TYPE_DOMAIN (optype);
14124 	  tree min_val = size_zero_node;
14125 	  if (type_domain && TYPE_MIN_VALUE (type_domain))
14126 	    min_val = TYPE_MIN_VALUE (type_domain);
14127 	  if (in_gimple_form
14128 	      && TREE_CODE (min_val) != INTEGER_CST)
14129 	    return NULL_TREE;
14130 	  return build4_loc (loc, ARRAY_REF, type, op, min_val,
14131 			     NULL_TREE, NULL_TREE);
14132 	}
14133       /* *(foo *)&complexfoo => __real__ complexfoo */
14134       else if (TREE_CODE (optype) == COMPLEX_TYPE
14135 	       && type == TREE_TYPE (optype))
14136 	return fold_build1_loc (loc, REALPART_EXPR, type, op);
14137       /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14138       else if (VECTOR_TYPE_P (optype)
14139 	       && type == TREE_TYPE (optype))
14140 	{
14141 	  tree part_width = TYPE_SIZE (type);
14142 	  tree index = bitsize_int (0);
14143 	  return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
14144 				  index);
14145 	}
14146     }
14147 
14148   if (TREE_CODE (sub) == POINTER_PLUS_EXPR
14149       && poly_int_tree_p (TREE_OPERAND (sub, 1), &const_op01))
14150     {
14151       tree op00 = TREE_OPERAND (sub, 0);
14152       tree op01 = TREE_OPERAND (sub, 1);
14153 
14154       STRIP_NOPS (op00);
14155       if (TREE_CODE (op00) == ADDR_EXPR)
14156 	{
14157 	  tree op00type;
14158 	  op00 = TREE_OPERAND (op00, 0);
14159 	  op00type = TREE_TYPE (op00);
14160 
14161 	  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
14162 	  if (VECTOR_TYPE_P (op00type)
14163 	      && type == TREE_TYPE (op00type)
14164 	      /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
14165 		 but we want to treat offsets with MSB set as negative.
14166 		 For the code below negative offsets are invalid and
14167 		 TYPE_SIZE of the element is something unsigned, so
14168 		 check whether op01 fits into poly_int64, which implies
14169 		 it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
14170 		 then just use poly_uint64 because we want to treat the
14171 		 value as unsigned.  */
14172 	      && tree_fits_poly_int64_p (op01))
14173 	    {
14174 	      tree part_width = TYPE_SIZE (type);
14175 	      poly_uint64 max_offset
14176 		= (tree_to_uhwi (part_width) / BITS_PER_UNIT
14177 		   * TYPE_VECTOR_SUBPARTS (op00type));
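	      /* Illustrative, assuming a 4-element vector of 32-bit
		 elements and BITS_PER_UNIT == 8: max_offset is
		 (32 / 8) * 4 == 16 bytes, and a byte offset of 4
		 selects element 1 at bit position 32.  */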
14178 	      if (known_lt (const_op01, max_offset))
14179 		{
14180 		  tree index = bitsize_int (const_op01 * BITS_PER_UNIT);
14181 		  return fold_build3_loc (loc,
14182 					  BIT_FIELD_REF, type, op00,
14183 					  part_width, index);
14184 		}
14185 	    }
14186 	  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14187 	  else if (TREE_CODE (op00type) == COMPLEX_TYPE
14188 		   && type == TREE_TYPE (op00type))
14189 	    {
14190 	      if (known_eq (wi::to_poly_offset (TYPE_SIZE_UNIT (type)),
14191 			    const_op01))
14192 		return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
14193 	    }
14194 	  /* ((foo *)&fooarray)[1] => fooarray[1] */
14195 	  else if (TREE_CODE (op00type) == ARRAY_TYPE
14196 		   && type == TREE_TYPE (op00type))
14197 	    {
14198 	      tree type_domain = TYPE_DOMAIN (op00type);
14199 	      tree min_val = size_zero_node;
14200 	      if (type_domain && TYPE_MIN_VALUE (type_domain))
14201 		min_val = TYPE_MIN_VALUE (type_domain);
14202 	      offset_int off = wi::to_offset (op01);
14203 	      offset_int el_sz = wi::to_offset (TYPE_SIZE_UNIT (type));
14204 	      offset_int remainder;
14205 	      off = wi::divmod_trunc (off, el_sz, SIGNED, &remainder);
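	      /* E.g. with 4-byte elements and a byte offset of 8, OFF
		 becomes 2 with a zero remainder, so the result is
		 fooarray[2] (adjusted by the domain's minimum index).  */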
14206 	      if (remainder == 0 && TREE_CODE (min_val) == INTEGER_CST)
14207 		{
14208 		  off = off + wi::to_offset (min_val);
14209 		  op01 = wide_int_to_tree (sizetype, off);
14210 		  return build4_loc (loc, ARRAY_REF, type, op00, op01,
14211 				     NULL_TREE, NULL_TREE);
14212 		}
14213 	    }
14214 	}
14215     }
14216 
14217   /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14218   if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14219       && type == TREE_TYPE (TREE_TYPE (subtype))
14220       && (!in_gimple_form
14221 	  || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14222     {
14223       tree type_domain;
14224       tree min_val = size_zero_node;
14225       sub = build_fold_indirect_ref_loc (loc, sub);
14226       type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14227       if (type_domain && TYPE_MIN_VALUE (type_domain))
14228 	min_val = TYPE_MIN_VALUE (type_domain);
14229       if (in_gimple_form
14230 	  && TREE_CODE (min_val) != INTEGER_CST)
14231 	return NULL_TREE;
14232       return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
14233 			 NULL_TREE);
14234     }
14235 
14236   return NULL_TREE;
14237 }
14238 
14239 /* Builds an expression for an indirection through T, simplifying some
14240    cases.  */
14241 
14242 tree
14243 build_fold_indirect_ref_loc (location_t loc, tree t)
14244 {
14245   tree type = TREE_TYPE (TREE_TYPE (t));
14246   tree sub = fold_indirect_ref_1 (loc, type, t);
14247 
14248   if (sub)
14249     return sub;
14250 
14251   return build1_loc (loc, INDIRECT_REF, type, t);
14252 }
14253 
14254 /* Given an INDIRECT_REF T, return either T or a simplified version.  */
14255 
14256 tree
14257 fold_indirect_ref_loc (location_t loc, tree t)
14258 {
14259   tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
14260 
14261   if (sub)
14262     return sub;
14263   else
14264     return t;
14265 }
14266 
14267 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14268    whose result is ignored.  The type of the returned tree need not be
14269    the same as the original expression.  */
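/* For instance, "x + f ()" reduces to "f ()" and "(f (), y)" to
   "f ()": only the side-effecting subtrees need to be preserved.  */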
14270 
14271 tree
14272 fold_ignored_result (tree t)
14273 {
14274   if (!TREE_SIDE_EFFECTS (t))
14275     return integer_zero_node;
14276 
14277   for (;;)
14278     switch (TREE_CODE_CLASS (TREE_CODE (t)))
14279       {
14280       case tcc_unary:
14281 	t = TREE_OPERAND (t, 0);
14282 	break;
14283 
14284       case tcc_binary:
14285       case tcc_comparison:
14286 	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14287 	  t = TREE_OPERAND (t, 0);
14288 	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14289 	  t = TREE_OPERAND (t, 1);
14290 	else
14291 	  return t;
14292 	break;
14293 
14294       case tcc_expression:
14295 	switch (TREE_CODE (t))
14296 	  {
14297 	  case COMPOUND_EXPR:
14298 	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14299 	      return t;
14300 	    t = TREE_OPERAND (t, 0);
14301 	    break;
14302 
14303 	  case COND_EXPR:
14304 	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14305 		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14306 	      return t;
14307 	    t = TREE_OPERAND (t, 0);
14308 	    break;
14309 
14310 	  default:
14311 	    return t;
14312 	  }
14313 	break;
14314 
14315       default:
14316 	return t;
14317       }
14318 }
14319 
14320 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
14321 
14322 tree
14323 round_up_loc (location_t loc, tree value, unsigned int divisor)
14324 {
14325   tree div = NULL_TREE;
14326 
14327   if (divisor == 1)
14328     return value;
14329 
14330   /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
14331      have to do anything.  Only do this when VALUE is not a constant;
14332      for a constant, this check is more expensive than simply doing
14333      the rounding.  */
14334   if (TREE_CODE (value) != INTEGER_CST)
14335     {
14336       div = build_int_cst (TREE_TYPE (value), divisor);
14337 
14338       if (multiple_of_p (TREE_TYPE (value), value, div))
14339 	return value;
14340     }
14341 
14342   /* If divisor is a power of two, simplify this to bit manipulation.  */
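  /* E.g. round_up (21, 8): (21 + 7) & -8 == 24.  */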
14343   if (pow2_or_zerop (divisor))
14344     {
14345       if (TREE_CODE (value) == INTEGER_CST)
14346 	{
14347 	  wide_int val = wi::to_wide (value);
14348 	  bool overflow_p;
14349 
14350 	  if ((val & (divisor - 1)) == 0)
14351 	    return value;
14352 
14353 	  overflow_p = TREE_OVERFLOW (value);
14354 	  val += divisor - 1;
14355 	  val &= (int) -divisor;
14356 	  if (val == 0)
14357 	    overflow_p = true;
14358 
14359 	  return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
14360 	}
14361       else
14362 	{
14363 	  tree t;
14364 
14365 	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
14366 	  value = size_binop_loc (loc, PLUS_EXPR, value, t);
14367 	  t = build_int_cst (TREE_TYPE (value), - (int) divisor);
14368 	  value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14369 	}
14370     }
14371   else
14372     {
14373       if (!div)
14374 	div = build_int_cst (TREE_TYPE (value), divisor);
14375       value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
14376       value = size_binop_loc (loc, MULT_EXPR, value, div);
14377     }
14378 
14379   return value;
14380 }
14381 
14382 /* Likewise, but round down.  */
14383 
14384 tree
14385 round_down_loc (location_t loc, tree value, int divisor)
14386 {
14387   tree div = NULL_TREE;
14388 
14389   gcc_assert (divisor > 0);
14390   if (divisor == 1)
14391     return value;
14392 
14393   /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
14394      have to do anything.  Only do this when VALUE is not a constant;
14395      for a constant, this check is more expensive than simply doing
14396      the rounding.  */
14397   if (TREE_CODE (value) != INTEGER_CST)
14398     {
14399       div = build_int_cst (TREE_TYPE (value), divisor);
14400 
14401       if (multiple_of_p (TREE_TYPE (value), value, div))
14402 	return value;
14403     }
14404 
14405   /* If divisor is a power of two, simplify this to bit manipulation.  */
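  /* E.g. round_down (21, 8): 21 & -8 == 16.  */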
14406   if (pow2_or_zerop (divisor))
14407     {
14408       tree t;
14409 
14410       t = build_int_cst (TREE_TYPE (value), -divisor);
14411       value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14412     }
14413   else
14414     {
14415       if (!div)
14416 	div = build_int_cst (TREE_TYPE (value), divisor);
14417       value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
14418       value = size_binop_loc (loc, MULT_EXPR, value, div);
14419     }
14420 
14421   return value;
14422 }
14423 
14424 /* Returns a pointer to the base of the object addressed by EXP and
14425    extracts information about the offset of the access, storing it
14426    in *PBITPOS and *POFFSET.  */
14427 
14428 static tree
14429 split_address_to_core_and_offset (tree exp,
14430 				  poly_int64_pod *pbitpos, tree *poffset)
14431 {
14432   tree core;
14433   machine_mode mode;
14434   int unsignedp, reversep, volatilep;
14435   poly_int64 bitsize;
14436   location_t loc = EXPR_LOCATION (exp);
14437 
14438   if (TREE_CODE (exp) == ADDR_EXPR)
14439     {
14440       core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
14441 				  poffset, &mode, &unsignedp, &reversep,
14442 				  &volatilep);
14443       core = build_fold_addr_expr_loc (loc, core);
14444     }
14445   else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
14446     {
14447       core = TREE_OPERAND (exp, 0);
14448       STRIP_NOPS (core);
14449       *pbitpos = 0;
14450       *poffset = TREE_OPERAND (exp, 1);
14451       if (poly_int_tree_p (*poffset))
14452 	{
14453 	  poly_offset_int tem
14454 	    = wi::sext (wi::to_poly_offset (*poffset),
14455 			TYPE_PRECISION (TREE_TYPE (*poffset)));
14456 	  tem <<= LOG2_BITS_PER_UNIT;
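	  /* E.g. a byte offset of 3 becomes bit position 24, assuming
	     BITS_PER_UNIT == 8.  */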
14457 	  if (tem.to_shwi (pbitpos))
14458 	    *poffset = NULL_TREE;
14459 	}
14460     }
14461   else
14462     {
14463       core = exp;
14464       *pbitpos = 0;
14465       *poffset = NULL_TREE;
14466     }
14467 
14468   return core;
14469 }
14470 
14471 /* Returns true if addresses of E1 and E2 differ by a constant, false
14472    otherwise.  If they do, E1 - E2 is stored in *DIFF.  */
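/* For instance, with 4-byte integers, &a[3] and &a[1] share the core
   &a with byte positions 12 and 4, so *DIFF is set to 8.  */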
14473 
14474 bool
14475 ptr_difference_const (tree e1, tree e2, poly_int64_pod *diff)
14476 {
14477   tree core1, core2;
14478   poly_int64 bitpos1, bitpos2;
14479   tree toffset1, toffset2, tdiff, type;
14480 
14481   core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
14482   core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
14483 
14484   poly_int64 bytepos1, bytepos2;
14485   if (!multiple_p (bitpos1, BITS_PER_UNIT, &bytepos1)
14486       || !multiple_p (bitpos2, BITS_PER_UNIT, &bytepos2)
14487       || !operand_equal_p (core1, core2, 0))
14488     return false;
14489 
14490   if (toffset1 && toffset2)
14491     {
14492       type = TREE_TYPE (toffset1);
14493       if (type != TREE_TYPE (toffset2))
14494 	toffset2 = fold_convert (type, toffset2);
14495 
14496       tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
14497       if (!cst_and_fits_in_hwi (tdiff))
14498 	return false;
14499 
14500       *diff = int_cst_value (tdiff);
14501     }
14502   else if (toffset1 || toffset2)
14503     {
14504       /* If only one of the offsets is non-constant, the difference cannot
14505 	 be a constant.  */
14506       return false;
14507     }
14508   else
14509     *diff = 0;
14510 
14511   *diff += bytepos1 - bytepos2;
14512   return true;
14513 }
14514 
14515 /* Return OFF converted to a pointer offset type suitable as offset for
14516    POINTER_PLUS_EXPR.  Use location LOC for this conversion.  */
14517 tree
14518 convert_to_ptrofftype_loc (location_t loc, tree off)
14519 {
14520   return fold_convert_loc (loc, sizetype, off);
14521 }
14522 
14523 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */
14524 tree
14525 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
14526 {
14527   return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14528 			  ptr, convert_to_ptrofftype_loc (loc, off));
14529 }
14530 
14531 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */
14532 tree
14533 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
14534 {
14535   return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14536 			  ptr, size_int (off));
14537 }
14538 
14539 /* Return a char pointer for a C string if it is a string constant
14540    or a sum of a string constant and an integer constant.  We only
14541    support string constants properly terminated with a '\0' character.
14542    If STRLEN is a valid pointer, the length (including the terminating
14543    character) of the returned string is stored in *STRLEN.  */
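/* For instance, for SRC == &"hello"[1] this returns "ello" and sets
   *STRLEN to 5 (four characters plus the terminating '\0').  */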
14544 
14545 const char *
14546 c_getstr (tree src, unsigned HOST_WIDE_INT *strlen)
14547 {
14548   tree offset_node;
14549 
14550   if (strlen)
14551     *strlen = 0;
14552 
14553   src = string_constant (src, &offset_node);
14554   if (src == 0)
14555     return NULL;
14556 
14557   unsigned HOST_WIDE_INT offset = 0;
14558   if (offset_node != NULL_TREE)
14559     {
14560       if (!tree_fits_uhwi_p (offset_node))
14561 	return NULL;
14562       else
14563 	offset = tree_to_uhwi (offset_node);
14564     }
14565 
14566   unsigned HOST_WIDE_INT string_length = TREE_STRING_LENGTH (src);
14567   const char *string = TREE_STRING_POINTER (src);
14568 
14569   /* Support only properly null-terminated strings.  */
14570   if (string_length == 0
14571       || string[string_length - 1] != '\0'
14572       || offset >= string_length)
14573     return NULL;
14574 
14575   if (strlen)
14576     *strlen = string_length - offset;
14577   return string + offset;
14578 }
14579 
14580 #if CHECKING_P
14581 
14582 namespace selftest {
14583 
14584 /* Helper functions for writing tests of folding trees.  */
14585 
14586 /* Verify that the binary op (LHS CODE RHS) folds to CONSTANT.  */
14587 
14588 static void
14589 assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
14590 			     tree constant)
14591 {
14592   ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
14593 }
14594 
14595 /* Verify that the binary op (LHS CODE RHS) folds to a NON_LVALUE_EXPR
14596    wrapping WRAPPED_EXPR.  */
14597 
14598 static void
14599 assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
14600 				 tree wrapped_expr)
14601 {
14602   tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
14603   ASSERT_NE (wrapped_expr, result);
14604   ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
14605   ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
14606 }
14607 
14608 /* Verify that various arithmetic binary operations are folded
14609    correctly.  */
14610 
14611 static void
14612 test_arithmetic_folding ()
14613 {
14614   tree type = integer_type_node;
14615   tree x = create_tmp_var_raw (type, "x");
14616   tree zero = build_zero_cst (type);
14617   tree one = build_int_cst (type, 1);
14618 
14619   /* Addition.  */
14620   /* 1 <-- (0 + 1) */
14621   assert_binop_folds_to_const (zero, PLUS_EXPR, one,
14622 			       one);
14623   assert_binop_folds_to_const (one, PLUS_EXPR, zero,
14624 			       one);
14625 
14626   /* (nonlvalue)x <-- (x + 0) */
14627   assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
14628 				   x);
14629 
14630   /* Subtraction.  */
14631   /* 0 <-- (x - x) */
14632   assert_binop_folds_to_const (x, MINUS_EXPR, x,
14633 			       zero);
14634   assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
14635 				   x);
14636 
14637   /* Multiplication.  */
14638   /* 0 <-- (x * 0) */
14639   assert_binop_folds_to_const (x, MULT_EXPR, zero,
14640 			       zero);
14641 
14642   /* (nonlvalue)x <-- (x * 1) */
14643   assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
14644 				   x);
14645 }
14646 
14647 /* Verify that various binary operations on vectors are folded
14648    correctly.  */
14649 
14650 static void
14651 test_vector_folding ()
14652 {
14653   tree inner_type = integer_type_node;
14654   tree type = build_vector_type (inner_type, 4);
14655   tree zero = build_zero_cst (type);
14656   tree one = build_one_cst (type);
14657 
14658   /* Verify equality tests that return a scalar boolean result.  */
14659   tree res_type = boolean_type_node;
14660   ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
14661   ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
14662   ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
14663   ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
14664 }
14665 
14666 /* Verify folding of VEC_DUPLICATE_EXPRs.  */
14667 
14668 static void
14669 test_vec_duplicate_folding ()
14670 {
14671   scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (ssizetype);
14672   machine_mode vec_mode = targetm.vectorize.preferred_simd_mode (int_mode);
14673   /* This will be 1 if VEC_MODE isn't a vector mode.  */
14674   poly_uint64 nunits = GET_MODE_NUNITS (vec_mode);
14675 
14676   tree type = build_vector_type (ssizetype, nunits);
14677   tree dup5_expr = fold_unary (VEC_DUPLICATE_EXPR, type, ssize_int (5));
14678   tree dup5_cst = build_vector_from_val (type, ssize_int (5));
14679   ASSERT_TRUE (operand_equal_p (dup5_expr, dup5_cst, 0));
14680 }
14681 
14682 /* Run all of the selftests within this file.  */
14683 
14684 void
14685 fold_const_c_tests ()
14686 {
14687   test_arithmetic_folding ();
14688   test_vector_folding ();
14689   test_vec_duplicate_folding ();
14690 }
14691 
14692 } // namespace selftest
14693 
14694 #endif /* CHECKING_P */
14695