/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "memmodel.h"
#include "tm_p.h"
#include "tree-ssa-operands.h"
#include "optabs-query.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "flags.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "expr.h"
#include "intl.h"
#include "langhooks.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "builtins.h"
#include "generic-match.h"
#include "gimple-fold.h"
#include "tree-into-ssa.h"
#include "md5.h"
#include "case-cfn-macros.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "selftest.h"
#include "attribs.h"
#include "tree-vector-builder.h"
#include "vec-perm-indices.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit-based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
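
/* Illustrative note (an editorial addition, not from the GCC sources):
   the encoding gives one bit each to the LT (1), EQ (2), GT (4) and
   UNORD (8) outcomes, so a comparison_code is just the set of outcomes
   for which the comparison holds.  ANDing or ORing two comparisons on
   the same operands then reduces to bitwise AND/OR of their codes:

     COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE    (1 | 2 == 3)
     COMPCODE_LE & COMPCODE_GE == COMPCODE_EQ    (3 & 6 == 2)
     COMPCODE_LT & COMPCODE_GT == COMPCODE_FALSE (contradiction)  */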

static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static bool twoval_comparison_p (tree, tree *, tree *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static bool simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static tree fold_negate_expr (location_t, tree);


/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}
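
/* Usage sketch (editorial, hypothetical caller): when rebuilding an
   expression from a sub-expression T whose location may be unknown,

     location_t loc = expr_location_or (t, input_location);

   keeps T's own location when it has one and falls back to the global
   input_location otherwise.  */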

/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}

/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
			 SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
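
/* Example (editorial): for INTEGER_CST arguments 12 and 4 this returns
   the constant 3, since 4 divides 12 exactly; for 13 and 4 the
   remainder is nonzero, so it returns NULL_TREE and the caller must
   keep the division expression.  */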

/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}
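
/* Typical usage sketch (editorial; the caller shown is hypothetical):
   code that folds speculatively brackets the work with the
   defer/undefer pair and only lets the warning out if the folded
   result is actually used:

     fold_defer_overflow_warnings ();
     tree folded = fold (expr);
     bool used = folded && TREE_CONSTANT (folded);
     fold_undefer_overflow_warnings (used, NULL, 0);  */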

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}

/* Return true if the built-in mathematical function specified by FN
   is odd, i.e. -f(x) == f(-x).  */

bool
negate_mathfn_p (combined_fn fn)
{
  switch (fn)
    {
    CASE_CFN_ASIN:
    CASE_CFN_ASINH:
    CASE_CFN_ATAN:
    CASE_CFN_ATANH:
    CASE_CFN_CASIN:
    CASE_CFN_CASINH:
    CASE_CFN_CATAN:
    CASE_CFN_CATANH:
    CASE_CFN_CBRT:
    CASE_CFN_CPROJ:
    CASE_CFN_CSIN:
    CASE_CFN_CSINH:
    CASE_CFN_CTAN:
    CASE_CFN_CTANH:
    CASE_CFN_ERF:
    CASE_CFN_LLROUND:
    CASE_CFN_LROUND:
    CASE_CFN_ROUND:
    CASE_CFN_ROUNDEVEN:
    CASE_CFN_ROUNDEVEN_FN:
    CASE_CFN_SIN:
    CASE_CFN_SINH:
    CASE_CFN_TAN:
    CASE_CFN_TANH:
    CASE_CFN_TRUNC:
      return true;

    CASE_CFN_LLRINT:
    CASE_CFN_LRINT:
    CASE_CFN_NEARBYINT:
    CASE_CFN_RINT:
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
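
/* Example (editorial): sin is odd, so -sin(x) may be rewritten as
   sin(-x), saving a negation.  rint and friends are treated as odd
   only when -frounding-math is off, because e.g. rounding toward
   +infinity does not commute with negation.  */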

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (wi::to_wide (t));
}
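
/* Example (editorial): for 32-bit signed int, the INTEGER_CST
   -2147483648 has only the sign bit set and has no negation in the
   same type, so this returns false for it and true for every other
   value, e.g. -2147483647.  */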

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	/* Steps don't prevent negation.  */
	unsigned int count = vector_cst_encoded_nelts (t);
	for (unsigned int i = 0; i < count; ++i)
	  if (!negate_expr_p (VECTOR_CST_ENCODED_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  || HONOR_SIGNED_ZEROS (element_mode (type))
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && ! TYPE_OVERFLOW_WRAPS (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	     && !HONOR_SIGNED_ZEROS (element_mode (type))
	     && (! ANY_INTEGRAL_TYPE_P (type)
		 || TYPE_OVERFLOW_WRAPS (type));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* INT_MIN/n * n doesn't overflow, while negating one operand
         does if n is a (negative) power of two.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		 && (wi::popcount
		     (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
		|| (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
		    && (wi::popcount
			(wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
         B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
	  && negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if (! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
	  || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	      && ! integer_onep (TREE_OPERAND (t, 1))))
	return negate_expr_p (TREE_OPERAND (t, 1));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
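
/* Illustrative examples (editorial) of what negate_expr_p accepts:
   with wrapping overflow, -(A + B) is cheap because it can become
   (-B) - A; the REAL_CST 2.0 is rejected while -2.0 is accepted, so
   folding canonicalizes toward positive real constants; and
   -((int) x >> 31) is negatable because it can be rewritten as
   (unsigned) x >> 31.  */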

/* Given T, an expression, return a folded tree for -T, or NULL_TREE if
   no simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr_1 (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && !TYPE_OVERFLOW_TRAPS (type)
	      && TYPE_OVERFLOW_WRAPS (type))
	  || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
	return tem;
      break;

    case POLY_INT_CST:
    case REAL_CST:
    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
	tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
	if (rpart && ipart)
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	tree_vector_builder elts;
	elts.new_unary_operation (type, t, true);
	unsigned int count = elts.encoded_nelts ();
	for (unsigned int i = 0; i < count; ++i)
	  {
	    tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elt == NULL_TREE)
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
	return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
	 B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
	  && negate_expr_p (TREE_OPERAND (t, 0)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				negate_expr (TREE_OPERAND (t, 0)),
				TREE_OPERAND (t, 1));
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if ((! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
	   || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	   || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	       && ! integer_onep (TREE_OPERAND (t, 1))))
	  && negate_expr_p (TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				TREE_OPERAND (t, 0),
				negate_expr (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == TYPE_PRECISION (type) - 1)
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* A wrapper for fold_negate_expr_1.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);
  tree tem = fold_negate_expr_1 (loc, t);
  if (tem == NULL_TREE)
    return NULL_TREE;
  return fold_convert_loc (loc, type, tem);
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which
   case return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}

/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.  If a variable part is of pointer
   type, it is negated after converting to TYPE.  This prevents us from
   generating an illegal MINUS pointer expression.  LOC is the location of
   the converted variable part.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, tree type, enum tree_code code,
	    tree *minus_varp, tree *conp, tree *minus_conp,
	    tree *litp, tree *minus_litp, int negate_p)
{
  tree var = 0;
  *minus_varp = 0;
  *conp = 0;
  *minus_conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
		   || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR
		       && (TREE_CODE (in) == PLUS_EXPR
			   || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p && *conp)
	*minus_conp = *conp, *conp = 0;
      if (neg_var_p && var)
	*minus_varp = var, var = 0;
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -1 - X is folded to ~X, undo that here.  Do _not_ do this
         when IN is constant.  */
      *litp = build_minus_one_cst (type);
      *minus_varp = TREE_OPERAND (in, 0);
    }
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      if (*conp)
	*minus_conp = *conp, *conp = 0;
      else if (*minus_conp)
	*conp = *minus_conp, *minus_conp = 0;
      if (var)
	*minus_varp = var, var = 0;
      else if (*minus_varp)
	var = *minus_varp, *minus_varp = 0;
    }

  if (*litp
      && TREE_OVERFLOW_P (*litp))
    *litp = drop_tree_overflow (*litp);
  if (*minus_litp
      && TREE_OVERFLOW_P (*minus_litp))
    *minus_litp = drop_tree_overflow (*minus_litp);

  return var;
}
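
/* Worked example (editorial): splitting IN = x - 5 with
   CODE = PLUS_EXPR and NEGATE_P = 0 returns the variable part x and
   sets *MINUS_LITP to 5, recording that the literal was subtracted;
   with NEGATE_P = 1 the roles flip, giving *LITP = 5 and
   *MINUS_VARP = x.  */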

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    {
      gcc_assert (t2 == 0 || code != MINUS_EXPR);
      return t2;
    }
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}
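
/* Example (editorial): with CODE = PLUS_EXPR, T1 = a - b and T2 = -c,
   the MINUS_EXPR at T1's root routes us through the non-folding branch
   above, which turns the NEGATE_EXPR into subtraction and builds
   (a - b) - c with build2_loc, deliberately avoiding fold_build2_loc
   and the infinite recursion it could cause.  */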

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}

/* Combine two wide ints ARG1 and ARG2 under operation CODE to produce
   a new constant in RES.  Return FALSE if we don't know how to
   evaluate CODE at compile-time.  */

bool
wide_int_binop (wide_int &res,
		enum tree_code code, const wide_int &arg1, const wide_int &arg2,
		signop sign, wi::overflow_type *overflow)
{
  wide_int tmp;
  *overflow = wi::OVF_NONE;
  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
	{
	  tmp = -arg2;
	  if (code == RSHIFT_EXPR)
	    code = LSHIFT_EXPR;
	  else
	    code = RSHIFT_EXPR;
	}
      else
        tmp = arg2;

      if (code == RSHIFT_EXPR)
	/* It's unclear from the C standard whether shifts can overflow.
	   The following code ignores overflow; perhaps a C standard
	   interpretation ruling is needed.  */
	res = wi::rshift (arg1, tmp, sign);
      else
	res = wi::lshift (arg1, tmp);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
	{
	  tmp = -arg2;
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	  else
	    code = RROTATE_EXPR;
	}
      else
        tmp = arg2;

      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, tmp);
      else
	res = wi::lrotate (arg1, tmp);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_trunc (arg1, arg2, sign, overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_floor (arg1, arg2, sign, overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_ceil (arg1, arg2, sign, overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_round (arg1, arg2, sign, overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_trunc (arg1, arg2, sign, overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_floor (arg1, arg2, sign, overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_ceil (arg1, arg2, sign, overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_round (arg1, arg2, sign, overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return false;
    }
  return true;
}
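
/* Example (editorial): wide_int_binop normalizes negative shift and
   rotate counts by flipping the direction, so ARG1 << -3 is evaluated
   as ARG1 >> 3.  All division and modulus cases refuse a zero ARG2 by
   returning false, leaving the expression unfolded so any runtime trap
   is preserved.  */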

/* Combine two poly int's ARG1 and ARG2 under operation CODE to
   produce a new constant in RES.  Return FALSE if we don't know how
   to evaluate CODE at compile-time.  */

static bool
poly_int_binop (poly_wide_int &res, enum tree_code code,
		const_tree arg1, const_tree arg2,
		signop sign, wi::overflow_type *overflow)
{
  gcc_assert (NUM_POLY_INT_COEFFS != 1);
  gcc_assert (poly_int_tree_p (arg1) && poly_int_tree_p (arg2));
  switch (code)
    {
    case PLUS_EXPR:
      res = wi::add (wi::to_poly_wide (arg1),
		     wi::to_poly_wide (arg2), sign, overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (wi::to_poly_wide (arg1),
		     wi::to_poly_wide (arg2), sign, overflow);
      break;

    case MULT_EXPR:
      if (TREE_CODE (arg2) == INTEGER_CST)
	res = wi::mul (wi::to_poly_wide (arg1),
		       wi::to_wide (arg2), sign, overflow);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	res = wi::mul (wi::to_poly_wide (arg2),
		       wi::to_wide (arg1), sign, overflow);
      else
	return false;
      break;

    case LSHIFT_EXPR:
      if (TREE_CODE (arg2) == INTEGER_CST)
	res = wi::to_poly_wide (arg1) << wi::to_wide (arg2);
      else
	return false;
      break;

    case BIT_IOR_EXPR:
      if (TREE_CODE (arg2) != INTEGER_CST
	  || !can_ior_p (wi::to_poly_wide (arg1), wi::to_wide (arg2),
			 &res))
	return false;
      break;

    default:
      return false;
    }
  return true;
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE to
   produce a new constant.  Return NULL_TREE if we don't know how to
   evaluate CODE at compile-time.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2,
		 int overflowable)
{
  poly_wide_int poly_res;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  wi::overflow_type overflow = wi::OVF_NONE;

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      wide_int warg1 = wi::to_wide (arg1), res;
      wide_int warg2 = wi::to_wide (arg2, TYPE_PRECISION (type));
      if (!wide_int_binop (res, code, warg1, warg2, sign, &overflow))
	return NULL_TREE;
      poly_res = res;
    }
  else if (!poly_int_tree_p (arg1)
	   || !poly_int_tree_p (arg2)
	   || !poly_int_binop (poly_res, code, arg1, arg2, sign, &overflow))
    return NULL_TREE;
  return force_fit_type (type, poly_res, overflowable,
			 (((sign == SIGNED || overflowable == -1)
			   && overflow)
			  | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
}
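
/* Example (editorial): for 32-bit signed int, adding the INTEGER_CSTs
   2147483647 and 1 makes wide_int_binop report overflow, and
   force_fit_type then yields the wrapped constant -2147483648 with
   TREE_OVERFLOW set, letting callers distinguish a clean fold from an
   overflowed one.  */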

/* Return true if binary operation OP distributes over addition in operand
   OPNO, with the other operand being held constant.  OPNO counts from 1.  */

static bool
distributes_over_addition_p (tree_code op, int opno)
{
  switch (op)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      return true;

    case LSHIFT_EXPR:
      return opno == 1;

    default:
      return false;
    }
}
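
/* Example (editorial): left shift distributes over addition in its
   first operand, (a + b) << c == (a << c) + (b << c) modulo wrapping,
   but not in its second, since a << (b + c) is not (a << b) + (a << c).
   This predicate is what lets const_binop below apply an operation
   directly to the stepped encoding of a VECTOR_CST.  */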

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
    {
      if (code == POINTER_PLUS_EXPR)
	return int_const_binop (PLUS_EXPR,
				arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a signaling NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISSIGNALING_NAN (d1)
	      || REAL_VALUE_ISSIGNALING_NAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && real_equal (&d2, &dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
      {
	/* Make the resulting NaN value a qNaN when flag_signaling_nans
	   is off.  */
	d1.signalling = 0;
	t = build_real (type, d1);
	return t;
      }
      else if (REAL_VALUE_ISNAN (d2))
      {
	/* Make the resulting NaN value a qNaN when flag_signaling_nans
	   is off.  */
	d2.signalling = 0;
	t = build_real (type, d2);
	return t;
      }

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  if (TREE_CODE (arg2) != FIXED_CST)
	    return NULL_TREE;
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  {
	    if (TREE_CODE (arg2) != INTEGER_CST)
	      return NULL_TREE;
	    wi::tree_to_wide_ref w2 = wi::to_wide (arg2);
	    f2.data.high = w2.elt (1);
	    f2.data.low = w2.ulow ();
	    f2.mode = SImode;
	  }
	  break;

        default:
	  return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru. */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	  {
	    /* Keep this algorithm in sync with
	       tree-complex.c:expand_complex_div_straight().

	       Expand complex division to scalars, straightforward algorithm.
	       a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
	       t = br*br + bi*bi
	    */
	    tree magsquared
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r2, r2),
			     const_binop (MULT_EXPR, i2, i2));
	    tree t1
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r1, r2),
			     const_binop (MULT_EXPR, i1, i2));
	    tree t2
	      = const_binop (MINUS_EXPR,
			     const_binop (MULT_EXPR, i1, r2),
			     const_binop (MULT_EXPR, r1, i2));

	    real = const_binop (code, t1, magsquared);
	    imag = const_binop (code, t2, magsquared);
	  }
	  else
	  {
	    /* Keep this algorithm in sync with
               tree-complex.c:expand_complex_div_wide().

	       Expand complex division to scalars, modified algorithm to minimize
	       overflow with wide input ranges.  */
	    tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					fold_abs_const (r2, TREE_TYPE (type)),
					fold_abs_const (i2, TREE_TYPE (type)));

	    if (integer_nonzerop (compare))
	      {
		/* In the TRUE branch, we compute
		   ratio = br/bi;
		   div = (br * ratio) + bi;
		   tr = (ar * ratio) + ai;
		   ti = (ai * ratio) - ar;
		   tr = tr / div;
		   ti = ti / div;  */
		tree ratio = const_binop (code, r2, i2);
		tree div = const_binop (PLUS_EXPR, i2,
					const_binop (MULT_EXPR, r2, ratio));
		real = const_binop (MULT_EXPR, r1, ratio);
		real = const_binop (PLUS_EXPR, real, i1);
		real = const_binop (code, real, div);

		imag = const_binop (MULT_EXPR, i1, ratio);
		imag = const_binop (MINUS_EXPR, imag, r1);
		imag = const_binop (code, imag, div);
	      }
	    else
	      {
		/* In the FALSE branch, we compute
		   ratio = d/c;
		   divisor = (d * ratio) + c;
		   tr = (b * ratio) + a;
		   ti = b - (a * ratio);
		   tr = tr / div;
		   ti = ti / div;  */
		tree ratio = const_binop (code, i2, r2);
		tree div = const_binop (PLUS_EXPR, r2,
                                        const_binop (MULT_EXPR, i2, ratio));

		real = const_binop (MULT_EXPR, i1, ratio);
		real = const_binop (PLUS_EXPR, real, r1);
		real = const_binop (code, real, div);

		imag = const_binop (MULT_EXPR, r1, ratio);
		imag = const_binop (MINUS_EXPR, i1, imag);
		imag = const_binop (code, imag, div);
	      }
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST
      && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)),
		   TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2))))
    {
      tree type = TREE_TYPE (arg1);
      bool step_ok_p;
      if (VECTOR_CST_STEPPED_P (arg1)
	  && VECTOR_CST_STEPPED_P (arg2))
	/* We can operate directly on the encoding if:

	      a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
	    implies
	      (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)

	   Addition and subtraction are the supported operators
	   for which this is true.  */
	step_ok_p = (code == PLUS_EXPR || code == MINUS_EXPR);
      else if (VECTOR_CST_STEPPED_P (arg1))
	/* We can operate directly on stepped encodings if:

	     a3 - a2 == a2 - a1
	   implies:
	     (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)

	   which is true if (x -> x op c) distributes over addition.  */
	step_ok_p = distributes_over_addition_p (code, 1);
      else
	/* Similarly in reverse.  */
	step_ok_p = distributes_over_addition_p (code, 2);
      tree_vector_builder elts;
      if (!elts.new_binary_operation (type, arg1, arg2, step_ok_p))
	return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  tree elt = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elt == NULL_TREE)
	    return NULL_TREE;
	  elts.quick_push (elt);
	}

      return elts.build ();
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      bool step_ok_p = distributes_over_addition_p (code, 1);
      tree_vector_builder elts;
      if (!elts.new_unary_operation (type, arg1, step_ok_p))
	return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);

	  tree elt = const_binop (code, elem1, arg2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elt == NULL_TREE)
	    return NULL_TREE;
	  elts.quick_push (elt);
	}

      return elts.build ();
    }
  return NULL_TREE;
}
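
/* Worked example (editorial): folding the complex integer division
   (3 + 4i) / (1 + 2i) under TRUNC_DIV_EXPR with flag_complex_method == 0
   uses the straightforward algorithm above: t = 1*1 + 2*2 = 5,
   real = (3*1 + 4*2) / 5 = 11/5 -> 2, imag = (4*1 - 3*2) / 5
   = -2/5 -> 0, so the result is the COMPLEX_CST 2 + 0i.  */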
1591 
1592 /* Overload that adds a TYPE parameter to be able to dispatch
1593    to fold_relational_const.  */
1594 
1595 tree
const_binop(enum tree_code code,tree type,tree arg1,tree arg2)1596 const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
1597 {
1598   if (TREE_CODE_CLASS (code) == tcc_comparison)
1599     return fold_relational_const (code, type, arg1, arg2);
1600 
1601   /* ???  Until we make the const_binop worker take the type of the
1602      result as argument put those cases that need it here.  */
1603   switch (code)
1604     {
1605     case VEC_SERIES_EXPR:
1606       if (CONSTANT_CLASS_P (arg1)
1607 	  && CONSTANT_CLASS_P (arg2))
1608 	return build_vec_series (type, arg1, arg2);
1609       return NULL_TREE;
1610 
1611     case COMPLEX_EXPR:
1612       if ((TREE_CODE (arg1) == REAL_CST
1613 	   && TREE_CODE (arg2) == REAL_CST)
1614 	  || (TREE_CODE (arg1) == INTEGER_CST
1615 	      && TREE_CODE (arg2) == INTEGER_CST))
1616 	return build_complex (type, arg1, arg2);
1617       return NULL_TREE;
1618 
1619     case POINTER_DIFF_EXPR:
1620       if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
1621 	{
1622 	  poly_offset_int res = (wi::to_poly_offset (arg1)
1623 				 - wi::to_poly_offset (arg2));
1624 	  return force_fit_type (type, res, 1,
1625 				 TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1626 	}
1627       return NULL_TREE;
1628 
1629     case VEC_PACK_TRUNC_EXPR:
1630     case VEC_PACK_FIX_TRUNC_EXPR:
1631     case VEC_PACK_FLOAT_EXPR:
1632       {
1633 	unsigned int HOST_WIDE_INT out_nelts, in_nelts, i;
1634 
1635 	if (TREE_CODE (arg1) != VECTOR_CST
1636 	    || TREE_CODE (arg2) != VECTOR_CST)
1637 	  return NULL_TREE;
1638 
1639 	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
1640 	  return NULL_TREE;
1641 
1642 	out_nelts = in_nelts * 2;
1643 	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
1644 		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1645 
1646 	tree_vector_builder elts (type, out_nelts, 1);
1647 	for (i = 0; i < out_nelts; i++)
1648 	  {
1649 	    tree elt = (i < in_nelts
1650 			? VECTOR_CST_ELT (arg1, i)
1651 			: VECTOR_CST_ELT (arg2, i - in_nelts));
1652 	    elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
1653 				      ? NOP_EXPR
1654 				      : code == VEC_PACK_FLOAT_EXPR
1655 				      ? FLOAT_EXPR : FIX_TRUNC_EXPR,
1656 				      TREE_TYPE (type), elt);
1657 	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1658 	      return NULL_TREE;
1659 	    elts.quick_push (elt);
1660 	  }
1661 
1662 	return elts.build ();
1663       }
1664 
1665     case VEC_WIDEN_MULT_LO_EXPR:
1666     case VEC_WIDEN_MULT_HI_EXPR:
1667     case VEC_WIDEN_MULT_EVEN_EXPR:
1668     case VEC_WIDEN_MULT_ODD_EXPR:
1669       {
1670 	unsigned HOST_WIDE_INT out_nelts, in_nelts, out, ofs, scale;
1671 
1672 	if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
1673 	  return NULL_TREE;
1674 
1675 	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
1676 	  return NULL_TREE;
1677 	out_nelts = in_nelts / 2;
1678 	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
1679 		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1680 
1681 	if (code == VEC_WIDEN_MULT_LO_EXPR)
1682 	  scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
1683 	else if (code == VEC_WIDEN_MULT_HI_EXPR)
1684 	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
1685 	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
1686 	  scale = 1, ofs = 0;
1687 	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1688 	  scale = 1, ofs = 1;
1689 
1690 	tree_vector_builder elts (type, out_nelts, 1);
1691 	for (out = 0; out < out_nelts; out++)
1692 	  {
1693 	    unsigned int in = (out << scale) + ofs;
1694 	    tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1695 					  VECTOR_CST_ELT (arg1, in));
1696 	    tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1697 					  VECTOR_CST_ELT (arg2, in));
1698 
1699 	    if (t1 == NULL_TREE || t2 == NULL_TREE)
1700 	      return NULL_TREE;
1701 	    tree elt = const_binop (MULT_EXPR, t1, t2);
1702 	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1703 	      return NULL_TREE;
1704 	    elts.quick_push (elt);
1705 	  }
1706 
1707 	return elts.build ();
1708       }
1709 
1710     default:;
1711     }
1712 
1713   if (TREE_CODE_CLASS (code) != tcc_binary)
1714     return NULL_TREE;
1715 
1716   /* Make sure type and arg0 have the same saturating flag.  */
1717   gcc_checking_assert (TYPE_SATURATING (type)
1718 		       == TYPE_SATURATING (TREE_TYPE (arg1)));
1719 
1720   return const_binop (code, arg1, arg2);
1721 }
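
/* A minimal usage sketch added for illustration; it is not part of the
   original GCC sources and is kept under "#if 0" so it has no effect on
   the build.  The helper name is hypothetical; it only assumes the
   global type nodes are initialized.  */
#if 0
static void
const_binop_overload_example (void)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);
  /* tcc_comparison codes are dispatched to fold_relational_const and
     yield a boolean constant: 2 < 3 folds to true.  */
  tree lt = const_binop (LT_EXPR, boolean_type_node, two, three);
  gcc_assert (integer_onep (lt));
  /* Other binary codes fall through to the two-operand worker.  */
  tree sum = const_binop (PLUS_EXPR, integer_type_node, two, three);
  gcc_assert (tree_int_cst_equal (sum,
				  build_int_cst (integer_type_node, 5)));
}
#endif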
1722 
1723 /* Compute CODE ARG0 with resulting type TYPE, where ARG0 is constant.
1724    Return NULL_TREE if computing the constant is not possible.  */
1725 
1726 tree
1727 const_unop (enum tree_code code, tree type, tree arg0)
1728 {
1729   /* Don't perform the operation, other than NEGATE and ABS, if
1730      flag_signaling_nans is on and the operand is a signaling NaN.  */
1731   if (TREE_CODE (arg0) == REAL_CST
1732       && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
1733       && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
1734       && code != NEGATE_EXPR
1735       && code != ABS_EXPR
1736       && code != ABSU_EXPR)
1737     return NULL_TREE;
1738 
1739   switch (code)
1740     {
1741     CASE_CONVERT:
1742     case FLOAT_EXPR:
1743     case FIX_TRUNC_EXPR:
1744     case FIXED_CONVERT_EXPR:
1745       return fold_convert_const (code, type, arg0);
1746 
1747     case ADDR_SPACE_CONVERT_EXPR:
1748       /* If the source address is 0, and the source address space
1749 	 cannot have a valid object at 0, fold to dest type null.  */
1750       if (integer_zerop (arg0)
1751 	  && !(targetm.addr_space.zero_address_valid
1752 	       (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
1753 	return fold_convert_const (code, type, arg0);
1754       break;
1755 
1756     case VIEW_CONVERT_EXPR:
1757       return fold_view_convert_expr (type, arg0);
1758 
1759     case NEGATE_EXPR:
1760       {
1761 	/* Can't call fold_negate_const directly here as that doesn't
1762 	   handle all cases and we might not be able to negate some
1763 	   constants.  */
1764 	tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
1765 	if (tem && CONSTANT_CLASS_P (tem))
1766 	  return tem;
1767 	break;
1768       }
1769 
1770     case ABS_EXPR:
1771     case ABSU_EXPR:
1772       if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
1773 	return fold_abs_const (arg0, type);
1774       break;
1775 
1776     case CONJ_EXPR:
1777       if (TREE_CODE (arg0) == COMPLEX_CST)
1778 	{
1779 	  tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
1780 					  TREE_TYPE (type));
1781 	  return build_complex (type, TREE_REALPART (arg0), ipart);
1782 	}
1783       break;
1784 
1785     case BIT_NOT_EXPR:
1786       if (TREE_CODE (arg0) == INTEGER_CST)
1787 	return fold_not_const (arg0, type);
1788       else if (POLY_INT_CST_P (arg0))
1789 	return wide_int_to_tree (type, -poly_int_cst_value (arg0) - 1);
1790       /* Perform BIT_NOT_EXPR on each element individually.  */
1791       else if (TREE_CODE (arg0) == VECTOR_CST)
1792 	{
1793 	  tree elem;
1794 
1795 	  /* This can cope with stepped encodings because ~x == -1 - x.  */
1796 	  tree_vector_builder elements;
1797 	  elements.new_unary_operation (type, arg0, true);
1798 	  unsigned int i, count = elements.encoded_nelts ();
1799 	  for (i = 0; i < count; ++i)
1800 	    {
1801 	      elem = VECTOR_CST_ELT (arg0, i);
1802 	      elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
1803 	      if (elem == NULL_TREE)
1804 		break;
1805 	      elements.quick_push (elem);
1806 	    }
1807 	  if (i == count)
1808 	    return elements.build ();
1809 	}
1810       break;
1811 
1812     case TRUTH_NOT_EXPR:
1813       if (TREE_CODE (arg0) == INTEGER_CST)
1814 	return constant_boolean_node (integer_zerop (arg0), type);
1815       break;
1816 
1817     case REALPART_EXPR:
1818       if (TREE_CODE (arg0) == COMPLEX_CST)
1819 	return fold_convert (type, TREE_REALPART (arg0));
1820       break;
1821 
1822     case IMAGPART_EXPR:
1823       if (TREE_CODE (arg0) == COMPLEX_CST)
1824 	return fold_convert (type, TREE_IMAGPART (arg0));
1825       break;
1826 
1827     case VEC_UNPACK_LO_EXPR:
1828     case VEC_UNPACK_HI_EXPR:
1829     case VEC_UNPACK_FLOAT_LO_EXPR:
1830     case VEC_UNPACK_FLOAT_HI_EXPR:
1831     case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
1832     case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
1833       {
1834 	unsigned HOST_WIDE_INT out_nelts, in_nelts, i;
1835 	enum tree_code subcode;
1836 
1837 	if (TREE_CODE (arg0) != VECTOR_CST)
1838 	  return NULL_TREE;
1839 
1840 	if (!VECTOR_CST_NELTS (arg0).is_constant (&in_nelts))
1841 	  return NULL_TREE;
1842 	out_nelts = in_nelts / 2;
1843 	gcc_assert (known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1844 
1845 	unsigned int offset = 0;
1846 	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
1847 				   || code == VEC_UNPACK_FLOAT_LO_EXPR
1848 				   || code == VEC_UNPACK_FIX_TRUNC_LO_EXPR))
1849 	  offset = out_nelts;
1850 
1851 	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
1852 	  subcode = NOP_EXPR;
1853 	else if (code == VEC_UNPACK_FLOAT_LO_EXPR
1854 		 || code == VEC_UNPACK_FLOAT_HI_EXPR)
1855 	  subcode = FLOAT_EXPR;
1856 	else
1857 	  subcode = FIX_TRUNC_EXPR;
1858 
1859 	tree_vector_builder elts (type, out_nelts, 1);
1860 	for (i = 0; i < out_nelts; i++)
1861 	  {
1862 	    tree elt = fold_convert_const (subcode, TREE_TYPE (type),
1863 					   VECTOR_CST_ELT (arg0, i + offset));
1864 	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1865 	      return NULL_TREE;
1866 	    elts.quick_push (elt);
1867 	  }
1868 
1869 	return elts.build ();
1870       }
1871 
1872     case VEC_DUPLICATE_EXPR:
1873       if (CONSTANT_CLASS_P (arg0))
1874 	return build_vector_from_val (type, arg0);
1875       return NULL_TREE;
1876 
1877     default:
1878       break;
1879     }
1880 
1881   return NULL_TREE;
1882 }
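
/* Illustrative sketch, not part of the original file (hypothetical
   helper, guarded by "#if 0"): folding a unary operation on an
   INTEGER_CST.  fold_not_const computes the bitwise complement, so
   ~5 folds to -6 for a signed int.  */
#if 0
static void
const_unop_example (void)
{
  tree five = build_int_cst (integer_type_node, 5);
  tree notv = const_unop (BIT_NOT_EXPR, integer_type_node, five);
  gcc_assert (tree_int_cst_equal (notv,
				  build_int_cst (integer_type_node, -6)));
}
#endif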
1883 
1884 /* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
1885    indicates which particular sizetype to create.  */
1886 
1887 tree
1888 size_int_kind (poly_int64 number, enum size_type_kind kind)
1889 {
1890   return build_int_cst (sizetype_tab[(int) kind], number);
1891 }
1892 
1893 /* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
1894    is a tree code.  The type of the result is taken from the operands.
1895    Both must be equivalent integer types, as per int_binop_types_match_p.
1896    If the operands are constant, so is the result.  */
1897 
1898 tree
1899 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1900 {
1901   tree type = TREE_TYPE (arg0);
1902 
1903   if (arg0 == error_mark_node || arg1 == error_mark_node)
1904     return error_mark_node;
1905 
1906   gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1907                                        TREE_TYPE (arg1)));
1908 
1909   /* Handle the special case of two poly_int constants faster.  */
1910   if (poly_int_tree_p (arg0) && poly_int_tree_p (arg1))
1911     {
1912       /* And some specific cases even faster than that.  */
1913       if (code == PLUS_EXPR)
1914 	{
1915 	  if (integer_zerop (arg0)
1916 	      && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
1917 	    return arg1;
1918 	  if (integer_zerop (arg1)
1919 	      && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
1920 	    return arg0;
1921 	}
1922       else if (code == MINUS_EXPR)
1923 	{
1924 	  if (integer_zerop (arg1)
1925 	      && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
1926 	    return arg0;
1927 	}
1928       else if (code == MULT_EXPR)
1929 	{
1930 	  if (integer_onep (arg0)
1931 	      && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
1932 	    return arg1;
1933 	}
1934 
1935       /* Handle general case of two integer constants.  For sizetype
1936          constant calculations we always want to know about overflow,
1937 	 even in the unsigned case.  */
1938       tree res = int_const_binop (code, arg0, arg1, -1);
1939       if (res != NULL_TREE)
1940 	return res;
1941     }
1942 
1943   return fold_build2_loc (loc, code, type, arg0, arg1);
1944 }
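
/* Illustrative sketch, not part of the original file (hypothetical
   helper, guarded by "#if 0"): the fast paths above return an operand
   unchanged when adding zero or multiplying by one, without building
   a new node, while the general path folds the constants.  */
#if 0
static void
size_binop_example (void)
{
  tree n = size_int (42);
  gcc_assert (size_binop (PLUS_EXPR, size_zero_node, n) == n);
  gcc_assert (size_binop (MULT_EXPR, size_one_node, n) == n);
  gcc_assert (tree_to_uhwi (size_binop (MINUS_EXPR, n, size_int (2)))
	      == 40);
}
#endif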
1945 
1946 /* Given two values, either both of sizetype or both of bitsizetype,
1947    compute the difference between the two values.  Return the value
1948    in the signed type corresponding to the type of the operands.  */
1949 
1950 tree
1951 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1952 {
1953   tree type = TREE_TYPE (arg0);
1954   tree ctype;
1955 
1956   gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1957 				       TREE_TYPE (arg1)));
1958 
1959   /* If the type is already signed, just do the simple thing.  */
1960   if (!TYPE_UNSIGNED (type))
1961     return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1962 
1963   if (type == sizetype)
1964     ctype = ssizetype;
1965   else if (type == bitsizetype)
1966     ctype = sbitsizetype;
1967   else
1968     ctype = signed_type_for (type);
1969 
1970   /* If either operand is not a constant, do the conversions to the signed
1971      type and subtract.  The hardware will do the right thing with any
1972      overflow in the subtraction.  */
1973   if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1974     return size_binop_loc (loc, MINUS_EXPR,
1975 			   fold_convert_loc (loc, ctype, arg0),
1976 			   fold_convert_loc (loc, ctype, arg1));
1977 
1978   /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1979      Otherwise, subtract the other way, convert to CTYPE (we know that can't
1980      overflow) and negate (which can't either).  Special-case a result
1981      of zero while we're here.  */
1982   if (tree_int_cst_equal (arg0, arg1))
1983     return build_int_cst (ctype, 0);
1984   else if (tree_int_cst_lt (arg1, arg0))
1985     return fold_convert_loc (loc, ctype,
1986 			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1987   else
1988     return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1989 			   fold_convert_loc (loc, ctype,
1990 					     size_binop_loc (loc,
1991 							     MINUS_EXPR,
1992 							     arg1, arg0)));
1993 }
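
/* Illustrative sketch, not part of the original file (hypothetical
   helper, guarded by "#if 0"): for unsigned sizetype constants the
   difference is computed in ssizetype, subtracting the smaller value
   from the larger and negating, so neither step can overflow.  */
#if 0
static void
size_diffop_example (void)
{
  tree d = size_diffop_loc (UNKNOWN_LOCATION, size_int (4), size_int (9));
  /* 4 - 9 yields ssizetype -5 rather than a huge unsigned value.  */
  gcc_assert (TREE_TYPE (d) == ssizetype
	      && tree_int_cst_equal (d, build_int_cst (ssizetype, -5)));
}
#endif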
1994 
1995 /* A subroutine of fold_convert_const handling conversions of an
1996    INTEGER_CST to another integer type.  */
1997 
1998 static tree
1999 fold_convert_const_int_from_int (tree type, const_tree arg1)
2000 {
2001   /* Given an integer constant, make a new constant with the new type,
2002      appropriately sign-extended or truncated.  Use widest_int
2003      so that any extension is done according to ARG1's type.  */
2004   return force_fit_type (type, wi::to_widest (arg1),
2005 			 !POINTER_TYPE_P (TREE_TYPE (arg1)),
2006 			 TREE_OVERFLOW (arg1));
2007 }
2008 
2009 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2010    to an integer type.  */
2011 
2012 static tree
2013 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2014 {
2015   bool overflow = false;
2016   tree t;
2017 
2018   /* The following code implements the floating point to integer
2019      conversion rules required by the Java Language Specification:
2020      IEEE NaNs are mapped to zero and values that overflow the
2021      target precision saturate, i.e. values greater than INT_MAX
2022      are mapped to INT_MAX, and values less than INT_MIN are
2023      mapped to INT_MIN.  These semantics are allowed by the C and
2024      C++ standards, which simply state that the behavior of
2025      FP-to-integer conversion is unspecified upon overflow.  */
2026 
2027   wide_int val;
2028   REAL_VALUE_TYPE r;
2029   REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2030 
2031   switch (code)
2032     {
2033     case FIX_TRUNC_EXPR:
2034       real_trunc (&r, VOIDmode, &x);
2035       break;
2036 
2037     default:
2038       gcc_unreachable ();
2039     }
2040 
2041   /* If R is NaN, return zero and show we have an overflow.  */
2042   if (REAL_VALUE_ISNAN (r))
2043     {
2044       overflow = true;
2045       val = wi::zero (TYPE_PRECISION (type));
2046     }
2047 
2048   /* See if R is less than the lower bound or greater than the
2049      upper bound.  */
2050 
2051   if (! overflow)
2052     {
2053       tree lt = TYPE_MIN_VALUE (type);
2054       REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2055       if (real_less (&r, &l))
2056 	{
2057 	  overflow = true;
2058 	  val = wi::to_wide (lt);
2059 	}
2060     }
2061 
2062   if (! overflow)
2063     {
2064       tree ut = TYPE_MAX_VALUE (type);
2065       if (ut)
2066 	{
2067 	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2068 	  if (real_less (&u, &r))
2069 	    {
2070 	      overflow = true;
2071 	      val = wi::to_wide (ut);
2072 	    }
2073 	}
2074     }
2075 
2076   if (! overflow)
2077     val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
2078 
2079   t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
2080   return t;
2081 }
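
/* Illustrative sketch, not part of the original file (hypothetical
   helper, guarded by "#if 0"), showing the saturating semantics
   described above: a value far above the target type's maximum clamps
   to TYPE_MAX_VALUE with TREE_OVERFLOW set.  */
#if 0
static void
fp_to_int_saturation_example (void)
{
  REAL_VALUE_TYPE r;
  real_from_string (&r, "1.0e30");	/* Far above INT_MAX.  */
  tree big = build_real (double_type_node, r);
  tree t = fold_convert_const (FIX_TRUNC_EXPR, integer_type_node, big);
  gcc_assert (tree_int_cst_equal (t, TYPE_MAX_VALUE (integer_type_node))
	      && TREE_OVERFLOW (t));
}
#endif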
2082 
2083 /* A subroutine of fold_convert_const handling conversions of a
2084    FIXED_CST to an integer type.  */
2085 
2086 static tree
2087 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2088 {
2089   tree t;
2090   double_int temp, temp_trunc;
2091   scalar_mode mode;
2092 
2093   /* Right shift FIXED_CST to temp by fbit.  */
2094   temp = TREE_FIXED_CST (arg1).data;
2095   mode = TREE_FIXED_CST (arg1).mode;
2096   if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
2097     {
2098       temp = temp.rshift (GET_MODE_FBIT (mode),
2099 			  HOST_BITS_PER_DOUBLE_INT,
2100 			  SIGNED_FIXED_POINT_MODE_P (mode));
2101 
2102       /* Left shift temp to temp_trunc by fbit.  */
2103       temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
2104 				HOST_BITS_PER_DOUBLE_INT,
2105 				SIGNED_FIXED_POINT_MODE_P (mode));
2106     }
2107   else
2108     {
2109       temp = double_int_zero;
2110       temp_trunc = double_int_zero;
2111     }
2112 
2113   /* If FIXED_CST is negative, we need to round the value toward 0:
2114      if the fractional bits are not all zero, add 1 to temp.  */
2115   if (SIGNED_FIXED_POINT_MODE_P (mode)
2116       && temp_trunc.is_negative ()
2117       && TREE_FIXED_CST (arg1).data != temp_trunc)
2118     temp += double_int_one;
2119 
2120   /* Given a fixed-point constant, make a new constant with the new type,
2121      appropriately sign-extended or truncated.  */
2122   t = force_fit_type (type, temp, -1,
2123 		      (temp.is_negative ()
2124 		       && (TYPE_UNSIGNED (type)
2125 			   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2126 		      | TREE_OVERFLOW (arg1));
2127 
2128   return t;
2129 }
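
/* Worked example for the rounding above (added for illustration, not
   part of the original sources): a signed Q15.16 constant -2.5 has
   payload -2.5 * 2^16 = -163840.  The arithmetic right shift by 16
   floors it to -3; shifting back left gives -196608 != -163840, so
   fractional bits were lost and, the value being negative, 1 is added
   to produce -2, i.e. the conversion truncates toward zero.  */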
2130 
2131 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2132    to another floating point type.  */
2133 
2134 static tree
2135 fold_convert_const_real_from_real (tree type, const_tree arg1)
2136 {
2137   REAL_VALUE_TYPE value;
2138   tree t;
2139 
2140   /* Don't perform the operation if flag_signaling_nans is on
2141      and the operand is a signaling NaN.  */
2142   if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
2143       && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
2144     return NULL_TREE;
2145 
2146   real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2147   t = build_real (type, value);
2148 
2149   /* If converting an infinity or NAN to a representation that doesn't
2150      have one, set the overflow bit so that we can produce some kind of
2151      error message at the appropriate point if necessary.  It's not the
2152      most user-friendly message, but it's better than nothing.  */
2153   if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2154       && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2155     TREE_OVERFLOW (t) = 1;
2156   else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2157 	   && !MODE_HAS_NANS (TYPE_MODE (type)))
2158     TREE_OVERFLOW (t) = 1;
2159   /* Regular overflow: the conversion produced an infinity in a mode
2160      that can't represent one.  */
2161   else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2162 	   && REAL_VALUE_ISINF (value)
2163 	   && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2164     TREE_OVERFLOW (t) = 1;
2165   else
2166     TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2167   return t;
2168 }
2169 
2170 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2171    to a floating point type.  */
2172 
2173 static tree
2174 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2175 {
2176   REAL_VALUE_TYPE value;
2177   tree t;
2178 
2179   real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
2180 			   &TREE_FIXED_CST (arg1));
2181   t = build_real (type, value);
2182 
2183   TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2184   return t;
2185 }
2186 
2187 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2188    to another fixed-point type.  */
2189 
2190 static tree
2191 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2192 {
2193   FIXED_VALUE_TYPE value;
2194   tree t;
2195   bool overflow_p;
2196 
2197   overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
2198 			      &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
2199   t = build_fixed (type, value);
2200 
2201   /* Propagate overflow flags.  */
2202   if (overflow_p | TREE_OVERFLOW (arg1))
2203     TREE_OVERFLOW (t) = 1;
2204   return t;
2205 }
2206 
2207 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2208    to a fixed-point type.  */
2209 
2210 static tree
2211 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2212 {
2213   FIXED_VALUE_TYPE value;
2214   tree t;
2215   bool overflow_p;
2216   double_int di;
2217 
2218   gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2219 
2220   di.low = TREE_INT_CST_ELT (arg1, 0);
2221   if (TREE_INT_CST_NUNITS (arg1) == 1)
2222     di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2223   else
2224     di.high = TREE_INT_CST_ELT (arg1, 1);
2225 
2226   overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
2227 				       TYPE_UNSIGNED (TREE_TYPE (arg1)),
2228 				       TYPE_SATURATING (type));
2229   t = build_fixed (type, value);
2230 
2231   /* Propagate overflow flags.  */
2232   if (overflow_p | TREE_OVERFLOW (arg1))
2233     TREE_OVERFLOW (t) = 1;
2234   return t;
2235 }
2236 
2237 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2238    to a fixed-point type.  */
2239 
2240 static tree
2241 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2242 {
2243   FIXED_VALUE_TYPE value;
2244   tree t;
2245   bool overflow_p;
2246 
2247   overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
2248 					&TREE_REAL_CST (arg1),
2249 					TYPE_SATURATING (type));
2250   t = build_fixed (type, value);
2251 
2252   /* Propagate overflow flags.  */
2253   if (overflow_p | TREE_OVERFLOW (arg1))
2254     TREE_OVERFLOW (t) = 1;
2255   return t;
2256 }
2257 
2258 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2259    type TYPE.  If no simplification can be done return NULL_TREE.  */
2260 
2261 static tree
2262 fold_convert_const (enum tree_code code, tree type, tree arg1)
2263 {
2264   tree arg_type = TREE_TYPE (arg1);
2265   if (arg_type == type)
2266     return arg1;
2267 
2268   /* We can't widen types, since the runtime value could overflow the
2269      original type before being extended to the new type.  */
2270   if (POLY_INT_CST_P (arg1)
2271       && (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2272       && TYPE_PRECISION (type) <= TYPE_PRECISION (arg_type))
2273     return build_poly_int_cst (type,
2274 			       poly_wide_int::from (poly_int_cst_value (arg1),
2275 						    TYPE_PRECISION (type),
2276 						    TYPE_SIGN (arg_type)));
2277 
2278   if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2279       || TREE_CODE (type) == OFFSET_TYPE)
2280     {
2281       if (TREE_CODE (arg1) == INTEGER_CST)
2282 	return fold_convert_const_int_from_int (type, arg1);
2283       else if (TREE_CODE (arg1) == REAL_CST)
2284 	return fold_convert_const_int_from_real (code, type, arg1);
2285       else if (TREE_CODE (arg1) == FIXED_CST)
2286 	return fold_convert_const_int_from_fixed (type, arg1);
2287     }
2288   else if (TREE_CODE (type) == REAL_TYPE)
2289     {
2290       if (TREE_CODE (arg1) == INTEGER_CST)
2291 	return build_real_from_int_cst (type, arg1);
2292       else if (TREE_CODE (arg1) == REAL_CST)
2293 	return fold_convert_const_real_from_real (type, arg1);
2294       else if (TREE_CODE (arg1) == FIXED_CST)
2295 	return fold_convert_const_real_from_fixed (type, arg1);
2296     }
2297   else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2298     {
2299       if (TREE_CODE (arg1) == FIXED_CST)
2300 	return fold_convert_const_fixed_from_fixed (type, arg1);
2301       else if (TREE_CODE (arg1) == INTEGER_CST)
2302 	return fold_convert_const_fixed_from_int (type, arg1);
2303       else if (TREE_CODE (arg1) == REAL_CST)
2304 	return fold_convert_const_fixed_from_real (type, arg1);
2305     }
2306   else if (TREE_CODE (type) == VECTOR_TYPE)
2307     {
2308       if (TREE_CODE (arg1) == VECTOR_CST
2309 	  && known_eq (TYPE_VECTOR_SUBPARTS (type), VECTOR_CST_NELTS (arg1)))
2310 	{
2311 	  tree elttype = TREE_TYPE (type);
2312 	  tree arg1_elttype = TREE_TYPE (TREE_TYPE (arg1));
2313 	  /* We can't handle steps directly when extending, since the
2314 	     values need to wrap at the original precision first.  */
2315 	  bool step_ok_p
2316 	    = (INTEGRAL_TYPE_P (elttype)
2317 	       && INTEGRAL_TYPE_P (arg1_elttype)
2318 	       && TYPE_PRECISION (elttype) <= TYPE_PRECISION (arg1_elttype));
2319 	  tree_vector_builder v;
2320 	  if (!v.new_unary_operation (type, arg1, step_ok_p))
2321 	    return NULL_TREE;
2322 	  unsigned int len = v.encoded_nelts ();
2323 	  for (unsigned int i = 0; i < len; ++i)
2324 	    {
2325 	      tree elt = VECTOR_CST_ELT (arg1, i);
2326 	      tree cvt = fold_convert_const (code, elttype, elt);
2327 	      if (cvt == NULL_TREE)
2328 		return NULL_TREE;
2329 	      v.quick_push (cvt);
2330 	    }
2331 	  return v.build ();
2332 	}
2333     }
2334   return NULL_TREE;
2335 }
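
/* Illustrative sketch, not part of the original file (hypothetical
   helper, guarded by "#if 0"): the dispatch above selects a worker by
   the target type first and the constant's kind second, so converting
   the integer constant 3 to double goes through
   build_real_from_int_cst.  */
#if 0
static void
fold_convert_const_example (void)
{
  tree three = build_int_cst (integer_type_node, 3);
  tree d = fold_convert_const (FLOAT_EXPR, double_type_node, three);
  gcc_assert (d != NULL_TREE && TREE_CODE (d) == REAL_CST);
}
#endif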
2336 
2337 /* Construct a vector of zero elements of vector type TYPE.  */
2338 
2339 static tree
2340 build_zero_vector (tree type)
2341 {
2342   tree t;
2343 
2344   t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2345   return build_vector_from_val (type, t);
2346 }
2347 
2348 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR.  */
2349 
2350 bool
2351 fold_convertible_p (const_tree type, const_tree arg)
2352 {
2353   tree orig = TREE_TYPE (arg);
2354 
2355   if (type == orig)
2356     return true;
2357 
2358   if (TREE_CODE (arg) == ERROR_MARK
2359       || TREE_CODE (type) == ERROR_MARK
2360       || TREE_CODE (orig) == ERROR_MARK)
2361     return false;
2362 
2363   if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2364     return true;
2365 
2366   switch (TREE_CODE (type))
2367     {
2368     case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2369     case POINTER_TYPE: case REFERENCE_TYPE:
2370     case OFFSET_TYPE:
2371       return (INTEGRAL_TYPE_P (orig)
2372 	      || (POINTER_TYPE_P (orig)
2373 		  && TYPE_PRECISION (type) <= TYPE_PRECISION (orig))
2374 	      || TREE_CODE (orig) == OFFSET_TYPE);
2375 
2376     case REAL_TYPE:
2377     case FIXED_POINT_TYPE:
2378     case VOID_TYPE:
2379       return TREE_CODE (type) == TREE_CODE (orig);
2380 
2381     case VECTOR_TYPE:
2382       return (VECTOR_TYPE_P (orig)
2383 	      && known_eq (TYPE_VECTOR_SUBPARTS (type),
2384 			   TYPE_VECTOR_SUBPARTS (orig))
2385 	      && fold_convertible_p (TREE_TYPE (type), TREE_TYPE (orig)));
2386 
2387     default:
2388       return false;
2389     }
2390 }
2391 
2392 /* Convert expression ARG to type TYPE.  Used by the middle-end for
2393    simple conversions in preference to calling the front-end's convert.  */
2394 
2395 tree
2396 fold_convert_loc (location_t loc, tree type, tree arg)
2397 {
2398   tree orig = TREE_TYPE (arg);
2399   tree tem;
2400 
2401   if (type == orig)
2402     return arg;
2403 
2404   if (TREE_CODE (arg) == ERROR_MARK
2405       || TREE_CODE (type) == ERROR_MARK
2406       || TREE_CODE (orig) == ERROR_MARK)
2407     return error_mark_node;
2408 
2409   switch (TREE_CODE (type))
2410     {
2411     case POINTER_TYPE:
2412     case REFERENCE_TYPE:
2413       /* Handle conversions between pointers to different address spaces.  */
2414       if (POINTER_TYPE_P (orig)
2415 	  && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2416 	      != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2417 	return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2418       /* fall through */
2419 
2420     case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2421     case OFFSET_TYPE:
2422       if (TREE_CODE (arg) == INTEGER_CST)
2423 	{
2424 	  tem = fold_convert_const (NOP_EXPR, type, arg);
2425 	  if (tem != NULL_TREE)
2426 	    return tem;
2427 	}
2428       if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2429 	  || TREE_CODE (orig) == OFFSET_TYPE)
2430 	return fold_build1_loc (loc, NOP_EXPR, type, arg);
2431       if (TREE_CODE (orig) == COMPLEX_TYPE)
2432 	return fold_convert_loc (loc, type,
2433 				 fold_build1_loc (loc, REALPART_EXPR,
2434 						  TREE_TYPE (orig), arg));
2435       gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2436 		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2437       return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2438 
2439     case REAL_TYPE:
2440       if (TREE_CODE (arg) == INTEGER_CST)
2441 	{
2442 	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
2443 	  if (tem != NULL_TREE)
2444 	    return tem;
2445 	}
2446       else if (TREE_CODE (arg) == REAL_CST)
2447 	{
2448 	  tem = fold_convert_const (NOP_EXPR, type, arg);
2449 	  if (tem != NULL_TREE)
2450 	    return tem;
2451 	}
2452       else if (TREE_CODE (arg) == FIXED_CST)
2453 	{
2454 	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2455 	  if (tem != NULL_TREE)
2456 	    return tem;
2457 	}
2458 
2459       switch (TREE_CODE (orig))
2460 	{
2461 	case INTEGER_TYPE:
2462 	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2463 	case POINTER_TYPE: case REFERENCE_TYPE:
2464 	  return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2465 
2466 	case REAL_TYPE:
2467 	  return fold_build1_loc (loc, NOP_EXPR, type, arg);
2468 
2469 	case FIXED_POINT_TYPE:
2470 	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2471 
2472 	case COMPLEX_TYPE:
2473 	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2474 	  return fold_convert_loc (loc, type, tem);
2475 
2476 	default:
2477 	  gcc_unreachable ();
2478 	}
2479 
2480     case FIXED_POINT_TYPE:
2481       if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2482 	  || TREE_CODE (arg) == REAL_CST)
2483 	{
2484 	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2485 	  if (tem != NULL_TREE)
2486 	    goto fold_convert_exit;
2487 	}
2488 
2489       switch (TREE_CODE (orig))
2490 	{
2491 	case FIXED_POINT_TYPE:
2492 	case INTEGER_TYPE:
2493 	case ENUMERAL_TYPE:
2494 	case BOOLEAN_TYPE:
2495 	case REAL_TYPE:
2496 	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2497 
2498 	case COMPLEX_TYPE:
2499 	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2500 	  return fold_convert_loc (loc, type, tem);
2501 
2502 	default:
2503 	  gcc_unreachable ();
2504 	}
2505 
2506     case COMPLEX_TYPE:
2507       switch (TREE_CODE (orig))
2508 	{
2509 	case INTEGER_TYPE:
2510 	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2511 	case POINTER_TYPE: case REFERENCE_TYPE:
2512 	case REAL_TYPE:
2513 	case FIXED_POINT_TYPE:
2514 	  return fold_build2_loc (loc, COMPLEX_EXPR, type,
2515 			      fold_convert_loc (loc, TREE_TYPE (type), arg),
2516 			      fold_convert_loc (loc, TREE_TYPE (type),
2517 					    integer_zero_node));
2518 	case COMPLEX_TYPE:
2519 	  {
2520 	    tree rpart, ipart;
2521 
2522 	    if (TREE_CODE (arg) == COMPLEX_EXPR)
2523 	      {
2524 		rpart = fold_convert_loc (loc, TREE_TYPE (type),
2525 				      TREE_OPERAND (arg, 0));
2526 		ipart = fold_convert_loc (loc, TREE_TYPE (type),
2527 				      TREE_OPERAND (arg, 1));
2528 		return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2529 	      }
2530 
2531 	    arg = save_expr (arg);
2532 	    rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2533 	    ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2534 	    rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2535 	    ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2536 	    return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2537 	  }
2538 
2539 	default:
2540 	  gcc_unreachable ();
2541 	}
2542 
2543     case VECTOR_TYPE:
2544       if (integer_zerop (arg))
2545 	return build_zero_vector (type);
2546       gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2547       gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2548 		  || TREE_CODE (orig) == VECTOR_TYPE);
2549       return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2550 
2551     case VOID_TYPE:
2552       tem = fold_ignored_result (arg);
2553       return fold_build1_loc (loc, NOP_EXPR, type, tem);
2554 
2555     default:
2556       if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2557 	return fold_build1_loc (loc, NOP_EXPR, type, arg);
2558       gcc_unreachable ();
2559     }
2560  fold_convert_exit:
2561   protected_set_expr_location_unshare (tem, loc);
2562   return tem;
2563 }
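
/* Illustrative sketch, not part of the original file (hypothetical
   helper, guarded by "#if 0"): fold_convert is the UNKNOWN_LOCATION
   wrapper around this function.  Constant operands fold immediately,
   and a scalar converted to a COMPLEX_TYPE receives a zero imaginary
   part, per the COMPLEX_TYPE case above.  */
#if 0
static void
fold_convert_example (void)
{
  tree c = fold_convert (complex_double_type_node,
			 build_int_cst (integer_type_node, 7));
  gcc_assert (TREE_CODE (c) == COMPLEX_CST
	      && real_zerop (TREE_IMAGPART (c)));
}
#endif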
2564 
2565 /* Return false if expr can be assumed not to be an lvalue, true
2566    otherwise.  */
2567 
2568 static bool
2569 maybe_lvalue_p (const_tree x)
2570 {
2571   /* We only need to wrap lvalue tree codes.  */
2572   switch (TREE_CODE (x))
2573   {
2574   case VAR_DECL:
2575   case PARM_DECL:
2576   case RESULT_DECL:
2577   case LABEL_DECL:
2578   case FUNCTION_DECL:
2579   case SSA_NAME:
2580 
2581   case COMPONENT_REF:
2582   case MEM_REF:
2583   case INDIRECT_REF:
2584   case ARRAY_REF:
2585   case ARRAY_RANGE_REF:
2586   case BIT_FIELD_REF:
2587   case OBJ_TYPE_REF:
2588 
2589   case REALPART_EXPR:
2590   case IMAGPART_EXPR:
2591   case PREINCREMENT_EXPR:
2592   case PREDECREMENT_EXPR:
2593   case SAVE_EXPR:
2594   case TRY_CATCH_EXPR:
2595   case WITH_CLEANUP_EXPR:
2596   case COMPOUND_EXPR:
2597   case MODIFY_EXPR:
2598   case TARGET_EXPR:
2599   case COND_EXPR:
2600   case BIND_EXPR:
2601   case VIEW_CONVERT_EXPR:
2602     break;
2603 
2604   default:
2605     /* Assume the worst for front-end tree codes.  */
2606     if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2607       break;
2608     return false;
2609   }
2610 
2611   return true;
2612 }
2613 
2614 /* Return an expr equal to X but certainly not valid as an lvalue.  */
2615 
2616 tree
2617 non_lvalue_loc (location_t loc, tree x)
2618 {
2619   /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2620      us.  */
2621   if (in_gimple_form)
2622     return x;
2623 
2624   if (! maybe_lvalue_p (x))
2625     return x;
2626   return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2627 }
2628 
2629 /* When pedantic, return an expr equal to X but certainly not valid as a
2630    pedantic lvalue.  Otherwise, return X.  */
2631 
2632 static tree
2633 pedantic_non_lvalue_loc (location_t loc, tree x)
2634 {
2635   return protected_set_expr_location_unshare (x, loc);
2636 }
2637 
2638 /* Given a tree comparison code, return the code that is the logical inverse.
2639    It is generally not safe to do this for floating-point comparisons, except
2640    for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2641    ERROR_MARK in this case.  */
2642 
2643 enum tree_code
2644 invert_tree_comparison (enum tree_code code, bool honor_nans)
2645 {
2646   if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2647       && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2648     return ERROR_MARK;
2649 
2650   switch (code)
2651     {
2652     case EQ_EXPR:
2653       return NE_EXPR;
2654     case NE_EXPR:
2655       return EQ_EXPR;
2656     case GT_EXPR:
2657       return honor_nans ? UNLE_EXPR : LE_EXPR;
2658     case GE_EXPR:
2659       return honor_nans ? UNLT_EXPR : LT_EXPR;
2660     case LT_EXPR:
2661       return honor_nans ? UNGE_EXPR : GE_EXPR;
2662     case LE_EXPR:
2663       return honor_nans ? UNGT_EXPR : GT_EXPR;
2664     case LTGT_EXPR:
2665       return UNEQ_EXPR;
2666     case UNEQ_EXPR:
2667       return LTGT_EXPR;
2668     case UNGT_EXPR:
2669       return LE_EXPR;
2670     case UNGE_EXPR:
2671       return LT_EXPR;
2672     case UNLT_EXPR:
2673       return GE_EXPR;
2674     case UNLE_EXPR:
2675       return GT_EXPR;
2676     case ORDERED_EXPR:
2677       return UNORDERED_EXPR;
2678     case UNORDERED_EXPR:
2679       return ORDERED_EXPR;
2680     default:
2681       gcc_unreachable ();
2682     }
2683 }
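
/* Illustrative sketch, not part of the original file (hypothetical
   helper, guarded by "#if 0"): when NaNs are honored the inverse of an
   ordered comparison is the corresponding unordered one, and under
   -ftrapping-math the inversion is refused entirely.  */
#if 0
static void
invert_comparison_example (void)
{
  /* !(a < b) is (a >= b) only when NaNs cannot appear...  */
  gcc_assert (invert_tree_comparison (LT_EXPR, false) == GE_EXPR);
  /* ...otherwise it is the non-trapping UNGE, or ERROR_MARK if
     trapping math is enabled.  */
  gcc_assert (invert_tree_comparison (LT_EXPR, true)
	      == (flag_trapping_math ? ERROR_MARK : UNGE_EXPR));
}
#endif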
2684 
2685 /* Similar, but return the comparison that results if the operands are
2686    swapped.  This is safe for floating-point.  */
2687 
2688 enum tree_code
2689 swap_tree_comparison (enum tree_code code)
2690 {
2691   switch (code)
2692     {
2693     case EQ_EXPR:
2694     case NE_EXPR:
2695     case ORDERED_EXPR:
2696     case UNORDERED_EXPR:
2697     case LTGT_EXPR:
2698     case UNEQ_EXPR:
2699       return code;
2700     case GT_EXPR:
2701       return LT_EXPR;
2702     case GE_EXPR:
2703       return LE_EXPR;
2704     case LT_EXPR:
2705       return GT_EXPR;
2706     case LE_EXPR:
2707       return GE_EXPR;
2708     case UNGT_EXPR:
2709       return UNLT_EXPR;
2710     case UNGE_EXPR:
2711       return UNLE_EXPR;
2712     case UNLT_EXPR:
2713       return UNGT_EXPR;
2714     case UNLE_EXPR:
2715       return UNGE_EXPR;
2716     default:
2717       gcc_unreachable ();
2718     }
2719 }
2720 
2721 
2722 /* Convert a comparison tree code from an enum tree_code representation
2723    into a compcode bit-based encoding.  This function is the inverse of
2724    compcode_to_comparison.  */
2725 
2726 static enum comparison_code
2727 comparison_to_compcode (enum tree_code code)
2728 {
2729   switch (code)
2730     {
2731     case LT_EXPR:
2732       return COMPCODE_LT;
2733     case EQ_EXPR:
2734       return COMPCODE_EQ;
2735     case LE_EXPR:
2736       return COMPCODE_LE;
2737     case GT_EXPR:
2738       return COMPCODE_GT;
2739     case NE_EXPR:
2740       return COMPCODE_NE;
2741     case GE_EXPR:
2742       return COMPCODE_GE;
2743     case ORDERED_EXPR:
2744       return COMPCODE_ORD;
2745     case UNORDERED_EXPR:
2746       return COMPCODE_UNORD;
2747     case UNLT_EXPR:
2748       return COMPCODE_UNLT;
2749     case UNEQ_EXPR:
2750       return COMPCODE_UNEQ;
2751     case UNLE_EXPR:
2752       return COMPCODE_UNLE;
2753     case UNGT_EXPR:
2754       return COMPCODE_UNGT;
2755     case LTGT_EXPR:
2756       return COMPCODE_LTGT;
2757     case UNGE_EXPR:
2758       return COMPCODE_UNGE;
2759     default:
2760       gcc_unreachable ();
2761     }
2762 }
2763 
2764 /* Convert a compcode bit-based encoding of a comparison operator back
2765    to GCC's enum tree_code representation.  This function is the
2766    inverse of comparison_to_compcode.  */
2767 
2768 static enum tree_code
2769 compcode_to_comparison (enum comparison_code code)
2770 {
2771   switch (code)
2772     {
2773     case COMPCODE_LT:
2774       return LT_EXPR;
2775     case COMPCODE_EQ:
2776       return EQ_EXPR;
2777     case COMPCODE_LE:
2778       return LE_EXPR;
2779     case COMPCODE_GT:
2780       return GT_EXPR;
2781     case COMPCODE_NE:
2782       return NE_EXPR;
2783     case COMPCODE_GE:
2784       return GE_EXPR;
2785     case COMPCODE_ORD:
2786       return ORDERED_EXPR;
2787     case COMPCODE_UNORD:
2788       return UNORDERED_EXPR;
2789     case COMPCODE_UNLT:
2790       return UNLT_EXPR;
2791     case COMPCODE_UNEQ:
2792       return UNEQ_EXPR;
2793     case COMPCODE_UNLE:
2794       return UNLE_EXPR;
2795     case COMPCODE_UNGT:
2796       return UNGT_EXPR;
2797     case COMPCODE_LTGT:
2798       return LTGT_EXPR;
2799     case COMPCODE_UNGE:
2800       return UNGE_EXPR;
2801     default:
2802       gcc_unreachable ();
2803     }
2804 }
2805 
2806 /* Return true if COND1 tests the opposite condition of COND2.  */
2807 
2808 bool
2809 inverse_conditions_p (const_tree cond1, const_tree cond2)
2810 {
2811   return (COMPARISON_CLASS_P (cond1)
2812 	  && COMPARISON_CLASS_P (cond2)
2813 	  && (invert_tree_comparison
2814 	      (TREE_CODE (cond1),
2815 	       HONOR_NANS (TREE_OPERAND (cond1, 0))) == TREE_CODE (cond2))
2816 	  && operand_equal_p (TREE_OPERAND (cond1, 0),
2817 			      TREE_OPERAND (cond2, 0), 0)
2818 	  && operand_equal_p (TREE_OPERAND (cond1, 1),
2819 			      TREE_OPERAND (cond2, 1), 0));
2820 }
2821 
2822 /* Return a tree for the comparison which is the combination of
2823    doing the AND or OR (depending on CODE) of the two operations LCODE
2824    and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
2825    the possibility of trapping if the mode has NaNs, and return NULL_TREE
2826    if this makes the transformation invalid.  */
2827 
2828 tree
2829 combine_comparisons (location_t loc,
2830 		     enum tree_code code, enum tree_code lcode,
2831 		     enum tree_code rcode, tree truth_type,
2832 		     tree ll_arg, tree lr_arg)
2833 {
2834   bool honor_nans = HONOR_NANS (ll_arg);
2835   enum comparison_code lcompcode = comparison_to_compcode (lcode);
2836   enum comparison_code rcompcode = comparison_to_compcode (rcode);
2837   int compcode;
2838 
2839   switch (code)
2840     {
2841     case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2842       compcode = lcompcode & rcompcode;
2843       break;
2844 
2845     case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2846       compcode = lcompcode | rcompcode;
2847       break;
2848 
2849     default:
2850       return NULL_TREE;
2851     }
2852 
2853   if (!honor_nans)
2854     {
2855       /* Eliminate unordered comparisons, as well as LTGT and ORD
2856 	 which are not used unless the mode has NaNs.  */
2857       compcode &= ~COMPCODE_UNORD;
2858       if (compcode == COMPCODE_LTGT)
2859 	compcode = COMPCODE_NE;
2860       else if (compcode == COMPCODE_ORD)
2861 	compcode = COMPCODE_TRUE;
2862     }
2863    else if (flag_trapping_math)
2864      {
2865 	/* Check that the original operation and the optimized ones will trap
2866 	   under the same condition.  */
2867 	bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2868 		     && (lcompcode != COMPCODE_EQ)
2869 		     && (lcompcode != COMPCODE_ORD);
2870 	bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2871 		     && (rcompcode != COMPCODE_EQ)
2872 		     && (rcompcode != COMPCODE_ORD);
2873 	bool trap = (compcode & COMPCODE_UNORD) == 0
2874 		    && (compcode != COMPCODE_EQ)
2875 		    && (compcode != COMPCODE_ORD);
2876 
2877         /* In a short-circuited boolean expression the LHS might be
2878 	   such that the RHS, if evaluated, will never trap.  For
2879 	   example, in ORD (x, y) && (x < y), we evaluate the RHS only
2880 	   if neither x nor y is NaN.  (This is a mixed blessing: for
2881 	   example, the expression above will never trap, hence
2882 	   optimizing it to x < y would be invalid).  */
2883         if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2884             || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2885           rtrap = false;
2886 
2887         /* If the comparison was short-circuited, and only the RHS
2888 	   trapped, we may now generate a spurious trap.  */
2889 	if (rtrap && !ltrap
2890 	    && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2891 	  return NULL_TREE;
2892 
2893 	/* If we changed the conditions that cause a trap, we lose.  */
2894 	if ((ltrap || rtrap) != trap)
2895 	  return NULL_TREE;
2896       }
2897 
2898   if (compcode == COMPCODE_TRUE)
2899     return constant_boolean_node (true, truth_type);
2900   else if (compcode == COMPCODE_FALSE)
2901     return constant_boolean_node (false, truth_type);
2902   else
2903     {
2904       enum tree_code tcode;
2905 
2906       tcode = compcode_to_comparison ((enum comparison_code) compcode);
2907       return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2908     }
2909 }
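
/* Worked example for the combination above (added for illustration,
   not part of the original sources): because the compcode encoding is
   bit-based, (x < y || x == y) combines as COMPCODE_LT | COMPCODE_EQ,
   which is COMPCODE_LE, so the pair folds to the single comparison
   x <= y.  Conversely (x < y && x == y) ands down to COMPCODE_FALSE
   and folds to a constant false via constant_boolean_node.  */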
2910 
2911 /* Return nonzero if two operands (typically of the same tree node)
2912    are necessarily equal. FLAGS modifies behavior as follows:
2913 
2914    If OEP_ONLY_CONST is set, only return nonzero for constants.
2915    This function tests whether the operands are indistinguishable;
2916    it does not test whether they are equal using C's == operation.
2917    The distinction is important for IEEE floating point, because
2918    (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2919    (2) two NaNs may be indistinguishable, but NaN!=NaN.
2920 
2921    If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2922    even though it may hold multiple values during a function.
2923    This is because a GCC tree node guarantees that nothing else is
2924    executed between the evaluation of its "operands" (which may often
2925    be evaluated in arbitrary order).  Hence if the operands themselves
2926    don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2927    same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
2928    unset means assuming isochronic (or instantaneous) tree equivalence.
2929    Unless comparing arbitrary expression trees, such as from different
2930    statements, this flag can usually be left unset.
2931 
2932    If OEP_PURE_SAME is set, then pure functions with identical arguments
2933    are considered the same.  It is used when the caller has other ways
2934    to ensure that global memory is unchanged in between.
2935 
2936    If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2937    not values of expressions.
2938 
2939    If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
2940    such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
2941 
2942    If OEP_BITWISE is set, then require the values to be bitwise identical
2943    rather than simply numerically equal.  Do not take advantage of things
2944    like math-related flags or undefined behavior; only return true for
2945    values that are provably bitwise identical in all circumstances.
2946 
2947    Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2948    any operand with side effects.  This is unnecessarily conservative when
2949    we know that arg0 and arg1 are in disjoint code paths (such as the two
2950    arms of a ?: operator).  In addition, OEP_MATCH_SIDE_EFFECTS is used
2951    when comparing addresses with the TREE_CONSTANT flag set, so we know
2952    that &var == &var even if var is volatile.  */
2953 
2954 bool
2955 operand_compare::operand_equal_p (const_tree arg0, const_tree arg1,
2956 				  unsigned int flags)
2957 {
2958   bool r;
2959   if (verify_hash_value (arg0, arg1, flags, &r))
2960     return r;
2961 
2962   STRIP_ANY_LOCATION_WRAPPER (arg0);
2963   STRIP_ANY_LOCATION_WRAPPER (arg1);
2964 
2965   /* If either is ERROR_MARK, they aren't equal.  */
2966   if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2967       || TREE_TYPE (arg0) == error_mark_node
2968       || TREE_TYPE (arg1) == error_mark_node)
2969     return false;
2970 
2971   /* Similarly, if either does not have a type (like a template id),
2972      they aren't equal.  */
2973   if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2974     return false;
2975 
2976   /* Bitwise identity makes no sense if the values have different layouts.  */
2977   if ((flags & OEP_BITWISE)
2978       && !tree_nop_conversion_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
2979     return false;
2980 
2981   /* We cannot consider pointers to different address space equal.  */
2982   if (POINTER_TYPE_P (TREE_TYPE (arg0))
2983       && POINTER_TYPE_P (TREE_TYPE (arg1))
2984       && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2985 	  != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2986     return false;
2987 
2988   /* Check equality of integer constants before bailing out due to
2989      precision differences.  */
2990   if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2991     {
2992       /* Address of INTEGER_CST is not defined; check that we did not forget
2993 	 to drop the OEP_ADDRESS_OF flags.  */
2994       gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2995       return tree_int_cst_equal (arg0, arg1);
2996     }
2997 
2998   if (!(flags & OEP_ADDRESS_OF))
2999     {
3000       /* If the two types don't have the same signedness, then we can't consider
3001 	 them equal.  We must check this before the STRIP_NOPS calls
3002 	 because they may change the signedness of the arguments.  As pointers
3003 	 strictly don't have a signedness, require either two pointers or
3004 	 two non-pointers as well.  */
3005       if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3006 	  || POINTER_TYPE_P (TREE_TYPE (arg0))
3007 			     != POINTER_TYPE_P (TREE_TYPE (arg1)))
3008 	return false;
3009 
3010       /* If the two types don't have the same precision, then it is not safe
3011 	 to strip NOPs.  */
3012       if (element_precision (TREE_TYPE (arg0))
3013 	  != element_precision (TREE_TYPE (arg1)))
3014 	return false;
3015 
3016       STRIP_NOPS (arg0);
3017       STRIP_NOPS (arg1);
3018     }
3019 #if 0
3020   /* FIXME: Fortran FE currently produce ADDR_EXPR of NOP_EXPR. Enable the
3021      sanity check once the issue is solved.  */
3022   else
3023     /* Addresses of conversions and SSA_NAMEs (and many other things)
3024        are not defined.  Check that we did not forget to drop the
3025        OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags.  */
3026     gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
3027 			 && TREE_CODE (arg0) != SSA_NAME);
3028 #endif
3029 
3030   /* In case both args are comparisons but with different comparison
3031      code, try to swap the comparison operands of one arg to produce
3032      a match and compare that variant.  */
3033   if (TREE_CODE (arg0) != TREE_CODE (arg1)
3034       && COMPARISON_CLASS_P (arg0)
3035       && COMPARISON_CLASS_P (arg1))
3036     {
3037       enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3038 
3039       if (TREE_CODE (arg0) == swap_code)
3040 	return operand_equal_p (TREE_OPERAND (arg0, 0),
3041 			        TREE_OPERAND (arg1, 1), flags)
3042 	       && operand_equal_p (TREE_OPERAND (arg0, 1),
3043 				   TREE_OPERAND (arg1, 0), flags);
3044     }
3045 
3046   if (TREE_CODE (arg0) != TREE_CODE (arg1))
3047     {
3048       /* NOP_EXPR and CONVERT_EXPR are considered equal.  */
3049       if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
3050 	;
3051       else if (flags & OEP_ADDRESS_OF)
3052 	{
3053 	  /* If we are interested in comparing addresses ignore
3054 	     MEM_REF wrappings of the base that can appear just for
3055 	     TBAA reasons.  */
3056 	  if (TREE_CODE (arg0) == MEM_REF
3057 	      && DECL_P (arg1)
3058 	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
3059 	      && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
3060 	      && integer_zerop (TREE_OPERAND (arg0, 1)))
3061 	    return true;
3062 	  else if (TREE_CODE (arg1) == MEM_REF
3063 		   && DECL_P (arg0)
3064 		   && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
3065 		   && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
3066 		   && integer_zerop (TREE_OPERAND (arg1, 1)))
3067 	    return true;
3068 	  return false;
3069 	}
3070       else
3071 	return false;
3072     }
3073 
3074   /* When not checking addresses, this is needed for conversions and for
3075      COMPONENT_REF.  Might as well play it safe and always test this.  */
3076   if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3077       || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3078       || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
3079 	  && !(flags & OEP_ADDRESS_OF)))
3080     return false;
3081 
3082   /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3083      We don't care about side effects in that case because the SAVE_EXPR
3084      takes care of that for us. In all other cases, two expressions are
3085      equal if they have no side effects.  If we have two identical
3086      expressions with side effects that should be treated the same due
3087      to the only side effects being identical SAVE_EXPR's, that will
3088      be detected in the recursive calls below.
3089      If we are taking an invariant address of two identical objects
3090      they are necessarily equal as well.  */
3091   if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3092       && (TREE_CODE (arg0) == SAVE_EXPR
3093 	  || (flags & OEP_MATCH_SIDE_EFFECTS)
3094 	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3095     return true;
3096 
3097   /* Next handle constant cases, those for which we can return true even
3098      if OEP_ONLY_CONST is set.  */
3099   if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3100     switch (TREE_CODE (arg0))
3101       {
3102       case INTEGER_CST:
3103 	return tree_int_cst_equal (arg0, arg1);
3104 
3105       case FIXED_CST:
3106 	return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3107 				       TREE_FIXED_CST (arg1));
3108 
3109       case REAL_CST:
3110 	if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
3111 	  return true;
3112 
3113 	if (!(flags & OEP_BITWISE) && !HONOR_SIGNED_ZEROS (arg0))
3114 	  {
3115 	    /* If we do not distinguish between signed and unsigned zero,
3116 	       consider them equal.  */
3117 	    if (real_zerop (arg0) && real_zerop (arg1))
3118 	      return true;
3119 	  }
3120 	return false;
3121 
3122       case VECTOR_CST:
3123 	{
3124 	  if (VECTOR_CST_LOG2_NPATTERNS (arg0)
3125 	      != VECTOR_CST_LOG2_NPATTERNS (arg1))
3126 	    return false;
3127 
3128 	  if (VECTOR_CST_NELTS_PER_PATTERN (arg0)
3129 	      != VECTOR_CST_NELTS_PER_PATTERN (arg1))
3130 	    return false;
3131 
3132 	  unsigned int count = vector_cst_encoded_nelts (arg0);
3133 	  for (unsigned int i = 0; i < count; ++i)
3134 	    if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0, i),
3135 				  VECTOR_CST_ENCODED_ELT (arg1, i), flags))
3136 	      return false;
3137 	  return true;
3138 	}
3139 
3140       case COMPLEX_CST:
3141 	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3142 				 flags)
3143 		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3144 				    flags));
3145 
3146       case STRING_CST:
3147 	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3148 		&& ! memcmp (TREE_STRING_POINTER (arg0),
3149 			      TREE_STRING_POINTER (arg1),
3150 			      TREE_STRING_LENGTH (arg0)));
3151 
3152       case ADDR_EXPR:
3153 	gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3154 	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3155 				flags | OEP_ADDRESS_OF
3156 				| OEP_MATCH_SIDE_EFFECTS);
3157       case CONSTRUCTOR:
3158 	/* In GIMPLE empty constructors are allowed in initializers of
3159 	   aggregates.  */
3160 	return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
3161       default:
3162 	break;
3163       }
3164 
3165   /* Don't handle more cases for OEP_BITWISE, since we can't guarantee that
3166      two instances of undefined behavior will give identical results.  */
3167   if (flags & (OEP_ONLY_CONST | OEP_BITWISE))
3168     return false;
3169 
3170 /* Define macros to test an operand from arg0 and arg1 for equality and a
3171    variant that allows null and views null as being different from any
3172    non-null value.  In the latter case, if either is null, they both
3173    must be; otherwise, do the normal comparison.  */
3174 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N),	\
3175 				    TREE_OPERAND (arg1, N), flags)
3176 
3177 #define OP_SAME_WITH_NULL(N)				\
3178   ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N))	\
3179    ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case tcc_unary:
      /* Two conversions are equal only if signedness and modes match.  */
      switch (TREE_CODE (arg0))
        {
	CASE_CONVERT:
        case FIX_TRUNC_EXPR:
	  if (TYPE_UNSIGNED (TREE_TYPE (arg0))
	      != TYPE_UNSIGNED (TREE_TYPE (arg1)))
	    return false;
	  break;
	default:
	  break;
	}

      return OP_SAME (0);


    case tcc_comparison:
    case tcc_binary:
      if (OP_SAME (0) && OP_SAME (1))
	return true;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (TREE_CODE (arg0))
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 1), flags)
	      && operand_equal_p (TREE_OPERAND (arg0, 1),
				  TREE_OPERAND (arg1, 0), flags));

    case tcc_reference:
      /* If either of the pointer (or reference) expressions we are
	 dereferencing contains a side effect, these cannot be equal,
	 but their addresses can be.  */
      if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
	  && (TREE_SIDE_EFFECTS (arg0)
	      || TREE_SIDE_EFFECTS (arg1)))
	return false;

      switch (TREE_CODE (arg0))
	{
	case INDIRECT_REF:
	  if (!(flags & OEP_ADDRESS_OF))
	    {
	      if (TYPE_ALIGN (TREE_TYPE (arg0))
		  != TYPE_ALIGN (TREE_TYPE (arg1)))
		return false;
	      /* Verify that the access types are compatible.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg0))
		  != TYPE_MAIN_VARIANT (TREE_TYPE (arg1)))
		return false;
	    }
	  flags &= ~OEP_ADDRESS_OF;
	  return OP_SAME (0);

	case IMAGPART_EXPR:
	  /* Require the same offset.  */
	  if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
				TYPE_SIZE (TREE_TYPE (arg1)),
				flags & ~OEP_ADDRESS_OF))
	    return false;

	/* Fallthru.  */
	case REALPART_EXPR:
	case VIEW_CONVERT_EXPR:
	  return OP_SAME (0);

	case TARGET_MEM_REF:
	case MEM_REF:
	  if (!(flags & OEP_ADDRESS_OF))
	    {
	      /* Require equal access sizes.  */
	      if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
		  && (!TYPE_SIZE (TREE_TYPE (arg0))
		      || !TYPE_SIZE (TREE_TYPE (arg1))
		      || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
					   TYPE_SIZE (TREE_TYPE (arg1)),
					   flags)))
		return false;
	      /* Verify that the accesses happen in compatible types.  */
	      if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
		return false;
	      /* Verify that the accesses are TBAA compatible.  */
	      if (!alias_ptr_types_compatible_p
		    (TREE_TYPE (TREE_OPERAND (arg0, 1)),
		     TREE_TYPE (TREE_OPERAND (arg1, 1)))
		  || (MR_DEPENDENCE_CLIQUE (arg0)
		      != MR_DEPENDENCE_CLIQUE (arg1))
		  || (MR_DEPENDENCE_BASE (arg0)
		      != MR_DEPENDENCE_BASE (arg1)))
		return false;
	     /* Verify that the alignment is compatible.  */
	     if (TYPE_ALIGN (TREE_TYPE (arg0))
		 != TYPE_ALIGN (TREE_TYPE (arg1)))
		return false;
	    }
	  flags &= ~OEP_ADDRESS_OF;
	  return (OP_SAME (0) && OP_SAME (1)
		  /* TARGET_MEM_REFs require equal extra operands.  */
		  && (TREE_CODE (arg0) != TARGET_MEM_REF
		      || (OP_SAME_WITH_NULL (2)
			  && OP_SAME_WITH_NULL (3)
			  && OP_SAME_WITH_NULL (4))));

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  if (!OP_SAME (0))
	    return false;
	  flags &= ~OEP_ADDRESS_OF;
	  /* Compare the array index by value first if it is constant, as
	     the operands may have different types but the same value.  */
	  return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
				       TREE_OPERAND (arg1, 1))
		   || OP_SAME (1))
		  && OP_SAME_WITH_NULL (2)
		  && OP_SAME_WITH_NULL (3)
		  /* Compare low bound and element size, as with
		     OEP_ADDRESS_OF we have to account for the offset
		     of the ref.  */
		  && (TREE_TYPE (TREE_OPERAND (arg0, 0))
		      == TREE_TYPE (TREE_OPERAND (arg1, 0))
		      || (operand_equal_p (array_ref_low_bound
					     (CONST_CAST_TREE (arg0)),
					   array_ref_low_bound
					     (CONST_CAST_TREE (arg1)), flags)
			  && operand_equal_p (array_ref_element_size
					        (CONST_CAST_TREE (arg0)),
					      array_ref_element_size
					        (CONST_CAST_TREE (arg1)),
					      flags))));

	case COMPONENT_REF:
	  /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
	     may be NULL when we're called to compare MEM_EXPRs.  */
	  if (!OP_SAME_WITH_NULL (0)
	      || !OP_SAME (1))
	    return false;
	  flags &= ~OEP_ADDRESS_OF;
	  return OP_SAME_WITH_NULL (2);

	case BIT_FIELD_REF:
	  if (!OP_SAME (0))
	    return false;
	  flags &= ~OEP_ADDRESS_OF;
	  return OP_SAME (1) && OP_SAME (2);

	/* Virtual table call.  */
	case OBJ_TYPE_REF:
	  {
	    if (!operand_equal_p (OBJ_TYPE_REF_EXPR (arg0),
				  OBJ_TYPE_REF_EXPR (arg1), flags))
	      return false;
	    if (tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg0))
		!= tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg1)))
	      return false;
	    if (!operand_equal_p (OBJ_TYPE_REF_OBJECT (arg0),
				  OBJ_TYPE_REF_OBJECT (arg1), flags))
	      return false;
	    if (!types_same_for_odr (obj_type_ref_class (arg0),
				     obj_type_ref_class (arg1)))
	      return false;
	    return true;
	  }

	default:
	  return false;
	}

    case tcc_expression:
      switch (TREE_CODE (arg0))
	{
	case ADDR_EXPR:
	  /* Be sure we pass the right ADDRESS_OF flag.  */
	  gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
	  return operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 0),
				  flags | OEP_ADDRESS_OF);

	case TRUTH_NOT_EXPR:
	  return OP_SAME (0);

	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	  return OP_SAME (0) && OP_SAME (1);

	case WIDEN_MULT_PLUS_EXPR:
	case WIDEN_MULT_MINUS_EXPR:
	  if (!OP_SAME (2))
	    return false;
	  /* The multiplication operands are commutative.  */
	  /* FALLTHRU */

	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	  if (OP_SAME (0) && OP_SAME (1))
	    return true;

	  /* Otherwise take into account this is a commutative operation.  */
	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg1, 1), flags)
		  && operand_equal_p (TREE_OPERAND (arg0, 1),
				      TREE_OPERAND (arg1, 0), flags));

	case COND_EXPR:
	  if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
	    return false;
	  flags &= ~OEP_ADDRESS_OF;
	  return OP_SAME (0);

	case BIT_INSERT_EXPR:
	  /* BIT_INSERT_EXPR has an implicit operand as the type precision
	     of op1.  Need to check to make sure they are the same.  */
	  if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
		 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
	    return false;
	  /* FALLTHRU */

	case VEC_COND_EXPR:
	case DOT_PROD_EXPR:
	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

	case MODIFY_EXPR:
	case INIT_EXPR:
	case COMPOUND_EXPR:
	case PREDECREMENT_EXPR:
	case PREINCREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	case POSTINCREMENT_EXPR:
	  if (flags & OEP_LEXICOGRAPHIC)
	    return OP_SAME (0) && OP_SAME (1);
	  return false;

	case CLEANUP_POINT_EXPR:
	case EXPR_STMT:
	case SAVE_EXPR:
	  if (flags & OEP_LEXICOGRAPHIC)
	    return OP_SAME (0);
	  return false;

	default:
	  return false;
	}

    case tcc_vl_exp:
      switch (TREE_CODE (arg0))
	{
	case CALL_EXPR:
	  if ((CALL_EXPR_FN (arg0) == NULL_TREE)
	      != (CALL_EXPR_FN (arg1) == NULL_TREE))
	    /* If one CALL_EXPR is an internal function call and the
	       other is a normal function call, they are not equal.  */
	    return false;
	  else if (CALL_EXPR_FN (arg0) == NULL_TREE)
	    {
	      /* If the CALL_EXPRs call different internal functions, then they
		 are not equal.  */
	      if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
		return false;
	    }
	  else
	    {
	      /* If the CALL_EXPRs call different functions, then they are not
		 equal.  */
	      if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
				     flags))
		return false;
	    }

	  /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS.  */
	  {
	    unsigned int cef = call_expr_flags (arg0);
	    if (flags & OEP_PURE_SAME)
	      cef &= ECF_CONST | ECF_PURE;
	    else
	      cef &= ECF_CONST;
	    if (!cef && !(flags & OEP_LEXICOGRAPHIC))
	      return false;
	  }

	  /* Now see if all the arguments are the same.  */
	  {
	    const_call_expr_arg_iterator iter0, iter1;
	    const_tree a0, a1;
	    for (a0 = first_const_call_expr_arg (arg0, &iter0),
		   a1 = first_const_call_expr_arg (arg1, &iter1);
		 a0 && a1;
		 a0 = next_const_call_expr_arg (&iter0),
		   a1 = next_const_call_expr_arg (&iter1))
	      if (! operand_equal_p (a0, a1, flags))
		return false;

	    /* If we get here and both argument lists are exhausted
	       then the CALL_EXPRs are equal.  */
	    return ! (a0 || a1);
	  }
	default:
	  return false;
	}

    case tcc_declaration:
      /* Consider __builtin_sqrt equal to sqrt.  */
      return (TREE_CODE (arg0) == FUNCTION_DECL
	      && fndecl_built_in_p (arg0) && fndecl_built_in_p (arg1)
	      && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
	      && (DECL_UNCHECKED_FUNCTION_CODE (arg0)
		  == DECL_UNCHECKED_FUNCTION_CODE (arg1)));

    case tcc_exceptional:
      if (TREE_CODE (arg0) == CONSTRUCTOR)
	{
	  if (CONSTRUCTOR_NO_CLEARING (arg0) != CONSTRUCTOR_NO_CLEARING (arg1))
	    return false;

	  /* In GIMPLE constructors are used only to build vectors from
	     elements.  Individual elements in the constructor must be
	     indexed in increasing order and form an initial sequence.

	     We make no effort to compare constructors in GENERIC.
	     (see sem_variable::equals in ipa-icf which can do so for
	      constants).  */
	  if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
	      || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
	    return false;

	  /* Be sure that vectors constructed have the same representation.
	     We have only checked that element precision and modes match,
	     and vectors may be BLKmode, so also check that the numbers of
	     parts match.  */
	  if (maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
			TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))))
	    return false;

	  vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
	  vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
	  unsigned int len = vec_safe_length (v0);

	  if (len != vec_safe_length (v1))
	    return false;

	  for (unsigned int i = 0; i < len; i++)
	    {
	      constructor_elt *c0 = &(*v0)[i];
	      constructor_elt *c1 = &(*v1)[i];

	      if (!operand_equal_p (c0->value, c1->value, flags)
		  /* In GIMPLE the indexes can be either NULL or matching i.
		     Double check this so we won't get false
		     positives for GENERIC.  */
		  || (c0->index
		      && (TREE_CODE (c0->index) != INTEGER_CST
			  || compare_tree_int (c0->index, i)))
		  || (c1->index
		      && (TREE_CODE (c1->index) != INTEGER_CST
			  || compare_tree_int (c1->index, i))))
		return false;
	    }
	  return true;
	}
      else if (TREE_CODE (arg0) == STATEMENT_LIST
	       && (flags & OEP_LEXICOGRAPHIC))
	{
	  /* Compare the STATEMENT_LISTs.  */
	  tree_stmt_iterator tsi1, tsi2;
	  tree body1 = CONST_CAST_TREE (arg0);
	  tree body2 = CONST_CAST_TREE (arg1);
	  for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
	       tsi_next (&tsi1), tsi_next (&tsi2))
	    {
	      /* The lists don't have the same number of statements.  */
	      if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
		return false;
	      if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
		return true;
	      if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
				    flags & (OEP_LEXICOGRAPHIC
					     | OEP_NO_HASH_CHECK)))
		return false;
	    }
	}
      return false;

    case tcc_statement:
      switch (TREE_CODE (arg0))
	{
	case RETURN_EXPR:
	  if (flags & OEP_LEXICOGRAPHIC)
	    return OP_SAME_WITH_NULL (0);
	  return false;
	case DEBUG_BEGIN_STMT:
	  if (flags & OEP_LEXICOGRAPHIC)
	    return true;
	  return false;
	default:
	  return false;
	}

    default:
      return false;
    }

#undef OP_SAME
#undef OP_SAME_WITH_NULL
}
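
/* Illustrative sketch (not part of GCC, kept unbuilt): a caller that
   compares two reference trees A and B as addresses.  With
   OEP_ADDRESS_OF the alignment and TBAA checks above are skipped and
   only the address value matters.  A and B are hypothetical trees.  */
#if 0
static bool
addresses_match_p (const_tree a, const_tree b)
{
  return operand_equal_p (a, b, OEP_ADDRESS_OF);
}
#endif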

/* Generate a hash value for an expression.  This can be used iteratively
   by passing a previous result as the HSTATE argument.  */

void
operand_compare::hash_operand (const_tree t, inchash::hash &hstate,
			       unsigned int flags)
{
  int i;
  enum tree_code code;
  enum tree_code_class tclass;

  if (t == NULL_TREE || t == error_mark_node)
    {
      hstate.merge_hash (0);
      return;
    }

  STRIP_ANY_LOCATION_WRAPPER (t);

  if (!(flags & OEP_ADDRESS_OF))
    STRIP_NOPS (t);

  code = TREE_CODE (t);

  switch (code)
    {
    /* Alas, constants aren't shared, so we can't rely on pointer
       identity.  */
    case VOID_CST:
      hstate.merge_hash (0);
      return;
    case INTEGER_CST:
      gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
      for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
	hstate.add_hwi (TREE_INT_CST_ELT (t, i));
      return;
    case REAL_CST:
      {
	unsigned int val2;
	if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
	  val2 = rvc_zero;
	else
	  val2 = real_hash (TREE_REAL_CST_PTR (t));
	hstate.merge_hash (val2);
	return;
      }
    case FIXED_CST:
      {
	unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
	hstate.merge_hash (val2);
	return;
      }
    case STRING_CST:
      hstate.add ((const void *) TREE_STRING_POINTER (t),
		  TREE_STRING_LENGTH (t));
      return;
    case COMPLEX_CST:
      hash_operand (TREE_REALPART (t), hstate, flags);
      hash_operand (TREE_IMAGPART (t), hstate, flags);
      return;
    case VECTOR_CST:
      {
	hstate.add_int (VECTOR_CST_NPATTERNS (t));
	hstate.add_int (VECTOR_CST_NELTS_PER_PATTERN (t));
	unsigned int count = vector_cst_encoded_nelts (t);
	for (unsigned int i = 0; i < count; ++i)
	  hash_operand (VECTOR_CST_ENCODED_ELT (t, i), hstate, flags);
	return;
      }
    case SSA_NAME:
      /* We can just compare by pointer.  */
      hstate.add_hwi (SSA_NAME_VERSION (t));
      return;
    case PLACEHOLDER_EXPR:
      /* The node itself doesn't matter.  */
      return;
    case BLOCK:
    case OMP_CLAUSE:
      /* Ignore.  */
      return;
    case TREE_LIST:
      /* A list of expressions, for a CALL_EXPR or as the elements of a
	 VECTOR_CST.  */
      for (; t; t = TREE_CHAIN (t))
	hash_operand (TREE_VALUE (t), hstate, flags);
      return;
    case CONSTRUCTOR:
      {
	unsigned HOST_WIDE_INT idx;
	tree field, value;
	flags &= ~OEP_ADDRESS_OF;
	hstate.add_int (CONSTRUCTOR_NO_CLEARING (t));
	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
	  {
	    /* In GIMPLE the indexes can be either NULL or matching i.  */
	    if (field == NULL_TREE)
	      field = bitsize_int (idx);
	    hash_operand (field, hstate, flags);
	    hash_operand (value, hstate, flags);
	  }
	return;
      }
    case STATEMENT_LIST:
      {
	tree_stmt_iterator i;
	for (i = tsi_start (CONST_CAST_TREE (t));
	     !tsi_end_p (i); tsi_next (&i))
	  hash_operand (tsi_stmt (i), hstate, flags);
	return;
      }
    case TREE_VEC:
      for (i = 0; i < TREE_VEC_LENGTH (t); ++i)
	hash_operand (TREE_VEC_ELT (t, i), hstate, flags);
      return;
    case IDENTIFIER_NODE:
      hstate.add_object (IDENTIFIER_HASH_VALUE (t));
      return;
    case FUNCTION_DECL:
      /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
	 Otherwise nodes that compare equal according to operand_equal_p might
	 get different hash codes.  However, don't do this for machine specific
	 or front end builtins, since the function code is overloaded in those
	 cases.  */
      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
	  && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
	{
	  t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
	  code = TREE_CODE (t);
	}
      /* FALL THROUGH */
    default:
      if (POLY_INT_CST_P (t))
	{
	  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
	    hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
	  return;
	}
      tclass = TREE_CODE_CLASS (code);

      if (tclass == tcc_declaration)
	{
	  /* DECLs have a unique ID.  */
	  hstate.add_hwi (DECL_UID (t));
	}
      else if (tclass == tcc_comparison && !commutative_tree_code (code))
	{
	  /* For comparisons that can be swapped, use the lower
	     tree code.  */
	  enum tree_code ccode = swap_tree_comparison (code);
	  if (code < ccode)
	    ccode = code;
	  hstate.add_object (ccode);
	  hash_operand (TREE_OPERAND (t, ccode != code), hstate, flags);
	  hash_operand (TREE_OPERAND (t, ccode == code), hstate, flags);
	}
      else if (CONVERT_EXPR_CODE_P (code))
	{
	  /* NOP_EXPR and CONVERT_EXPR are considered equal by
	     operand_equal_p.  */
	  enum tree_code ccode = NOP_EXPR;
	  hstate.add_object (ccode);

	  /* Don't hash the type, that can lead to having nodes which
	     compare equal according to operand_equal_p, but which
	     have different hash codes.  Make sure to include signedness
	     in the hash computation.  */
	  hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
	  hash_operand (TREE_OPERAND (t, 0), hstate, flags);
	}
      /* For OEP_ADDRESS_OF, hash MEM_EXPR[&decl, 0] the same as decl.  */
      else if (code == MEM_REF
	       && (flags & OEP_ADDRESS_OF) != 0
	       && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
	       && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
	       && integer_zerop (TREE_OPERAND (t, 1)))
	hash_operand (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
		      hstate, flags);
      /* Don't ICE on FE specific trees, or their arguments etc.
	 during operand_equal_p hash verification.  */
      else if (!IS_EXPR_CODE_CLASS (tclass))
	gcc_assert (flags & OEP_HASH_CHECK);
      else
	{
	  unsigned int sflags = flags;

	  hstate.add_object (code);

	  switch (code)
	    {
	    case ADDR_EXPR:
	      gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
	      flags |= OEP_ADDRESS_OF;
	      sflags = flags;
	      break;

	    case INDIRECT_REF:
	    case MEM_REF:
	    case TARGET_MEM_REF:
	      flags &= ~OEP_ADDRESS_OF;
	      sflags = flags;
	      break;

	    case ARRAY_REF:
	    case ARRAY_RANGE_REF:
	    case COMPONENT_REF:
	    case BIT_FIELD_REF:
	      sflags &= ~OEP_ADDRESS_OF;
	      break;

	    case COND_EXPR:
	      flags &= ~OEP_ADDRESS_OF;
	      break;

	    case WIDEN_MULT_PLUS_EXPR:
	    case WIDEN_MULT_MINUS_EXPR:
	      {
		/* The multiplication operands are commutative.  */
		inchash::hash one, two;
		hash_operand (TREE_OPERAND (t, 0), one, flags);
		hash_operand (TREE_OPERAND (t, 1), two, flags);
		hstate.add_commutative (one, two);
		hash_operand (TREE_OPERAND (t, 2), two, flags);
		return;
	      }

	    case CALL_EXPR:
	      if (CALL_EXPR_FN (t) == NULL_TREE)
		hstate.add_int (CALL_EXPR_IFN (t));
	      break;

	    case TARGET_EXPR:
	      /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
		 Usually different TARGET_EXPRs should just use
		 different temporaries in their slots.  */
	      hash_operand (TARGET_EXPR_SLOT (t), hstate, flags);
	      return;

	    /* Virtual table call.  */
	    case OBJ_TYPE_REF:
	      inchash::add_expr (OBJ_TYPE_REF_EXPR (t), hstate, flags);
	      inchash::add_expr (OBJ_TYPE_REF_TOKEN (t), hstate, flags);
	      inchash::add_expr (OBJ_TYPE_REF_OBJECT (t), hstate, flags);
	      return;
	    default:
	      break;
	    }

	  /* Don't hash the type, that can lead to having nodes which
	     compare equal according to operand_equal_p, but which
	     have different hash codes.  */
	  if (code == NON_LVALUE_EXPR)
	    {
	      /* Make sure to include signedness in the hash computation.  */
	      hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
	      hash_operand (TREE_OPERAND (t, 0), hstate, flags);
	    }

	  else if (commutative_tree_code (code))
	    {
	      /* It's a commutative expression.  We want to hash it the same
		 however it appears.  We do this by first hashing both operands
		 and then rehashing based on the order of their independent
		 hashes.  */
	      inchash::hash one, two;
	      hash_operand (TREE_OPERAND (t, 0), one, flags);
	      hash_operand (TREE_OPERAND (t, 1), two, flags);
	      hstate.add_commutative (one, two);
	    }
	  else
	    for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
	      hash_operand (TREE_OPERAND (t, i), hstate,
			    i == 0 ? flags : sflags);
	}
      return;
    }
}

bool
operand_compare::verify_hash_value (const_tree arg0, const_tree arg1,
				    unsigned int flags, bool *ret)
{
  /* When checking, verify at the outermost operand_equal_p call that
     if operand_equal_p returns nonzero then ARG0 and ARG1 have the same
     hash value.  */
  if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
    {
      if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
	{
	  if (arg0 != arg1)
	    {
	      inchash::hash hstate0 (0), hstate1 (0);
	      hash_operand (arg0, hstate0, flags | OEP_HASH_CHECK);
	      hash_operand (arg1, hstate1, flags | OEP_HASH_CHECK);
	      hashval_t h0 = hstate0.end ();
	      hashval_t h1 = hstate1.end ();
	      gcc_assert (h0 == h1);
	    }
	  *ret = true;
	}
      else
	*ret = false;

      return true;
    }

  return false;
}


static operand_compare default_compare_instance;

/* Convenience wrapper around the operand_compare class, because usually
   we do not need to play with the valueizer.  */

bool
operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
{
  return default_compare_instance.operand_equal_p (arg0, arg1, flags);
}

namespace inchash
{

/* Generate a hash value for an expression.  This can be used iteratively
   by passing a previous result as the HSTATE argument.

   This function is intended to produce the same hash for expressions which
   would compare equal using operand_equal_p.  */
void
add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
{
  default_compare_instance.hash_operand (t, hstate, flags);
}

}
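
/* Illustrative sketch (not part of GCC, kept unbuilt): the invariant
   that verify_hash_value above enforces, spelled out for two
   hypothetical GENERIC trees T1 and T2.  */
#if 0
{
  inchash::hash h1 (0), h2 (0);
  inchash::add_expr (t1, h1, 0);
  inchash::add_expr (t2, h2, 0);
  if (operand_equal_p (t1, t2, 0))
    gcc_assert (h1.end () == h2.end ());
}
#endif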

/* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
   with a different signedness or a narrower precision.  */

static bool
operand_equal_for_comparison_p (tree arg0, tree arg1)
{
  if (operand_equal_p (arg0, arg1, 0))
    return true;

  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return false;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  tree op0 = arg0;
  tree op1 = arg1;
  STRIP_NOPS (op0);
  STRIP_NOPS (op1);
  if (operand_equal_p (op0, op1, 0))
    return true;

  /* Discard a single widening conversion from ARG1 and see if the inner
     value is the same as ARG0.  */
  if (CONVERT_EXPR_P (arg1)
      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0)))
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))
         < TYPE_PRECISION (TREE_TYPE (arg1))
      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
    return true;

  return false;
}
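
/* For example (illustrative only): given int i, ARG0 = i and
   ARG1 = (long) i compare equal here, since the single widening
   conversion on ARG1 is looked through, whereas plain operand_equal_p
   would treat the differently-typed expressions as distinct.  */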

/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are nonzero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.

   If this is true, return true.  Otherwise, return false.  */

static bool
twoval_comparison_p (tree arg, tree *cval1, tree *cval2)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
	       || code == COMPOUND_EXPR))
    tclass = tcc_binary;

  switch (tclass)
    {
    case tcc_unary:
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2);

    case tcc_binary:
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
	      && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2));

    case tcc_constant:
      return true;

    case tcc_expression:
      if (code == COND_EXPR)
	return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
		&& twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2)
		&& twoval_comparison_p (TREE_OPERAND (arg, 2), cval1, cval2));
      return false;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.  For
	 the second operand, we know *CVAL1 can't be zero.  It must be that
	 one side of the comparison is each of the values; test for the
	 case where this isn't true by failing if the two operands
	 are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
			   TREE_OPERAND (arg, 1), 0))
	return false;

      if (*cval1 == 0)
	*cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
	;
      else
	return false;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
	;
      else
	return false;

      return true;

    default:
      return false;
    }
}
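
/* For example (illustrative only): for ARG = (x < y) || (x == y) this
   returns true with *CVAL1 = x and *CVAL2 = y, since only the two
   values x and y are ever compared.  For ARG = (x < y) || (z == 1) it
   returns false because a third value z appears in a comparison.  */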

/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (location_t loc, tree arg, tree old0, tree new0,
	    tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    tclass = tcc_binary;

  switch (tclass)
    {
    case tcc_unary:
      return fold_build1_loc (loc, code, type,
			  eval_subst (loc, TREE_OPERAND (arg, 0),
				      old0, new0, old1, new1));

    case tcc_binary:
      return fold_build2_loc (loc, code, type,
			  eval_subst (loc, TREE_OPERAND (arg, 0),
				      old0, new0, old1, new1),
			  eval_subst (loc, TREE_OPERAND (arg, 1),
				      old0, new0, old1, new1));

    case tcc_expression:
      switch (code)
	{
	case SAVE_EXPR:
	  return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
			     old1, new1);

	case COMPOUND_EXPR:
	  return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
			     old1, new1);

	case COND_EXPR:
	  return fold_build3_loc (loc, code, type,
			      eval_subst (loc, TREE_OPERAND (arg, 0),
					  old0, new0, old1, new1),
			      eval_subst (loc, TREE_OPERAND (arg, 1),
					  old0, new0, old1, new1),
			      eval_subst (loc, TREE_OPERAND (arg, 2),
					  old0, new0, old1, new1));
	default:
	  break;
	}
      /* Fall through - ???  */

    case tcc_comparison:
      {
	tree arg0 = TREE_OPERAND (arg, 0);
	tree arg1 = TREE_OPERAND (arg, 1);

	/* We need to check both for exact equality and tree equality.  The
	   former will be true if the operand has a side-effect.  In that
	   case, we know the operand occurred exactly once.  */

	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
	  arg0 = new0;
	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
	  arg0 = new1;

	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
	  arg1 = new0;
	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
	  arg1 = new1;

	return fold_build2_loc (loc, code, type, arg0, arg1);
      }

    default:
      return arg;
    }
}
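
/* For example (illustrative only): for ARG = (x < y) && (y == 0) with
   OLD0 = x, NEW0 = a, OLD1 = y, NEW1 = b, eval_subst rebuilds the tree
   as (a < b) && (b == 0), folding each node as it goes.  */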

/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */

tree
omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
{
  tree t = fold_convert_loc (loc, type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement cast to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    return build1_loc (loc, NOP_EXPR, void_type_node,
		       fold_ignored_result (omitted));

  if (TREE_SIDE_EFFECTS (omitted))
    return build2_loc (loc, COMPOUND_EXPR, type,
		       fold_ignored_result (omitted), t);

  return non_lvalue_loc (loc, t);
}
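
/* Illustrative sketch (not part of GCC, kept unbuilt): folding the
   hypothetical tree CALL * 0, where CALL has side effects, keeps the
   call around as the first arm of a COMPOUND_EXPR.  */
#if 0
tree zero = build_int_cst (integer_type_node, 0);
tree res = omit_one_operand_loc (loc, integer_type_node, zero, call);
/* RES is roughly COMPOUND_EXPR <CALL, 0>: CALL is still evaluated,
   but the value of the whole expression is 0.  */
#endif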

/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED1 and OMITTED2 were previously operands
   of the expression but are now not needed.

   If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
   If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
   evaluated before OMITTED2.  Otherwise, if neither has side effects,
   just do the conversion of RESULT to TYPE.  */

tree
omit_two_operands_loc (location_t loc, tree type, tree result,
		       tree omitted1, tree omitted2)
{
  tree t = fold_convert_loc (loc, type, result);

  if (TREE_SIDE_EFFECTS (omitted2))
    t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
  if (TREE_SIDE_EFFECTS (omitted1))
    t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);

  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
}


/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

static tree
fold_truth_not_expr (location_t loc, tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  location_t loc1, loc2;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      if (FLOAT_TYPE_P (op_type)
	  && flag_trapping_math
	  && code != ORDERED_EXPR && code != UNORDERED_EXPR
	  && code != NE_EXPR && code != EQ_EXPR)
	return NULL_TREE;

      code = invert_tree_comparison (code, HONOR_NANS (op_type));
      if (code == ERROR_MARK)
	return NULL_TREE;

      tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
			     TREE_OPERAND (arg, 1));
      if (TREE_NO_WARNING (arg))
	TREE_NO_WARNING (ret) = 1;
      return ret;
    }

  switch (code)
    {
    case INTEGER_CST:
      return constant_boolean_node (integer_zerop (arg), type);

    case TRUTH_AND_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_OR_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_AND_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
	 unless the second operand is a TRUTH_NOT_EXPR in which case our
	 result is the XOR of the first operand with the inside of the
	 negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
	return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
			   TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
	return build2_loc (loc, TRUTH_XOR_EXPR, type,
			   invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
			   TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_ORIF_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      {
	tree arg1 = TREE_OPERAND (arg, 1);
	tree arg2 = TREE_OPERAND (arg, 2);

	loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
	loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);

	/* A COND_EXPR may have a throw as one operand, which
	   then has void type.  Just leave void operands
	   as they are.  */
	return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
			   VOID_TYPE_P (TREE_TYPE (arg1))
			   ? arg1 : invert_truthvalue_loc (loc1, arg1),
			   VOID_TYPE_P (TREE_TYPE (arg2))
			   ? arg2 : invert_truthvalue_loc (loc2, arg2));
      }

    case COMPOUND_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
      return build2_loc (loc, COMPOUND_EXPR, type,
			 TREE_OPERAND (arg, 0),
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));

    CASE_CONVERT:
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
	return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);

      /* fall through */

    case FLOAT_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return build1_loc (loc, TREE_CODE (arg), type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
	return NULL_TREE;
      return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));

    case SAVE_EXPR:
      return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
      return build1_loc (loc, CLEANUP_POINT_EXPR, type,
			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));

    default:
      return NULL_TREE;
    }
}
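
/* For example (illustrative only): the De Morgan cases above turn
   !(a && b) into !a || !b, and the comparison case turns !(x < y)
   into x >= y, unless trapping FP math forbids the inversion.  */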

/* Fold the truth-negation of ARG.  This never alters ARG itself.  We
   assume that ARG is an operation that returns a truth value (0 or 1
   for scalars, 0 or -1 for vectors).  Return the folded expression if
   folding is successful.  Otherwise, return NULL_TREE.  */

static tree
fold_invert_truthvalue (location_t loc, tree arg)
{
  tree type = TREE_TYPE (arg);
  return fold_unary_loc (loc, VECTOR_TYPE_P (type)
			      ? BIT_NOT_EXPR
			      : TRUTH_NOT_EXPR,
			 type, arg);
}

/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1 for scalars, 0 or -1 for vectors).  */

tree
invert_truthvalue_loc (location_t loc, tree arg)
{
  if (TREE_CODE (arg) == ERROR_MARK)
    return arg;

  tree type = TREE_TYPE (arg);
  return fold_build1_loc (loc, VECTOR_TYPE_P (type)
			       ? BIT_NOT_EXPR
			       : TRUTH_NOT_EXPR,
			  type, arg);
}

/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero
   and uses reverse storage order if REVERSEP is nonzero.  ORIG_INNER
   is the original memory reference used to preserve the alias set of
   the access.  */

static tree
make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
		    HOST_WIDE_INT bitsize, poly_int64 bitpos,
		    int unsignedp, int reversep)
{
  tree result, bftype;

  /* Attempt not to lose the access path if possible.  */
  if (TREE_CODE (orig_inner) == COMPONENT_REF)
    {
      tree ninner = TREE_OPERAND (orig_inner, 0);
      machine_mode nmode;
      poly_int64 nbitsize, nbitpos;
      tree noffset;
      int nunsignedp, nreversep, nvolatilep = 0;
      tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
				       &noffset, &nmode, &nunsignedp,
				       &nreversep, &nvolatilep);
      if (base == inner
	  && noffset == NULL_TREE
	  && known_subrange_p (bitpos, bitsize, nbitpos, nbitsize)
	  && !reversep
	  && !nreversep
	  && !nvolatilep)
	{
	  inner = ninner;
	  bitpos -= nbitpos;
	}
    }

  alias_set_type iset = get_alias_set (orig_inner);
  if (iset == 0 && get_alias_set (inner) != iset)
    inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
			 build_fold_addr_expr (inner),
			 build_int_cst (ptr_type_node, 0));

  if (known_eq (bitpos, 0) && !reversep)
    {
      tree size = TYPE_SIZE (TREE_TYPE (inner));
      if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
	   || POINTER_TYPE_P (TREE_TYPE (inner)))
	  && tree_fits_shwi_p (size)
	  && tree_to_shwi (size) == bitsize)
	return fold_convert_loc (loc, type, inner);
    }

  bftype = type;
  if (TYPE_PRECISION (bftype) != bitsize
      || TYPE_UNSIGNED (bftype) == !unsignedp)
    bftype = build_nonstandard_integer_type (bitsize, 0);

  result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
		       bitsize_int (bitsize), bitsize_int (bitpos));
  REF_REVERSE_STORAGE_ORDER (result) = reversep;

  if (bftype != type)
    result = fold_convert_loc (loc, type, result);

  return result;
}
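
/* For example (illustrative only): a call with BITSIZE = 8 and
   BITPOS = 16 builds roughly BIT_FIELD_REF <inner, 8, 16>, i.e. a read
   of 8 bits starting at bit offset 16 of INNER, possibly converted to
   TYPE afterwards.  */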

/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (location_t loc, enum tree_code code,
			    tree compare_type, tree lhs, tree rhs)
{
  poly_int64 plbitpos, plbitsize, rbitpos, rbitsize;
  HOST_WIDE_INT lbitpos, lbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  machine_mode lmode, rmode;
  scalar_int_mode nmode;
  int lunsignedp, runsignedp;
  int lreversep, rreversep;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &plbitsize, &plbitpos, &offset, &lmode,
				&lunsignedp, &lreversep, &lvolatilep);
  if (linner == lhs
      || !known_size_p (plbitsize)
      || !plbitsize.is_constant (&lbitsize)
      || !plbitpos.is_constant (&lbitpos)
      || known_eq (lbitsize, GET_MODE_BITSIZE (lmode))
      || offset != 0
      || TREE_CODE (linner) == PLACEHOLDER_EXPR
      || lvolatilep)
    return 0;

  if (const_p)
    rreversep = lreversep;
  else
   {
     /* If this is not a constant, we can only do something if bit positions,
	sizes, signedness and storage order are the same.  */
     rinner
       = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
			      &runsignedp, &rreversep, &rvolatilep);

     if (rinner == rhs
	 || maybe_ne (lbitpos, rbitpos)
	 || maybe_ne (lbitsize, rbitsize)
	 || lunsignedp != runsignedp
	 || lreversep != rreversep
	 || offset != 0
	 || TREE_CODE (rinner) == PLACEHOLDER_EXPR
	 || rvolatilep)
       return 0;
   }

  /* Honor the C++ memory model and mimic what RTL expansion does.  */
  poly_uint64 bitstart = 0;
  poly_uint64 bitend = 0;
  if (TREE_CODE (lhs) == COMPONENT_REF)
    {
      get_bit_range (&bitstart, &bitend, lhs, &plbitpos, &offset);
      if (!plbitpos.is_constant (&lbitpos) || offset != NULL_TREE)
	return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
		      const_p ? TYPE_ALIGN (TREE_TYPE (linner))
		      : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
			     TYPE_ALIGN (TREE_TYPE (rinner))),
		      BITS_PER_WORD, false, &nmode))
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field.  */
  mask = build_int_cst_type (unsigned_type, -1);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
  mask = const_binop (RSHIFT_EXPR, mask,
		      size_int (nbitsize - lbitsize - lbitpos));

  if (! const_p)
    {
      if (nbitpos < 0)
	return 0;

      /* If not comparing with constant, just rework the comparison
	 and return.  */
      tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
				    nbitsize, nbitpos, 1, lreversep);
      t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
      tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
				    nbitsize, nbitpos, 1, rreversep);
      t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
      return fold_build2_loc (loc, code, compare_type, t1, t2);
    }

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (wi::lrshift (wi::to_wide (rhs), lbitsize) != 0)
	{
	  warning (0, "comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }
  else
    {
      wide_int tem = wi::arshift (wi::to_wide (rhs), lbitsize - 1);
      if (tem != 0 && tem != -1)
	{
	  warning (0, "comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }

  if (nbitpos < 0)
    return 0;

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = build_int_cst (type, 0);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
			    nbitsize, nbitpos, 1, lreversep);

  rhs = const_binop (BIT_AND_EXPR,
		     const_binop (LSHIFT_EXPR,
				  fold_convert_loc (loc, unsigned_type, rhs),
				  size_int (lbitpos)),
		     mask);

  lhs = build2_loc (loc, code, compare_type,
		    build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
  return lhs;
}
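
/* For example (illustrative only): for struct S { int a : 3; } s,
   the test s.a == 2 can become roughly

     (WORD & MASK) == (2 << SHIFT)

   where WORD is a mode-sized load covering the bit-field, MASK selects
   the field's bits within that word, and SHIFT is the field's bit
   offset, so the shift implicit in the bit-field extraction is
   avoided.  */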

/* Subroutine for fold_truth_andor_1: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PREVERSEP is set to the storage order of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
			HOST_WIDE_INT *pbitpos, machine_mode *pmode,
			int *punsignedp, int *preversep, int *pvolatilep,
			tree *pmask, tree *pand_mask)
{
  tree exp = *exp_;
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return NULL_TREE;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (CONVERT_EXPR_P (exp)
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
	return NULL_TREE;
    }

  poly_int64 poly_bitsize, poly_bitpos;
  inner = get_inner_reference (exp, &poly_bitsize, &poly_bitpos, &offset,
			       pmode, punsignedp, preversep, pvolatilep);
  if ((inner == exp && and_mask == 0)
      || !poly_bitsize.is_constant (pbitsize)
      || !poly_bitpos.is_constant (pbitpos)
      || *pbitsize < 0
      || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR
      /* Reject out-of-bound accesses (PR79731).  */
      || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
	  && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
			       *pbitpos + *pbitsize) < 0))
    return NULL_TREE;

  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  if (unsigned_type == NULL_TREE)
    return NULL_TREE;

  *exp_ = exp;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
    *punsignedp = TYPE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield.  */
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_cst_type (unsigned_type, -1);

  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
			fold_convert_loc (loc, unsigned_type, and_mask), mask);

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
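
/* For example (illustrative only): for EXP = (unsigned char) s.f & 0x7,
   where s.f is a 5-bit field, the BIT_AND_EXPR mask 0x7 is recorded in
   *PAND_MASK, the field width yields the mask 0x1f, and *PMASK is their
   intersection 0x7.  */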

/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bit positions and MASK is SIGNED.  */

static bool
all_ones_mask_p (const_tree mask, unsigned int size)
{
  tree type = TREE_TYPE (mask);
  unsigned int precision = TYPE_PRECISION (type);

  /* If this function returns true when the type of the mask is
     UNSIGNED, then there will be errors.  In particular see
     gcc.c-torture/execute/990326-1.c.  There does not appear to be
     any documentation paper trail as to why this is so.  But the pre
     wide-int code worked with that restriction and it has been preserved
     here.  */
  if (size > precision || TYPE_SIGN (type) == UNSIGNED)
    return false;

  return wi::mask (size, false, precision) == wi::to_wide (mask);
}
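
/* For example (illustrative only): with a signed 8-bit mask type,
   MASK = 0x07 and SIZE = 3 returns true (three low-order ones), while
   MASK = 0x0e, or any MASK of an unsigned type, returns false.  */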

/* Subroutine for fold: determine if VAL is the INTEGER_CST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

tree
sign_bit_p (tree exp, const_tree val)
{
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (wi::only_sign_bit_p (wi::to_wide (val), width))
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
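
/* For example (illustrative only): for EXP of type signed char,
   VAL = -128 (bit pattern 0x80) is the sign-bit value and EXP is
   returned; for a 32-bit int the sign-bit value is 0x80000000.  */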
4766 
4767 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4768    to be evaluated unconditionally.  */
4769 
4770 static bool
simple_operand_p(const_tree exp)4771 simple_operand_p (const_tree exp)
4772 {
4773   /* Strip any conversions that don't change the machine mode.  */
4774   STRIP_NOPS (exp);
4775 
4776   return (CONSTANT_CLASS_P (exp)
4777   	  || TREE_CODE (exp) == SSA_NAME
4778 	  || (DECL_P (exp)
4779 	      && ! TREE_ADDRESSABLE (exp)
4780 	      && ! TREE_THIS_VOLATILE (exp)
4781 	      && ! DECL_NONLOCAL (exp)
4782 	      /* Don't regard global variables as simple.  They may be
4783 		 allocated in ways unknown to the compiler (shared memory,
4784 		 #pragma weak, etc).  */
4785 	      && ! TREE_PUBLIC (exp)
4786 	      && ! DECL_EXTERNAL (exp)
4787 	      /* Weakrefs are not safe to read, since they can be NULL.
4788 		 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4789 		 have the DECL_WEAK flag set.  */
4790 	      && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4791 	      /* Loading a static variable is unduly expensive, but global
4792 		 registers aren't expensive.  */
4793 	      && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4794 }
4795 
4796 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4797    to be evaluated unconditionally.
4798    In addition to simple_operand_p, we assume that comparisons, conversions,
4799    and logic-not operations are simple, if their operands are simple, too.  */
4800 
4801 static bool
4802 simple_operand_p_2 (tree exp)
4803 {
4804   enum tree_code code;
4805 
4806   if (TREE_SIDE_EFFECTS (exp) || generic_expr_could_trap_p (exp))
4807     return false;
4808 
4809   while (CONVERT_EXPR_P (exp))
4810     exp = TREE_OPERAND (exp, 0);
4811 
4812   code = TREE_CODE (exp);
4813 
4814   if (TREE_CODE_CLASS (code) == tcc_comparison)
4815     return (simple_operand_p (TREE_OPERAND (exp, 0))
4816 	    && simple_operand_p (TREE_OPERAND (exp, 1)));
4817 
4818   if (code == TRUTH_NOT_EXPR)
4819       return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4820 
4821   return simple_operand_p (exp);
4822 }
4823 
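/* E.g. a comparison of two local, non-volatile, non-static scalars
   such as "a == b" is simple here, and so is "!(a == b)" via the
   TRUTH_NOT_EXPR case; anything with side effects or that could trap
   is rejected up front.  */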
4824 
4825 /* The following functions are subroutines to fold_range_test and allow it to
4826    try to change a logical combination of comparisons into a range test.
4827 
4828    For example, both
4829 	X == 2 || X == 3 || X == 4 || X == 5
4830    and
4831 	X >= 2 && X <= 5
4832    are converted to
4833 	(unsigned) (X - 2) <= 3
4834 
4835    We describe each set of comparisons as being either inside or outside
4836    a range, using a variable named like IN_P, and then describe the
4837    range with a lower and upper bound.  If one of the bounds is omitted,
4838    it represents either the highest or lowest value of the type.
4839 
4840    In the comments below, we represent a range by two numbers in brackets
4841    preceded by a "+" to designate being inside that range, or a "-" to
4842    designate being outside that range, so the condition can be inverted by
4843    flipping the prefix.  An omitted bound is represented by a "-".  For
4844    example, "- [-, 10]" means being outside the range starting at the lowest
4845    possible value and ending at 10, in other words, being greater than 10.
4846    The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4847    always false.
4848 
4849    We set up things so that the missing bounds are handled in a consistent
4850    manner so neither a missing bound nor "true" and "false" need to be
4851    handled using a special case.  */
4852 
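/* As a worked example of the notation, "X == 2 || X == 3 || X == 4
   || X == 5" above is the single range "+ [2, 5]"; negating the whole
   test merely flips the prefix to "- [2, 5]", leaving the bounds
   untouched, which is what makes inversion cheap for the callers
   below.  */
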
4853 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4854    of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4855    and UPPER1_P are nonzero if the respective argument is an upper bound
4856    and zero for a lower.  TYPE, if nonzero, is the type of the result; it
4857    must be specified for a comparison.  ARG1 will be converted to ARG0's
4858    type if both are specified.  */
4859 
4860 static tree
4861 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4862 	     tree arg1, int upper1_p)
4863 {
4864   tree tem;
4865   int result;
4866   int sgn0, sgn1;
4867 
4868   /* If neither arg represents infinity, do the normal operation.
4869      Else, if not a comparison, return infinity.  Else handle the special
4870      comparison rules. Note that most of the cases below won't occur, but
4871      are handled for consistency.  */
4872 
4873   if (arg0 != 0 && arg1 != 0)
4874     {
4875       tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4876 			 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4877       STRIP_NOPS (tem);
4878       return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4879     }
4880 
4881   if (TREE_CODE_CLASS (code) != tcc_comparison)
4882     return 0;
4883 
4884   /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4885      for neither.  In real mathematics, we cannot assume open-ended
4886      ranges are the same.  But this is computer arithmetic, where numbers
4887      are finite.  We can therefore stand in for any unbounded end of a
4888      range with a value Z lying beyond every representable number, which
4889      permits us to treat unbounded ranges as equal.  */
4890   sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4891   sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4892   switch (code)
4893     {
4894     case EQ_EXPR:
4895       result = sgn0 == sgn1;
4896       break;
4897     case NE_EXPR:
4898       result = sgn0 != sgn1;
4899       break;
4900     case LT_EXPR:
4901       result = sgn0 < sgn1;
4902       break;
4903     case LE_EXPR:
4904       result = sgn0 <= sgn1;
4905       break;
4906     case GT_EXPR:
4907       result = sgn0 > sgn1;
4908       break;
4909     case GE_EXPR:
4910       result = sgn0 >= sgn1;
4911       break;
4912     default:
4913       gcc_unreachable ();
4914     }
4915 
4916   return constant_boolean_node (result, type);
4917 }
4918 
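/* E.g. range_binop (LT_EXPR, type, NULL_TREE, 0, c, 0) compares a
   missing lower bound (SGN0 == -1) against a constant C (SGN1 == 0)
   and yields true, since the conceptual -Z lies below any
   representable C.  */
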
4919 /* Helper routine for make_range.  Perform one step for it; return the
4920    new expression if the loop should continue or NULL_TREE if it should
4921    stop.  */
4922 
4923 tree
4924 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4925 		 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4926 		 bool *strict_overflow_p)
4927 {
4928   tree arg0_type = TREE_TYPE (arg0);
4929   tree n_low, n_high, low = *p_low, high = *p_high;
4930   int in_p = *p_in_p, n_in_p;
4931 
4932   switch (code)
4933     {
4934     case TRUTH_NOT_EXPR:
4935       /* We can only do something if the range is testing for zero.  */
4936       if (low == NULL_TREE || high == NULL_TREE
4937 	  || ! integer_zerop (low) || ! integer_zerop (high))
4938 	return NULL_TREE;
4939       *p_in_p = ! in_p;
4940       return arg0;
4941 
4942     case EQ_EXPR: case NE_EXPR:
4943     case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4944       /* We can only do something if the range is testing for zero
4945 	 and if the second operand is an integer constant.  Note that
4946 	 saying something is "in" the range we make is done by
4947 	 complementing IN_P, since it is set in the initial case of
4948 	 being not equal to zero; "out" is leaving it alone.  */
4949       if (low == NULL_TREE || high == NULL_TREE
4950 	  || ! integer_zerop (low) || ! integer_zerop (high)
4951 	  || TREE_CODE (arg1) != INTEGER_CST)
4952 	return NULL_TREE;
4953 
4954       switch (code)
4955 	{
4956 	case NE_EXPR:  /* - [c, c]  */
4957 	  low = high = arg1;
4958 	  break;
4959 	case EQ_EXPR:  /* + [c, c]  */
4960 	  in_p = ! in_p, low = high = arg1;
4961 	  break;
4962 	case GT_EXPR:  /* - [-, c] */
4963 	  low = 0, high = arg1;
4964 	  break;
4965 	case GE_EXPR:  /* + [c, -] */
4966 	  in_p = ! in_p, low = arg1, high = 0;
4967 	  break;
4968 	case LT_EXPR:  /* - [c, -] */
4969 	  low = arg1, high = 0;
4970 	  break;
4971 	case LE_EXPR:  /* + [-, c] */
4972 	  in_p = ! in_p, low = 0, high = arg1;
4973 	  break;
4974 	default:
4975 	  gcc_unreachable ();
4976 	}
4977 
4978       /* If this is an unsigned comparison, we also know that EXP is
4979 	 greater than or equal to zero.  We base the range tests we make
4980 	 on that fact, so we record it here so we can parse existing
4981 	 range tests.  We test arg0_type since often the return type
4982 	 of, e.g. EQ_EXPR, is boolean.  */
4983       if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4984 	{
4985 	  if (! merge_ranges (&n_in_p, &n_low, &n_high,
4986 			      in_p, low, high, 1,
4987 			      build_int_cst (arg0_type, 0),
4988 			      NULL_TREE))
4989 	    return NULL_TREE;
4990 
4991 	  in_p = n_in_p, low = n_low, high = n_high;
4992 
4993 	  /* If the high bound is missing, but we have a nonzero low
4994 	     bound, reverse the range so it goes from zero to the low bound
4995 	     minus 1.  */
4996 	  if (high == 0 && low && ! integer_zerop (low))
4997 	    {
4998 	      in_p = ! in_p;
4999 	      high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
5000 				  build_int_cst (TREE_TYPE (low), 1), 0);
5001 	      low = build_int_cst (arg0_type, 0);
5002 	    }
5003 	}
5004 
5005       *p_low = low;
5006       *p_high = high;
5007       *p_in_p = in_p;
5008       return arg0;
5009 
5010     case NEGATE_EXPR:
5011       /* If flag_wrapv and ARG0_TYPE is signed, make sure low and high
5012 	 are non-NULL; the normalize step below will then do the right thing.  */
5013       if (!TYPE_UNSIGNED (arg0_type)
5014 	  && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5015 	{
5016 	  if (low == NULL_TREE)
5017 	    low = TYPE_MIN_VALUE (arg0_type);
5018 	  if (high == NULL_TREE)
5019 	    high = TYPE_MAX_VALUE (arg0_type);
5020 	}
5021 
5022       /* (-x) IN [a,b] -> x in [-b, -a]  */
5023       n_low = range_binop (MINUS_EXPR, exp_type,
5024 			   build_int_cst (exp_type, 0),
5025 			   0, high, 1);
5026       n_high = range_binop (MINUS_EXPR, exp_type,
5027 			    build_int_cst (exp_type, 0),
5028 			    0, low, 0);
5029       if (n_high != 0 && TREE_OVERFLOW (n_high))
5030 	return NULL_TREE;
5031       goto normalize;
5032 
5033     case BIT_NOT_EXPR:
5034       /* ~ X -> -X - 1  */
5035       return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
5036 			 build_int_cst (exp_type, 1));
5037 
5038     case PLUS_EXPR:
5039     case MINUS_EXPR:
5040       if (TREE_CODE (arg1) != INTEGER_CST)
5041 	return NULL_TREE;
5042 
5043       /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
5044 	 move a constant to the other side.  */
5045       if (!TYPE_UNSIGNED (arg0_type)
5046 	  && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
5047 	return NULL_TREE;
5048 
5049       /* If EXP is signed, any overflow in the computation is undefined,
5050 	 so we don't worry about it so long as our computations on
5051 	 the bounds don't overflow.  For unsigned, overflow is defined
5052 	 and this is exactly the right thing.  */
5053       n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5054 			   arg0_type, low, 0, arg1, 0);
5055       n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
5056 			    arg0_type, high, 1, arg1, 0);
5057       if ((n_low != 0 && TREE_OVERFLOW (n_low))
5058 	  || (n_high != 0 && TREE_OVERFLOW (n_high)))
5059 	return NULL_TREE;
5060 
5061       if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
5062 	*strict_overflow_p = true;
5063 
5064       normalize:
5065 	/* Check for an unsigned range which has wrapped around the maximum
5066 	   value thus making n_high < n_low, and normalize it.  */
5067 	if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
5068 	  {
5069 	    low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
5070 			       build_int_cst (TREE_TYPE (n_high), 1), 0);
5071 	    high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
5072 				build_int_cst (TREE_TYPE (n_low), 1), 0);
5073 
5074 	    /* If the range is of the form +/- [ x+1, x ], we won't
5075 	       be able to normalize it.  But then, it represents the
5076 	       whole range or the empty set, so make it
5077 	       +/- [ -, - ].  */
5078 	    if (tree_int_cst_equal (n_low, low)
5079 		&& tree_int_cst_equal (n_high, high))
5080 	      low = high = 0;
5081 	    else
5082 	      in_p = ! in_p;
5083 	  }
5084 	else
5085 	  low = n_low, high = n_high;
5086 
5087 	*p_low = low;
5088 	*p_high = high;
5089 	*p_in_p = in_p;
5090 	return arg0;
5091 
5092     CASE_CONVERT:
5093     case NON_LVALUE_EXPR:
5094       if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
5095 	return NULL_TREE;
5096 
5097       if (! INTEGRAL_TYPE_P (arg0_type)
5098 	  || (low != 0 && ! int_fits_type_p (low, arg0_type))
5099 	  || (high != 0 && ! int_fits_type_p (high, arg0_type)))
5100 	return NULL_TREE;
5101 
5102       n_low = low, n_high = high;
5103 
5104       if (n_low != 0)
5105 	n_low = fold_convert_loc (loc, arg0_type, n_low);
5106 
5107       if (n_high != 0)
5108 	n_high = fold_convert_loc (loc, arg0_type, n_high);
5109 
5110       /* If we're converting arg0 from an unsigned type to exp's
5111 	 signed type, we will be doing the comparison as unsigned.
5112 	 The tests above have already verified that LOW and HIGH
5113 	 are both positive.
5114 
5115 	 So we have to ensure that we will handle large unsigned
5116 	 values the same way that the current signed bounds treat
5117 	 negative values.  */
5118 
5119       if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
5120 	{
5121 	  tree high_positive;
5122 	  tree equiv_type;
5123 	  /* For fixed-point modes, we need to pass the saturating flag
5124 	     as the 2nd parameter.  */
5125 	  if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
5126 	    equiv_type
5127 	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
5128 						TYPE_SATURATING (arg0_type));
5129 	  else
5130 	    equiv_type
5131 	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
5132 
5133 	  /* A range without an upper bound is, naturally, unbounded.
5134 	     Since convert would have cropped a very large value, use
5135 	     the max value for the destination type.  */
5136 	  high_positive
5137 	    = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
5138 	      : TYPE_MAX_VALUE (arg0_type);
5139 
5140 	  if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
5141 	    high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
5142 					     fold_convert_loc (loc, arg0_type,
5143 							       high_positive),
5144 					     build_int_cst (arg0_type, 1));
5145 
5146 	  /* If the low bound is specified, "and" the range with the
5147 	     range for which the original unsigned value will be
5148 	     positive.  */
5149 	  if (low != 0)
5150 	    {
5151 	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
5152 				  1, fold_convert_loc (loc, arg0_type,
5153 						       integer_zero_node),
5154 				  high_positive))
5155 		return NULL_TREE;
5156 
5157 	      in_p = (n_in_p == in_p);
5158 	    }
5159 	  else
5160 	    {
5161 	      /* Otherwise, "or" the range with the range of the input
5162 		 that will be interpreted as negative.  */
5163 	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
5164 				  1, fold_convert_loc (loc, arg0_type,
5165 						       integer_zero_node),
5166 				  high_positive))
5167 		return NULL_TREE;
5168 
5169 	      in_p = (in_p != n_in_p);
5170 	    }
5171 	}
5172 
5173       *p_low = n_low;
5174       *p_high = n_high;
5175       *p_in_p = in_p;
5176       return arg0;
5177 
5178     default:
5179       return NULL_TREE;
5180     }
5181 }
5182 
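/* To illustrate the PLUS_EXPR case: starting from "x + 10 <= 30" with
   a signed x whose overflow is undefined, the comparison step records
   "+ [-, 30]" for "x + 10", and the next step subtracts the constant
   from the bound, returning "x" with the range "+ [-, 20]" and noting
   strict overflow.  */
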
5183 /* Given EXP, a logical expression, set the range it is testing into
5184    variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
5185    actually being tested.  *PLOW and *PHIGH will be made of the same
5186    type as the returned expression.  If EXP is not a comparison, we
5187    will most likely not be returning a useful value and range.  Set
5188    *STRICT_OVERFLOW_P to true if the return value is only valid
5189    because signed overflow is undefined; otherwise, do not change
5190    *STRICT_OVERFLOW_P.  */
5191 
5192 tree
5193 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
5194 	    bool *strict_overflow_p)
5195 {
5196   enum tree_code code;
5197   tree arg0, arg1 = NULL_TREE;
5198   tree exp_type, nexp;
5199   int in_p;
5200   tree low, high;
5201   location_t loc = EXPR_LOCATION (exp);
5202 
5203   /* Start with simply saying "EXP != 0" and then look at the code of EXP
5204      and see if we can refine the range.  Some of the cases below may not
5205      happen, but it doesn't seem worth worrying about this.  We keep
5206      iterating as long as make_range_step can refine the range;
5207      once it returns NULL_TREE we stop.  */
5208 
5209   in_p = 0;
5210   low = high = build_int_cst (TREE_TYPE (exp), 0);
5211 
5212   while (1)
5213     {
5214       code = TREE_CODE (exp);
5215       exp_type = TREE_TYPE (exp);
5216       arg0 = NULL_TREE;
5217 
5218       if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
5219 	{
5220 	  if (TREE_OPERAND_LENGTH (exp) > 0)
5221 	    arg0 = TREE_OPERAND (exp, 0);
5222 	  if (TREE_CODE_CLASS (code) == tcc_binary
5223 	      || TREE_CODE_CLASS (code) == tcc_comparison
5224 	      || (TREE_CODE_CLASS (code) == tcc_expression
5225 		  && TREE_OPERAND_LENGTH (exp) > 1))
5226 	    arg1 = TREE_OPERAND (exp, 1);
5227 	}
5228       if (arg0 == NULL_TREE)
5229 	break;
5230 
5231       nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
5232 			      &high, &in_p, strict_overflow_p);
5233       if (nexp == NULL_TREE)
5234 	break;
5235       exp = nexp;
5236     }
5237 
5238   /* If EXP is a constant, we can evaluate whether this is true or false.  */
5239   if (TREE_CODE (exp) == INTEGER_CST)
5240     {
5241       in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
5242 						 exp, 0, low, 0))
5243 		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
5244 						    exp, 1, high, 1)));
5245       low = high = 0;
5246       exp = 0;
5247     }
5248 
5249   *pin_p = in_p, *plow = low, *phigh = high;
5250   return exp;
5251 }
5252 
5253 /* Returns TRUE if the [LOW, HIGH] range check can be optimized to
5254    a bitwise check, i.e. when
5255      LOW  == 0xXX...X00...0
5256      HIGH == 0xXX...X11...1
5257    Return the corresponding mask in MASK and stem in VALUE.  */
5258 
5259 static bool
5260 maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
5261 		  tree *value)
5262 {
5263   if (TREE_CODE (low) != INTEGER_CST
5264       || TREE_CODE (high) != INTEGER_CST)
5265     return false;
5266 
5267   unsigned prec = TYPE_PRECISION (type);
5268   wide_int lo = wi::to_wide (low, prec);
5269   wide_int hi = wi::to_wide (high, prec);
5270 
5271   wide_int end_mask = lo ^ hi;
5272   if ((end_mask & (end_mask + 1)) != 0
5273       || (lo & end_mask) != 0)
5274     return false;
5275 
5276   wide_int stem_mask = ~end_mask;
5277   wide_int stem = lo & stem_mask;
5278   if (stem != (hi & stem_mask))
5279     return false;
5280 
5281   *mask = wide_int_to_tree (type, stem_mask);
5282   *value = wide_int_to_tree (type, stem);
5283 
5284   return true;
5285 }
5286 
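/* Concretely, for LOW == 0x20 and HIGH == 0x2f, END_MASK is 0x0f (a
   contiguous run of low-order ones) and the stem is 0x20, so the
   range check "0x20 <= x && x <= 0x2f" can be done as
   "(x & ~0x0f) == 0x20".  */
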
5287 /* Helper routine for build_range_check and match.pd.  Return the type to
5288    perform the check or NULL if it shouldn't be optimized.  */
5289 
5290 tree
5291 range_check_type (tree etype)
5292 {
5293   /* First make sure that arithmetic in this type is valid, then make sure
5294      that it wraps around.  */
5295   if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
5296     etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype), 1);
5297 
5298   if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_UNSIGNED (etype))
5299     {
5300       tree utype, minv, maxv;
5301 
5302       /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
5303 	 for the type in question, as we rely on this here.  */
5304       utype = unsigned_type_for (etype);
5305       maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
5306       maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
5307 			  build_int_cst (TREE_TYPE (maxv), 1), 1);
5308       minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
5309 
5310       if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
5311 				      minv, 1, maxv, 1)))
5312 	etype = utype;
5313       else
5314 	return NULL_TREE;
5315     }
5316   else if (POINTER_TYPE_P (etype))
5317     etype = unsigned_type_for (etype);
5318   return etype;
5319 }
5320 
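/* E.g. for a 32-bit int this returns the corresponding unsigned type,
   because (unsigned) INT_MAX + 1 wraps around to 0x80000000, which is
   (unsigned) INT_MIN; a signed type lacking that wrap-around property
   would be rejected with NULL_TREE.  */
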
5321 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
5322    type, TYPE, return an expression to test if EXP is in (or out of, depending
5323    on IN_P) the range.  Return 0 if the test couldn't be created.  */
5324 
5325 tree
5326 build_range_check (location_t loc, tree type, tree exp, int in_p,
5327 		   tree low, tree high)
5328 {
5329   tree etype = TREE_TYPE (exp), mask, value;
5330 
5331   /* Disable this optimization for function pointer expressions
5332      on targets that require function pointer canonicalization.  */
5333   if (targetm.have_canonicalize_funcptr_for_compare ()
5334       && POINTER_TYPE_P (etype)
5335       && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (etype)))
5336     return NULL_TREE;
5337 
5338   if (! in_p)
5339     {
5340       value = build_range_check (loc, type, exp, 1, low, high);
5341       if (value != 0)
5342         return invert_truthvalue_loc (loc, value);
5343 
5344       return 0;
5345     }
5346 
5347   if (low == 0 && high == 0)
5348     return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
5349 
5350   if (low == 0)
5351     return fold_build2_loc (loc, LE_EXPR, type, exp,
5352 			    fold_convert_loc (loc, etype, high));
5353 
5354   if (high == 0)
5355     return fold_build2_loc (loc, GE_EXPR, type, exp,
5356 			    fold_convert_loc (loc, etype, low));
5357 
5358   if (operand_equal_p (low, high, 0))
5359     return fold_build2_loc (loc, EQ_EXPR, type, exp,
5360 			    fold_convert_loc (loc, etype, low));
5361 
5362   if (TREE_CODE (exp) == BIT_AND_EXPR
5363       && maskable_range_p (low, high, etype, &mask, &value))
5364     return fold_build2_loc (loc, EQ_EXPR, type,
5365 			    fold_build2_loc (loc, BIT_AND_EXPR, etype,
5366 					     exp, mask),
5367 			    value);
5368 
5369   if (integer_zerop (low))
5370     {
5371       if (! TYPE_UNSIGNED (etype))
5372 	{
5373 	  etype = unsigned_type_for (etype);
5374 	  high = fold_convert_loc (loc, etype, high);
5375 	  exp = fold_convert_loc (loc, etype, exp);
5376 	}
5377       return build_range_check (loc, type, exp, 1, 0, high);
5378     }
5379 
5380   /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
5381   if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
5382     {
5383       int prec = TYPE_PRECISION (etype);
5384 
5385       if (wi::mask <widest_int> (prec - 1, false) == wi::to_widest (high))
5386 	{
5387 	  if (TYPE_UNSIGNED (etype))
5388 	    {
5389 	      tree signed_etype = signed_type_for (etype);
5390 	      if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
5391 		etype
5392 		  = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
5393 	      else
5394 		etype = signed_etype;
5395 	      exp = fold_convert_loc (loc, etype, exp);
5396 	    }
5397 	  return fold_build2_loc (loc, GT_EXPR, type, exp,
5398 				  build_int_cst (etype, 0));
5399 	}
5400     }
5401 
5402   /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
5403      This requires wrap-around arithmetic for the type of the expression.  */
5404   etype = range_check_type (etype);
5405   if (etype == NULL_TREE)
5406     return NULL_TREE;
5407 
5408   high = fold_convert_loc (loc, etype, high);
5409   low = fold_convert_loc (loc, etype, low);
5410   exp = fold_convert_loc (loc, etype, exp);
5411 
5412   value = const_binop (MINUS_EXPR, high, low);
5413 
5414   if (value != 0 && !TREE_OVERFLOW (value))
5415     return build_range_check (loc, type,
5416 			      fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
5417 			      1, build_int_cst (etype, 0), value);
5418 
5419   return 0;
5420 }
5421 
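/* The overall effect is e.g. that

     c >= '0' && c <= '9'

   can be rewritten as the single unsigned comparison

     (unsigned) (c - '0') <= 9

   i.e. EXP - LOW checked against HIGH - LOW.  */
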
5422 /* Return the predecessor of VAL in its type, handling the infinite case.  */
5423 
5424 static tree
5425 range_predecessor (tree val)
5426 {
5427   tree type = TREE_TYPE (val);
5428 
5429   if (INTEGRAL_TYPE_P (type)
5430       && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
5431     return 0;
5432   else
5433     return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
5434 			build_int_cst (TREE_TYPE (val), 1), 0);
5435 }
5436 
5437 /* Return the successor of VAL in its type, handling the infinite case.  */
5438 
5439 static tree
5440 range_successor (tree val)
5441 {
5442   tree type = TREE_TYPE (val);
5443 
5444   if (INTEGRAL_TYPE_P (type)
5445       && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
5446     return 0;
5447   else
5448     return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
5449 			build_int_cst (TREE_TYPE (val), 1), 0);
5450 }
5451 
5452 /* Given two ranges, see if we can merge them into one.  Return 1 if we
5453    can, 0 if we can't.  Set the output range into the specified parameters.  */
5454 
5455 bool
5456 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
5457 	      tree high0, int in1_p, tree low1, tree high1)
5458 {
5459   int no_overlap;
5460   int subset;
5461   int temp;
5462   tree tem;
5463   int in_p;
5464   tree low, high;
5465   int lowequal = ((low0 == 0 && low1 == 0)
5466 		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5467 						low0, 0, low1, 0)));
5468   int highequal = ((high0 == 0 && high1 == 0)
5469 		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5470 						 high0, 1, high1, 1)));
5471 
5472   /* Make range 0 be the range that starts first, or ends last if they
5473      start at the same value.  Swap them if it isn't.  */
5474   if (integer_onep (range_binop (GT_EXPR, integer_type_node,
5475 				 low0, 0, low1, 0))
5476       || (lowequal
5477 	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
5478 					high1, 1, high0, 1))))
5479     {
5480       temp = in0_p, in0_p = in1_p, in1_p = temp;
5481       tem = low0, low0 = low1, low1 = tem;
5482       tem = high0, high0 = high1, high1 = tem;
5483     }
5484 
5485   /* If the second range is != high1 where high1 is the maximum value
5486      of its type, first try merging with the < high1 range.  */
5487   if (low1
5488       && high1
5489       && TREE_CODE (low1) == INTEGER_CST
5490       && (TREE_CODE (TREE_TYPE (low1)) == INTEGER_TYPE
5491 	  || (TREE_CODE (TREE_TYPE (low1)) == ENUMERAL_TYPE
5492 	      && known_eq (TYPE_PRECISION (TREE_TYPE (low1)),
5493 			   GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low1))))))
5494       && operand_equal_p (low1, high1, 0))
5495     {
5496       if (tree_int_cst_equal (low1, TYPE_MAX_VALUE (TREE_TYPE (low1)))
5497 	  && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5498 			   !in1_p, NULL_TREE, range_predecessor (low1)))
5499 	return true;
5500       /* Similarly, for the second range != low1 where low1 is the minimum
5501 	 value of its type, first try merging with the > low1 range.  */
5502       if (tree_int_cst_equal (low1, TYPE_MIN_VALUE (TREE_TYPE (low1)))
5503 	  && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
5504 			   !in1_p, range_successor (low1), NULL_TREE))
5505 	return true;
5506     }
5507 
5508   /* Now flag two cases, whether the ranges are disjoint or whether the
5509      second range is totally subsumed in the first.  Note that the tests
5510      below are simplified by the ones above.  */
5511   no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
5512 					  high0, 1, low1, 0));
5513   subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
5514 				      high1, 1, high0, 1));
5515 
5516   /* We now have four cases, depending on whether we are including or
5517      excluding the two ranges.  */
5518   if (in0_p && in1_p)
5519     {
5520       /* If they don't overlap, the result is false.  If the second range
5521 	 is a subset it is the result.  Otherwise, the range is from the start
5522 	 of the second to the end of the first.  */
5523       if (no_overlap)
5524 	in_p = 0, low = high = 0;
5525       else if (subset)
5526 	in_p = 1, low = low1, high = high1;
5527       else
5528 	in_p = 1, low = low1, high = high0;
5529     }
5530 
5531   else if (in0_p && ! in1_p)
5532     {
5533       /* If they don't overlap, the result is the first range.  If they are
5534 	 equal, the result is false.  If the second range is a subset of the
5535 	 first, and the ranges begin at the same place, we go from just after
5536 	 the end of the second range to the end of the first.  If the second
5537 	 range is not a subset of the first, or if it is a subset and both
5538 	 ranges end at the same place, the range starts at the start of the
5539 	 first range and ends just before the second range.
5540 	 Otherwise, we can't describe this as a single range.  */
5541       if (no_overlap)
5542 	in_p = 1, low = low0, high = high0;
5543       else if (lowequal && highequal)
5544 	in_p = 0, low = high = 0;
5545       else if (subset && lowequal)
5546 	{
5547 	  low = range_successor (high1);
5548 	  high = high0;
5549 	  in_p = 1;
5550 	  if (low == 0)
5551 	    {
5552 	      /* We are in the weird situation where high0 > high1 but
5553 		 high1 has no successor.  Punt.  */
5554 	      return 0;
5555 	    }
5556 	}
5557       else if (! subset || highequal)
5558 	{
5559 	  low = low0;
5560 	  high = range_predecessor (low1);
5561 	  in_p = 1;
5562 	  if (high == 0)
5563 	    {
5564 	      /* low0 < low1 but low1 has no predecessor.  Punt.  */
5565 	      return 0;
5566 	    }
5567 	}
5568       else
5569 	return 0;
5570     }
5571 
5572   else if (! in0_p && in1_p)
5573     {
5574       /* If they don't overlap, the result is the second range.  If the second
5575 	 is a subset of the first, the result is false.  Otherwise,
5576 	 the range starts just after the first range and ends at the
5577 	 end of the second.  */
5578       if (no_overlap)
5579 	in_p = 1, low = low1, high = high1;
5580       else if (subset || highequal)
5581 	in_p = 0, low = high = 0;
5582       else
5583 	{
5584 	  low = range_successor (high0);
5585 	  high = high1;
5586 	  in_p = 1;
5587 	  if (low == 0)
5588 	    {
5589 	      /* high1 > high0 but high0 has no successor.  Punt.  */
5590 	      return 0;
5591 	    }
5592 	}
5593     }
5594 
5595   else
5596     {
5597       /* The case where we are excluding both ranges.  Here the complex case
5598 	 is if they don't overlap.  In that case, the only time we have a
5599 	 range is if they are adjacent.  If the second is a subset of the
5600 	 first, the result is the first.  Otherwise, the range to exclude
5601 	 starts at the beginning of the first range and ends at the end of the
5602 	 second.  */
5603       if (no_overlap)
5604 	{
5605 	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5606 					 range_successor (high0),
5607 					 1, low1, 0)))
5608 	    in_p = 0, low = low0, high = high1;
5609 	  else
5610 	    {
5611 	      /* Canonicalize - [min, x] into - [-, x].  */
5612 	      if (low0 && TREE_CODE (low0) == INTEGER_CST)
5613 		switch (TREE_CODE (TREE_TYPE (low0)))
5614 		  {
5615 		  case ENUMERAL_TYPE:
5616 		    if (maybe_ne (TYPE_PRECISION (TREE_TYPE (low0)),
5617 				  GET_MODE_BITSIZE
5618 				    (TYPE_MODE (TREE_TYPE (low0)))))
5619 		      break;
5620 		    /* FALLTHROUGH */
5621 		  case INTEGER_TYPE:
5622 		    if (tree_int_cst_equal (low0,
5623 					    TYPE_MIN_VALUE (TREE_TYPE (low0))))
5624 		      low0 = 0;
5625 		    break;
5626 		  case POINTER_TYPE:
5627 		    if (TYPE_UNSIGNED (TREE_TYPE (low0))
5628 			&& integer_zerop (low0))
5629 		      low0 = 0;
5630 		    break;
5631 		  default:
5632 		    break;
5633 		  }
5634 
5635 	      /* Canonicalize - [x, max] into - [x, -].  */
5636 	      if (high1 && TREE_CODE (high1) == INTEGER_CST)
5637 		switch (TREE_CODE (TREE_TYPE (high1)))
5638 		  {
5639 		  case ENUMERAL_TYPE:
5640 		    if (maybe_ne (TYPE_PRECISION (TREE_TYPE (high1)),
5641 				  GET_MODE_BITSIZE
5642 				    (TYPE_MODE (TREE_TYPE (high1)))))
5643 		      break;
5644 		    /* FALLTHROUGH */
5645 		  case INTEGER_TYPE:
5646 		    if (tree_int_cst_equal (high1,
5647 					    TYPE_MAX_VALUE (TREE_TYPE (high1))))
5648 		      high1 = 0;
5649 		    break;
5650 		  case POINTER_TYPE:
5651 		    if (TYPE_UNSIGNED (TREE_TYPE (high1))
5652 			&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5653 						       high1, 1,
5654 						       build_int_cst (TREE_TYPE (high1), 1),
5655 						       1)))
5656 		      high1 = 0;
5657 		    break;
5658 		  default:
5659 		    break;
5660 		  }
5661 
5662 	      /* The ranges might also be adjacent between the maximum and
5663 	         minimum values of the given type.  For
5664 	         - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5665 	         return + [x + 1, y - 1].  */
5666 	      if (low0 == 0 && high1 == 0)
5667 	        {
5668 		  low = range_successor (high0);
5669 		  high = range_predecessor (low1);
5670 		  if (low == 0 || high == 0)
5671 		    return 0;
5672 
5673 		  in_p = 1;
5674 		}
5675 	      else
5676 		return 0;
5677 	    }
5678 	}
5679       else if (subset)
5680 	in_p = 0, low = low0, high = high0;
5681       else
5682 	in_p = 0, low = low0, high = high1;
5683     }
5684 
5685   *pin_p = in_p, *plow = low, *phigh = high;
5686   return 1;
5687 }
5688 
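/* For example, merging "+ [2, 9]" with "+ [4, 5]" takes the SUBSET
   case above and yields "+ [4, 5]", while merging "+ [2, 5]" with
   "+ [7, 9]" takes the NO_OVERLAP case and yields "- [-, -]", the
   always-false range.  */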
5689 
5690 /* Subroutine of fold, looking inside expressions of the form
5691    A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5692    of the COND_EXPR.  This function is being used also to optimize
5693    A op B ? C : A, by reversing the comparison first.
5694 
5695    Return a folded expression whose code is not a COND_EXPR
5696    anymore, or NULL_TREE if no folding opportunity is found.  */
5697 
5698 static tree
5699 fold_cond_expr_with_comparison (location_t loc, tree type,
5700 				tree arg0, tree arg1, tree arg2)
5701 {
5702   enum tree_code comp_code = TREE_CODE (arg0);
5703   tree arg00 = TREE_OPERAND (arg0, 0);
5704   tree arg01 = TREE_OPERAND (arg0, 1);
5705   tree arg1_type = TREE_TYPE (arg1);
5706   tree tem;
5707 
5708   STRIP_NOPS (arg1);
5709   STRIP_NOPS (arg2);
5710 
5711   /* If we have A op 0 ? A : -A, consider applying the following
5712      transformations:
5713 
5714      A == 0? A : -A    same as -A
5715      A != 0? A : -A    same as A
5716      A >= 0? A : -A    same as abs (A)
5717      A > 0?  A : -A    same as abs (A)
5718      A <= 0? A : -A    same as -abs (A)
5719      A < 0?  A : -A    same as -abs (A)
5720 
5721      None of these transformations work for modes with signed
5722      zeros.  If A is +/-0, the first two transformations will
5723      change the sign of the result (from +0 to -0, or vice
5724      versa).  The last four will fix the sign of the result,
5725      even though the original expressions could be positive or
5726      negative, depending on the sign of A.
5727 
5728      Note that all these transformations are correct if A is
5729      NaN, since the two alternatives (A and -A) are also NaNs.  */
5730   if (!HONOR_SIGNED_ZEROS (element_mode (type))
5731       && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5732 	  ? real_zerop (arg01)
5733 	  : integer_zerop (arg01))
5734       && ((TREE_CODE (arg2) == NEGATE_EXPR
5735 	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5736 	     /* In the case that A is of the form X-Y, '-A' (arg2) may
5737 	        have already been folded to Y-X, check for that. */
5738 	  || (TREE_CODE (arg1) == MINUS_EXPR
5739 	      && TREE_CODE (arg2) == MINUS_EXPR
5740 	      && operand_equal_p (TREE_OPERAND (arg1, 0),
5741 				  TREE_OPERAND (arg2, 1), 0)
5742 	      && operand_equal_p (TREE_OPERAND (arg1, 1),
5743 				  TREE_OPERAND (arg2, 0), 0))))
5744     switch (comp_code)
5745       {
5746       case EQ_EXPR:
5747       case UNEQ_EXPR:
5748 	tem = fold_convert_loc (loc, arg1_type, arg1);
5749 	return fold_convert_loc (loc, type, negate_expr (tem));
5750       case NE_EXPR:
5751       case LTGT_EXPR:
5752 	return fold_convert_loc (loc, type, arg1);
5753       case UNGE_EXPR:
5754       case UNGT_EXPR:
5755 	if (flag_trapping_math)
5756 	  break;
5757 	/* Fall through.  */
5758       case GE_EXPR:
5759       case GT_EXPR:
5760 	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5761 	  break;
5762 	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5763 	return fold_convert_loc (loc, type, tem);
5764       case UNLE_EXPR:
5765       case UNLT_EXPR:
5766 	if (flag_trapping_math)
5767 	  break;
5768 	/* FALLTHRU */
5769       case LE_EXPR:
5770       case LT_EXPR:
5771 	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5772 	  break;
5773 	if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg1))
5774 	    && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
5775 	  {
5776 	    /* A <= 0 ? A : -A for A == INT_MIN is valid, but -abs(INT_MIN)
5777 	       is not: it invokes UB both in abs and in the negation of it.
5778 	       So, use ABSU_EXPR instead.  */
5779 	    tree utype = unsigned_type_for (TREE_TYPE (arg1));
5780 	    tem = fold_build1_loc (loc, ABSU_EXPR, utype, arg1);
5781 	    tem = negate_expr (tem);
5782 	    return fold_convert_loc (loc, type, tem);
5783 	  }
5784 	else
5785 	  {
5786 	    tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5787 	    return negate_expr (fold_convert_loc (loc, type, tem));
5788 	  }
5789       default:
5790 	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5791 	break;
5792       }
5793 
5794   /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
5795      A == 0 ? A : 0 is always 0 unless A is -0.  Note that
5796      both transformations are correct when A is NaN: A != 0
5797      is then true, and A == 0 is false.  */
5798 
5799   if (!HONOR_SIGNED_ZEROS (element_mode (type))
5800       && integer_zerop (arg01) && integer_zerop (arg2))
5801     {
5802       if (comp_code == NE_EXPR)
5803 	return fold_convert_loc (loc, type, arg1);
5804       else if (comp_code == EQ_EXPR)
5805 	return build_zero_cst (type);
5806     }
5807 
5808   /* Try some transformations of A op B ? A : B.
5809 
5810      A == B? A : B    same as B
5811      A != B? A : B    same as A
5812      A >= B? A : B    same as max (A, B)
5813      A > B?  A : B    same as max (B, A)
5814      A <= B? A : B    same as min (A, B)
5815      A < B?  A : B    same as min (B, A)
5816 
5817      As above, these transformations don't work in the presence
5818      of signed zeros.  For example, if A and B are zeros of
5819      opposite sign, the first two transformations will change
5820      the sign of the result.  In the last four, the original
5821      expressions give different results for (A=+0, B=-0) and
5822      (A=-0, B=+0), but the transformed expressions do not.
5823 
5824      The first two transformations are correct if either A or B
5825      is a NaN.  In the first transformation, the condition will
5826      be false, and B will indeed be chosen.  In the case of the
5827      second transformation, the condition A != B will be true,
5828      and A will be chosen.
5829 
5830      The conversions to max() and min() are not correct if B is
5831      a number and A is not.  The conditions in the original
5832      expressions will be false, so all four give B.  The min()
5833      and max() versions would give a NaN instead.  */
5834   if (!HONOR_SIGNED_ZEROS (element_mode (type))
5835       && operand_equal_for_comparison_p (arg01, arg2)
5836       /* Avoid these transformations if the COND_EXPR may be used
5837 	 as an lvalue in the C++ front-end.  PR c++/19199.  */
5838       && (in_gimple_form
5839 	  || VECTOR_TYPE_P (type)
5840 	  || (! lang_GNU_CXX ()
5841 	      && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5842 	  || ! maybe_lvalue_p (arg1)
5843 	  || ! maybe_lvalue_p (arg2)))
5844     {
5845       tree comp_op0 = arg00;
5846       tree comp_op1 = arg01;
5847       tree comp_type = TREE_TYPE (comp_op0);
5848 
5849       switch (comp_code)
5850 	{
5851 	case EQ_EXPR:
5852 	  return fold_convert_loc (loc, type, arg2);
5853 	case NE_EXPR:
5854 	  return fold_convert_loc (loc, type, arg1);
5855 	case LE_EXPR:
5856 	case LT_EXPR:
5857 	case UNLE_EXPR:
5858 	case UNLT_EXPR:
5859 	  /* In C++ a ?: expression can be an lvalue, so put the
5860 	     operand which will be used if they are equal first
5861 	     so that we can convert this back to the
5862 	     corresponding COND_EXPR.  */
5863 	  if (!HONOR_NANS (arg1))
5864 	    {
5865 	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5866 	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5867 	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5868 		    ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5869 		    : fold_build2_loc (loc, MIN_EXPR, comp_type,
5870 				   comp_op1, comp_op0);
5871 	      return fold_convert_loc (loc, type, tem);
5872 	    }
5873 	  break;
5874 	case GE_EXPR:
5875 	case GT_EXPR:
5876 	case UNGE_EXPR:
5877 	case UNGT_EXPR:
5878 	  if (!HONOR_NANS (arg1))
5879 	    {
5880 	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5881 	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5882 	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5883 		    ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5884 		    : fold_build2_loc (loc, MAX_EXPR, comp_type,
5885 				   comp_op1, comp_op0);
5886 	      return fold_convert_loc (loc, type, tem);
5887 	    }
5888 	  break;
5889 	case UNEQ_EXPR:
5890 	  if (!HONOR_NANS (arg1))
5891 	    return fold_convert_loc (loc, type, arg2);
5892 	  break;
5893 	case LTGT_EXPR:
5894 	  if (!HONOR_NANS (arg1))
5895 	    return fold_convert_loc (loc, type, arg1);
5896 	  break;
5897 	default:
5898 	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5899 	  break;
5900 	}
5901     }
5902 
5903   return NULL_TREE;
5904 }
5905 
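/* Thus e.g. "x < y ? x : y" becomes MIN_EXPR <y, x> and
   "x > y ? x : y" becomes MAX_EXPR <y, x>, matching the A < B and
   A > B rows of the table above (modulo the NaN, signed-zero and
   lvalue caveats).  */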
5906 
5907 
5908 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5909 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5910   (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5911 		false) >= 2)
5912 #endif
5913 
5914 /* EXP is some logical combination of boolean tests.  See if we can
5915    merge it into some range test.  Return the new tree if so.  */
5916 
5917 static tree
5918 fold_range_test (location_t loc, enum tree_code code, tree type,
5919 		 tree op0, tree op1)
5920 {
5921   int or_op = (code == TRUTH_ORIF_EXPR
5922 	       || code == TRUTH_OR_EXPR);
5923   int in0_p, in1_p, in_p;
5924   tree low0, low1, low, high0, high1, high;
5925   bool strict_overflow_p = false;
5926   tree tem, lhs, rhs;
5927   const char * const warnmsg = G_("assuming signed overflow does not occur "
5928 				  "when simplifying range test");
5929 
5930   if (!INTEGRAL_TYPE_P (type))
5931     return 0;
5932 
5933   lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5934   rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5935 
5936   /* If this is an OR operation, invert both sides; we will invert
5937      again at the end.  */
5938   if (or_op)
5939     in0_p = ! in0_p, in1_p = ! in1_p;
5940 
5941   /* If both expressions are the same, if we can merge the ranges, and we
5942      can build the range test, return it or it inverted.  If one of the
5943      ranges is always true or always false, consider it to be the same
5944      expression as the other.  */
5945   if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5946       && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5947 		       in1_p, low1, high1)
5948       && (tem = (build_range_check (loc, type,
5949 				    lhs != 0 ? lhs
5950 				    : rhs != 0 ? rhs : integer_zero_node,
5951 				    in_p, low, high))) != 0)
5952     {
5953       if (strict_overflow_p)
5954 	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5955       return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5956     }
5957 
5958   /* On machines where the branch cost is expensive, if this is a
5959      short-circuited branch and the underlying object on both sides
5960      is the same, make a non-short-circuit operation.  */
5961   bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
5962   if (param_logical_op_non_short_circuit != -1)
5963     logical_op_non_short_circuit
5964       = param_logical_op_non_short_circuit;
5965   if (logical_op_non_short_circuit
5966       && !flag_sanitize_coverage
5967       && lhs != 0 && rhs != 0
5968       && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5969       && operand_equal_p (lhs, rhs, 0))
5970     {
5971       /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
5972 	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5973 	 which cases we can't do this.  */
5974       if (simple_operand_p (lhs))
5975 	return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5976 			   ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5977 			   type, op0, op1);
5978 
5979       else if (!lang_hooks.decls.global_bindings_p ()
5980 	       && !CONTAINS_PLACEHOLDER_P (lhs))
5981 	{
5982 	  tree common = save_expr (lhs);
5983 
5984 	  if ((lhs = build_range_check (loc, type, common,
5985 					or_op ? ! in0_p : in0_p,
5986 					low0, high0)) != 0
5987 	      && (rhs = build_range_check (loc, type, common,
5988 					   or_op ? ! in1_p : in1_p,
5989 					   low1, high1)) != 0)
5990 	    {
5991 	      if (strict_overflow_p)
5992 		fold_overflow_warning (warnmsg,
5993 				       WARN_STRICT_OVERFLOW_COMPARISON);
5994 	      return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5995 				 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5996 				 type, lhs, rhs);
5997 	    }
5998 	}
5999     }
6000 
6001   return 0;
6002 }
6003 
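/* E.g. given

     if (x < 2 || x > 5)

   both sides are inverted to "+ [2, -]" and "+ [-, 5]", merged into
   "+ [2, 5]", built as "(unsigned) (x - 2) <= 3" and inverted once
   more, yielding the single comparison

     if ((unsigned) (x - 2) > 3)  */
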
6004 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
6005    bit value.  Arrange things so the extra bits will be set to zero if and
6006    only if C is sign-extended to its full width.  If MASK is nonzero,
6007    it is an INTEGER_CST that should be AND'ed with the extra bits.  */
6008 
6009 static tree
6010 unextend (tree c, int p, int unsignedp, tree mask)
6011 {
6012   tree type = TREE_TYPE (c);
6013   int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
6014   tree temp;
6015 
6016   if (p == modesize || unsignedp)
6017     return c;
6018 
6019   /* We work by getting just the sign bit into the low-order bit, then
6020      into the high-order bit, then sign-extend.  We then XOR that value
6021      with C.  */
6022   temp = build_int_cst (TREE_TYPE (c),
6023 			wi::extract_uhwi (wi::to_wide (c), p - 1, 1));
6024 
6025   /* We must use a signed type in order to get an arithmetic right shift.
6026      However, we must also avoid introducing accidental overflows, so that
6027      a subsequent call to integer_zerop will work.  Hence we must
6028      do the type conversion here.  At this point, the constant is either
6029      zero or one, and the conversion to a signed type can never overflow.
6030      We could get an overflow if this conversion is done anywhere else.  */
6031   if (TYPE_UNSIGNED (type))
6032     temp = fold_convert (signed_type_for (type), temp);
6033 
6034   temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
6035   temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
6036   if (mask != 0)
6037     temp = const_binop (BIT_AND_EXPR, temp,
6038 			fold_convert (TREE_TYPE (c), mask));
6039   /* If necessary, convert the type back to match the type of C.  */
6040   if (TYPE_UNSIGNED (type))
6041     temp = fold_convert (type, temp);
6042 
6043   return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
6044 }
6045 
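/* For instance, with C == 0x0080 in a 16-bit type and P == 8: the
   extracted sign bit is 1, the two shifts turn it into 0xff00, and
   C ^ 0xff00 == 0xff80, i.e. C sign-extended from 8 to 16 bits.  */
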
6046 /* For an expression that has the form
6047      (A && B) || ~B
6048    or
6049      (A || B) && ~B,
6050    we can drop one of the inner expressions and simplify to
6051      A || ~B
6052    or
6053      A && ~B
6054    LOC is the location of the resulting expression.  OP is the inner
6055    logical operation; the left-hand side in the examples above, while CMPOP
6056    is the right-hand side.  RHS_ONLY is used to prevent us from accidentally
6057    removing a condition that guards another, as in
6058      (A != NULL && A->...) || A == NULL
6059    which we must not transform.  If RHS_ONLY is true, only eliminate the
6060    right-most operand of the inner logical operation.  */
6061 
6062 static tree
6063 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
6064 				 bool rhs_only)
6065 {
6066   tree type = TREE_TYPE (cmpop);
6067   enum tree_code code = TREE_CODE (cmpop);
6068   enum tree_code truthop_code = TREE_CODE (op);
6069   tree lhs = TREE_OPERAND (op, 0);
6070   tree rhs = TREE_OPERAND (op, 1);
6071   tree orig_lhs = lhs, orig_rhs = rhs;
6072   enum tree_code rhs_code = TREE_CODE (rhs);
6073   enum tree_code lhs_code = TREE_CODE (lhs);
6074   enum tree_code inv_code;
6075 
6076   if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
6077     return NULL_TREE;
6078 
6079   if (TREE_CODE_CLASS (code) != tcc_comparison)
6080     return NULL_TREE;
6081 
6082   if (rhs_code == truthop_code)
6083     {
6084       tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
6085       if (newrhs != NULL_TREE)
6086 	{
6087 	  rhs = newrhs;
6088 	  rhs_code = TREE_CODE (rhs);
6089 	}
6090     }
6091   if (lhs_code == truthop_code && !rhs_only)
6092     {
6093       tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
6094       if (newlhs != NULL_TREE)
6095 	{
6096 	  lhs = newlhs;
6097 	  lhs_code = TREE_CODE (lhs);
6098 	}
6099     }
6100 
6101   inv_code = invert_tree_comparison (code, HONOR_NANS (type));
6102   if (inv_code == rhs_code
6103       && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
6104       && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
6105     return lhs;
6106   if (!rhs_only && inv_code == lhs_code
6107       && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
6108       && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
6109     return rhs;
6110   if (rhs != orig_rhs || lhs != orig_lhs)
6111     return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
6112 			    lhs, rhs);
6113   return NULL_TREE;
6114 }
6115 
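/* E.g. for OP == "a != 0 && b > 2" and CMPOP == "b <= 2" (as in
   "(a != 0 && b > 2) || b <= 2"), the inverted CMPOP matches the
   right arm of OP, so just "a != 0" is returned.  */
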
6116 /* Find ways of folding logical expressions of LHS and RHS:
6117    Try to merge two comparisons to the same innermost item.
6118    Look for range tests like "ch >= '0' && ch <= '9'".
6119    Look for combinations of simple terms on machines with expensive branches
6120    and evaluate the RHS unconditionally.
6121 
6122    For example, if we have p->a == 2 && p->b == 4 and we can make an
6123    object large enough to span both A and B, we can do this with a comparison
6124    against the object ANDed with a mask.
6125 
6126    If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
6127    operations to do this with one comparison.
6128 
6129    We check for both normal comparisons and the BIT_AND_EXPRs made by this
6130    function and the one above.
6131 
6132    CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
6133    TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
6134 
6135    TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
6136    two operands.
6137 
6138    We return the simplified tree or 0 if no optimization is possible.  */
6139 
6140 static tree
6141 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
6142 		    tree lhs, tree rhs)
6143 {
6144   /* If this is the "or" of two comparisons, we can do something if
6145      the comparisons are NE_EXPR.  If this is the "and", we can do something
6146      if the comparisons are EQ_EXPR.  I.e.,
6147 	(a->b == 2 && a->c == 4) can become (a->new == NEW).
6148 
6149      WANTED_CODE is this operation code.  For single bit fields, we can
6150      convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
6151      comparison for one-bit fields.  */
6152 
6153   enum tree_code wanted_code;
6154   enum tree_code lcode, rcode;
6155   tree ll_arg, lr_arg, rl_arg, rr_arg;
6156   tree ll_inner, lr_inner, rl_inner, rr_inner;
6157   HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
6158   HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
6159   HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
6160   HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
6161   int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
6162   int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
6163   machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
6164   scalar_int_mode lnmode, rnmode;
6165   tree ll_mask, lr_mask, rl_mask, rr_mask;
6166   tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
6167   tree l_const, r_const;
6168   tree lntype, rntype, result;
6169   HOST_WIDE_INT first_bit, end_bit;
6170   int volatilep;
6171 
6172   /* Start by getting the comparison codes.  Fail if anything is volatile.
6173      If one operand is a BIT_AND_EXPR with the constant one, treat it as if
6174      it were surrounded with a NE_EXPR.  */
6175 
6176   if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
6177     return 0;
6178 
6179   lcode = TREE_CODE (lhs);
6180   rcode = TREE_CODE (rhs);
6181 
6182   if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
6183     {
6184       lhs = build2 (NE_EXPR, truth_type, lhs,
6185 		    build_int_cst (TREE_TYPE (lhs), 0));
6186       lcode = NE_EXPR;
6187     }
6188 
6189   if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
6190     {
6191       rhs = build2 (NE_EXPR, truth_type, rhs,
6192 		    build_int_cst (TREE_TYPE (rhs), 0));
6193       rcode = NE_EXPR;
6194     }
6195 
6196   if (TREE_CODE_CLASS (lcode) != tcc_comparison
6197       || TREE_CODE_CLASS (rcode) != tcc_comparison)
6198     return 0;
6199 
6200   ll_arg = TREE_OPERAND (lhs, 0);
6201   lr_arg = TREE_OPERAND (lhs, 1);
6202   rl_arg = TREE_OPERAND (rhs, 0);
6203   rr_arg = TREE_OPERAND (rhs, 1);
6204 
6205   /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
6206   if (simple_operand_p (ll_arg)
6207       && simple_operand_p (lr_arg))
6208     {
6209       if (operand_equal_p (ll_arg, rl_arg, 0)
6210           && operand_equal_p (lr_arg, rr_arg, 0))
6211 	{
6212           result = combine_comparisons (loc, code, lcode, rcode,
6213 					truth_type, ll_arg, lr_arg);
6214 	  if (result)
6215 	    return result;
6216 	}
6217       else if (operand_equal_p (ll_arg, rr_arg, 0)
6218                && operand_equal_p (lr_arg, rl_arg, 0))
6219 	{
6220           result = combine_comparisons (loc, code, lcode,
6221 					swap_tree_comparison (rcode),
6222 					truth_type, ll_arg, lr_arg);
6223 	  if (result)
6224 	    return result;
6225 	}
6226     }
6227 
6228   code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
6229 	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
6230 
6231   /* If the RHS can be evaluated unconditionally and its operands are
6232      simple, it wins to evaluate the RHS unconditionally on machines
6233      with expensive branches.  In this case, this isn't a comparison
6234      that can be merged.  */
6235 
6236   if (BRANCH_COST (optimize_function_for_speed_p (cfun),
6237 		   false) >= 2
6238       && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
6239       && simple_operand_p (rl_arg)
6240       && simple_operand_p (rr_arg))
6241     {
6242       /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
6243       if (code == TRUTH_OR_EXPR
6244 	  && lcode == NE_EXPR && integer_zerop (lr_arg)
6245 	  && rcode == NE_EXPR && integer_zerop (rr_arg)
6246 	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6247 	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6248 	return build2_loc (loc, NE_EXPR, truth_type,
6249 			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6250 				   ll_arg, rl_arg),
6251 			   build_int_cst (TREE_TYPE (ll_arg), 0));
6252 
6253       /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
6254       if (code == TRUTH_AND_EXPR
6255 	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
6256 	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
6257 	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6258 	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6259 	return build2_loc (loc, EQ_EXPR, truth_type,
6260 			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6261 				   ll_arg, rl_arg),
6262 			   build_int_cst (TREE_TYPE (ll_arg), 0));
6263     }
6264 
6265   /* See if the comparisons can be merged.  Then get all the parameters for
6266      each side.  */
6267 
6268   if ((lcode != EQ_EXPR && lcode != NE_EXPR)
6269       || (rcode != EQ_EXPR && rcode != NE_EXPR))
6270     return 0;
6271 
6272   ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
6273   volatilep = 0;
6274   ll_inner = decode_field_reference (loc, &ll_arg,
6275 				     &ll_bitsize, &ll_bitpos, &ll_mode,
6276 				     &ll_unsignedp, &ll_reversep, &volatilep,
6277 				     &ll_mask, &ll_and_mask);
6278   lr_inner = decode_field_reference (loc, &lr_arg,
6279 				     &lr_bitsize, &lr_bitpos, &lr_mode,
6280 				     &lr_unsignedp, &lr_reversep, &volatilep,
6281 				     &lr_mask, &lr_and_mask);
6282   rl_inner = decode_field_reference (loc, &rl_arg,
6283 				     &rl_bitsize, &rl_bitpos, &rl_mode,
6284 				     &rl_unsignedp, &rl_reversep, &volatilep,
6285 				     &rl_mask, &rl_and_mask);
6286   rr_inner = decode_field_reference (loc, &rr_arg,
6287 				     &rr_bitsize, &rr_bitpos, &rr_mode,
6288 				     &rr_unsignedp, &rr_reversep, &volatilep,
6289 				     &rr_mask, &rr_and_mask);
6290 
6291   /* The inner operation on the lhs of each comparison must be the
6292      same if we are to be able to do anything.  Then see if we
6293      have constants.  If not, the same must be true for
6294      the rhs's.  */
6295   if (volatilep
6296       || ll_reversep != rl_reversep
6297       || ll_inner == 0 || rl_inner == 0
6298       || ! operand_equal_p (ll_inner, rl_inner, 0))
6299     return 0;
6300 
6301   if (TREE_CODE (lr_arg) == INTEGER_CST
6302       && TREE_CODE (rr_arg) == INTEGER_CST)
6303     {
6304       l_const = lr_arg, r_const = rr_arg;
6305       lr_reversep = ll_reversep;
6306     }
6307   else if (lr_reversep != rr_reversep
6308 	   || lr_inner == 0 || rr_inner == 0
6309 	   || ! operand_equal_p (lr_inner, rr_inner, 0))
6310     return 0;
6311   else
6312     l_const = r_const = 0;
6313 
6314   /* If either comparison code is not correct for our logical operation,
6315      fail.  However, we can convert a one-bit comparison against zero into
6316      the opposite comparison against that bit being set in the field.  */
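  /* For example, when merging with TRUTH_AND_EXPR (wanted code EQ_EXPR),
     a test such as (x & 8) != 0 is recast as (x & 8) == 8.  */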
6317 
6318   wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
6319   if (lcode != wanted_code)
6320     {
6321       if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
6322 	{
6323 	  /* Make the left operand unsigned, since we are only interested
6324 	     in the value of one bit.  Otherwise we are doing the wrong
6325 	     thing below.  */
6326 	  ll_unsignedp = 1;
6327 	  l_const = ll_mask;
6328 	}
6329       else
6330 	return 0;
6331     }
6332 
6333   /* This is analogous to the code for l_const above.  */
6334   if (rcode != wanted_code)
6335     {
6336       if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
6337 	{
6338 	  rl_unsignedp = 1;
6339 	  r_const = rl_mask;
6340 	}
6341       else
6342 	return 0;
6343     }
6344 
6345   /* See if we can find a mode that contains both fields being compared on
6346      the left.  If we can't, fail.  Otherwise, update all constants and masks
6347      to be relative to a field of that size.  */
6348   first_bit = MIN (ll_bitpos, rl_bitpos);
6349   end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
6350   if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6351 		      TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD,
6352 		      volatilep, &lnmode))
6353     return 0;
6354 
6355   lnbitsize = GET_MODE_BITSIZE (lnmode);
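  /* Round FIRST_BIT down to a multiple of the chosen mode's width so the
     combined bit-field reference is properly aligned.  */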
6356   lnbitpos = first_bit & ~ (lnbitsize - 1);
6357   lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
6358   xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
6359 
6360   if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6361     {
6362       xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
6363       xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
6364     }
6365 
6366   ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
6367 			 size_int (xll_bitpos));
6368   rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
6369 			 size_int (xrl_bitpos));
6370 
6371   if (l_const)
6372     {
6373       l_const = fold_convert_loc (loc, lntype, l_const);
6374       l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
6375       l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
6376       if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
6377 					fold_build1_loc (loc, BIT_NOT_EXPR,
6378 						     lntype, ll_mask))))
6379 	{
6380 	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6381 
6382 	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6383 	}
6384     }
6385   if (r_const)
6386     {
6387       r_const = fold_convert_loc (loc, lntype, r_const);
6388       r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
6389       r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
6390       if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
6391 					fold_build1_loc (loc, BIT_NOT_EXPR,
6392 						     lntype, rl_mask))))
6393 	{
6394 	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6395 
6396 	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6397 	}
6398     }
6399 
6400   /* If the right sides are not constants, do the same for them.  Also,
6401      disallow this optimization if a size, signedness or storage order
6402      mismatch occurs between the left and right sides.  */
6403   if (l_const == 0)
6404     {
6405       if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
6406 	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
6407 	  || ll_reversep != lr_reversep
6408 	  /* Make sure the two fields on the right
6409 	     correspond to the left without being swapped.  */
6410 	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
6411 	return 0;
6412 
6413       first_bit = MIN (lr_bitpos, rr_bitpos);
6414       end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
6415       if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6416 			  TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD,
6417 			  volatilep, &rnmode))
6418 	return 0;
6419 
6420       rnbitsize = GET_MODE_BITSIZE (rnmode);
6421       rnbitpos = first_bit & ~ (rnbitsize - 1);
6422       rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
6423       xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
6424 
6425       if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6426 	{
6427 	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
6428 	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
6429 	}
6430 
6431       lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6432 							    rntype, lr_mask),
6433 			     size_int (xlr_bitpos));
6434       rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6435 							    rntype, rr_mask),
6436 			     size_int (xrr_bitpos));
6437 
6438       /* Make a mask that corresponds to both fields being compared.
6439 	 Do this for both items being compared.  If the operands are the
6440 	 same size and the bits being compared are in the same position
6441 	 then we can do this by masking both and comparing the masked
6442 	 results.  */
6443       ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6444       lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
6445       if (lnbitsize == rnbitsize
6446 	  && xll_bitpos == xlr_bitpos
6447 	  && lnbitpos >= 0
6448 	  && rnbitpos >= 0)
6449 	{
6450 	  lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
6451 				    lntype, lnbitsize, lnbitpos,
6452 				    ll_unsignedp || rl_unsignedp, ll_reversep);
6453 	  if (! all_ones_mask_p (ll_mask, lnbitsize))
6454 	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
6455 
6456 	  rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
6457 				    rntype, rnbitsize, rnbitpos,
6458 				    lr_unsignedp || rr_unsignedp, lr_reversep);
6459 	  if (! all_ones_mask_p (lr_mask, rnbitsize))
6460 	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
6461 
6462 	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6463 	}
6464 
6465       /* There is still another way we can do something:  If both pairs of
6466 	 fields being compared are adjacent, we may be able to make a wider
6467 	 field containing them both.
6468 
6469 	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
6470 	 the mask must be shifted to account for the shift done by
6471 	 make_bit_field_ref.  */
6472       if (((ll_bitsize + ll_bitpos == rl_bitpos
6473 	    && lr_bitsize + lr_bitpos == rr_bitpos)
6474 	   || (ll_bitpos == rl_bitpos + rl_bitsize
6475 	       && lr_bitpos == rr_bitpos + rr_bitsize))
6476 	  && ll_bitpos >= 0
6477 	  && rl_bitpos >= 0
6478 	  && lr_bitpos >= 0
6479 	  && rr_bitpos >= 0)
6480 	{
6481 	  tree type;
6482 
6483 	  lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
6484 				    ll_bitsize + rl_bitsize,
6485 				    MIN (ll_bitpos, rl_bitpos),
6486 				    ll_unsignedp, ll_reversep);
6487 	  rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
6488 				    lr_bitsize + rr_bitsize,
6489 				    MIN (lr_bitpos, rr_bitpos),
6490 				    lr_unsignedp, lr_reversep);
6491 
6492 	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
6493 				 size_int (MIN (xll_bitpos, xrl_bitpos)));
6494 	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
6495 				 size_int (MIN (xlr_bitpos, xrr_bitpos)));
6496 
6497 	  /* Convert to the smaller type before masking out unwanted bits.  */
6498 	  type = lntype;
6499 	  if (lntype != rntype)
6500 	    {
6501 	      if (lnbitsize > rnbitsize)
6502 		{
6503 		  lhs = fold_convert_loc (loc, rntype, lhs);
6504 		  ll_mask = fold_convert_loc (loc, rntype, ll_mask);
6505 		  type = rntype;
6506 		}
6507 	      else if (lnbitsize < rnbitsize)
6508 		{
6509 		  rhs = fold_convert_loc (loc, lntype, rhs);
6510 		  lr_mask = fold_convert_loc (loc, lntype, lr_mask);
6511 		  type = lntype;
6512 		}
6513 	    }
6514 
6515 	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
6516 	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
6517 
6518 	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
6519 	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
6520 
6521 	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6522 	}
6523 
6524       return 0;
6525     }
6526 
6527   /* Handle the case of comparisons with constants.  If there is something in
6528      common between the masks, those bits of the constants must be the same.
6529      If not, the condition is always false.  Test for this to avoid generating
6530      incorrect code below.  */
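  /* For example, (x & 3) == 1 && (x & 5) == 4 can never be true: the
     shared bit 0 would have to be both 1 and 0.  */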
6531   result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
6532   if (! integer_zerop (result)
6533       && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
6534 			   const_binop (BIT_AND_EXPR, result, r_const)) != 1)
6535     {
6536       if (wanted_code == NE_EXPR)
6537 	{
6538 	  warning (0, "%<or%> of unmatched not-equal tests is always 1");
6539 	  return constant_boolean_node (true, truth_type);
6540 	}
6541       else
6542 	{
6543 	  warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6544 	  return constant_boolean_node (false, truth_type);
6545 	}
6546     }
6547 
6548   if (lnbitpos < 0)
6549     return 0;
6550 
6551   /* Construct the expression we will return.  First get the component
6552      reference we will make.  Unless the mask is all ones the width of
6553      that field, perform the mask operation.  Then compare with the
6554      merged constant.  */
6555   result = make_bit_field_ref (loc, ll_inner, ll_arg,
6556 			       lntype, lnbitsize, lnbitpos,
6557 			       ll_unsignedp || rl_unsignedp, ll_reversep);
6558 
6559   ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6560   if (! all_ones_mask_p (ll_mask, lnbitsize))
6561     result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
6562 
6563   return build2_loc (loc, wanted_code, truth_type, result,
6564 		     const_binop (BIT_IOR_EXPR, l_const, r_const));
6565 }
6566 
6567 /* T is an integer expression that is being multiplied, divided, or taken a
6568    modulus (CODE says which and what kind of divide or modulus) by a
6569    constant C.  See if we can eliminate that operation by folding it with
6570    other operations already in T.  WIDE_TYPE, if non-null, is a type that
6571    should be used for the computation if wider than our type.
6572 
6573    For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6574    (X * 2) + (Y * 4).  We must, however, be assured that either the original
6575    expression would not overflow or that overflow is undefined for the type
6576    in the language in question.
6577 
6578    If we return a non-null expression, it is an equivalent form of the
6579    original computation, but need not be in the original type.
6580 
6581    We set *STRICT_OVERFLOW_P to true if the return value depends on
6582    signed overflow being undefined.  Otherwise we do not change
6583    *STRICT_OVERFLOW_P.  */
6584 
6585 static tree
6586 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6587 		bool *strict_overflow_p)
6588 {
6589   /* To avoid exponential search depth, refuse to allow recursion past
6590      three levels.  Beyond that (1) it's highly unlikely that we'll find
6591      something interesting and (2) we've probably processed it before
6592      when we built the inner expression.  */
6593 
6594   static int depth;
6595   tree ret;
6596 
6597   if (depth > 3)
6598     return NULL;
6599 
6600   depth++;
6601   ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6602   depth--;
6603 
6604   return ret;
6605 }
6606 
6607 static tree
6608 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6609 		  bool *strict_overflow_p)
6610 {
6611   tree type = TREE_TYPE (t);
6612   enum tree_code tcode = TREE_CODE (t);
6613   tree ctype = (wide_type != 0
6614 		&& (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
6615 		    > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
6616 		? wide_type : type);
6617   tree t1, t2;
6618   int same_p = tcode == code;
6619   tree op0 = NULL_TREE, op1 = NULL_TREE;
6620   bool sub_strict_overflow_p;
6621 
6622   /* Don't deal with constants of zero here; they confuse the code below.  */
6623   if (integer_zerop (c))
6624     return NULL_TREE;
6625 
6626   if (TREE_CODE_CLASS (tcode) == tcc_unary)
6627     op0 = TREE_OPERAND (t, 0);
6628 
6629   if (TREE_CODE_CLASS (tcode) == tcc_binary)
6630     op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6631 
6632   /* Note that we need not handle conditional operations here since fold
6633      already handles those cases.  So just do arithmetic here.  */
6634   switch (tcode)
6635     {
6636     case INTEGER_CST:
6637       /* For a constant, we can always simplify if we are a multiply
6638 	 or (for divide and modulus) if it is a multiple of our constant.  */
6639       if (code == MULT_EXPR
6640 	  || wi::multiple_of_p (wi::to_wide (t), wi::to_wide (c),
6641 				TYPE_SIGN (type)))
6642 	{
6643 	  tree tem = const_binop (code, fold_convert (ctype, t),
6644 				  fold_convert (ctype, c));
6645 	  /* If the multiplication overflowed, we lost information on it.
6646 	     See PR68142 and PR69845.  */
6647 	  if (TREE_OVERFLOW (tem))
6648 	    return NULL_TREE;
6649 	  return tem;
6650 	}
6651       break;
6652 
6653     CASE_CONVERT: case NON_LVALUE_EXPR:
6654       /* If op0 is an expression ...  */
6655       if ((COMPARISON_CLASS_P (op0)
6656 	   || UNARY_CLASS_P (op0)
6657 	   || BINARY_CLASS_P (op0)
6658 	   || VL_EXP_CLASS_P (op0)
6659 	   || EXPRESSION_CLASS_P (op0))
6660 	  /* ... and has wrapping overflow, and its type is smaller
6661 	     than ctype, then we cannot pass through as widening.  */
6662 	  && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6663 		&& TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6664 	       && (TYPE_PRECISION (ctype)
6665 	           > TYPE_PRECISION (TREE_TYPE (op0))))
6666 	      /* ... or this is a truncation (t is narrower than op0),
6667 		 then we cannot pass through this narrowing.  */
6668 	      || (TYPE_PRECISION (type)
6669 		  < TYPE_PRECISION (TREE_TYPE (op0)))
6670 	      /* ... or signedness changes for division or modulus,
6671 		 then we cannot pass through this conversion.  */
6672 	      || (code != MULT_EXPR
6673 		  && (TYPE_UNSIGNED (ctype)
6674 		      != TYPE_UNSIGNED (TREE_TYPE (op0))))
6675 	      /* ... or has undefined overflow while the converted to
6676 		 type has not, we cannot do the operation in the inner type
6677 		 as that would introduce undefined overflow.  */
6678 	      || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6679 		   && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6680 		  && !TYPE_OVERFLOW_UNDEFINED (type))))
6681 	break;
6682 
6683       /* Pass the constant down and see if we can make a simplification.  If
6684 	 we can, replace this expression with the inner simplification for
6685 	 possible later conversion to our or some other type.  */
6686       if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6687 	  && TREE_CODE (t2) == INTEGER_CST
6688 	  && !TREE_OVERFLOW (t2)
6689 	  && (t1 = extract_muldiv (op0, t2, code,
6690 				   code == MULT_EXPR ? ctype : NULL_TREE,
6691 				   strict_overflow_p)) != 0)
6692 	return t1;
6693       break;
6694 
6695     case ABS_EXPR:
6696       /* If widening the type changes it from signed to unsigned, then we
6697          must avoid building ABS_EXPR itself as unsigned.  */
6698       if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6699         {
6700           tree cstype = (*signed_type_for) (ctype);
6701           if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6702 	      != 0)
6703             {
6704               t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6705               return fold_convert (ctype, t1);
6706             }
6707           break;
6708         }
6709       /* If the constant is negative, we cannot simplify this.  */
6710       if (tree_int_cst_sgn (c) == -1)
6711         break;
6712       /* FALLTHROUGH */
6713     case NEGATE_EXPR:
6714       /* For division and modulus, type can't be unsigned, as e.g.
6715 	 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6716 	 For signed types, even with wrapping overflow, this is fine.  */
6717       if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6718 	break;
6719       if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6720 	  != 0)
6721 	return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6722       break;
6723 
6724     case MIN_EXPR:  case MAX_EXPR:
6725       /* If widening the type changes the signedness, then we can't perform
6726 	 this optimization as that changes the result.  */
6727       if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6728 	break;
6729 
6730       /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
6731       sub_strict_overflow_p = false;
6732       if ((t1 = extract_muldiv (op0, c, code, wide_type,
6733 				&sub_strict_overflow_p)) != 0
6734 	  && (t2 = extract_muldiv (op1, c, code, wide_type,
6735 				   &sub_strict_overflow_p)) != 0)
6736 	{
6737 	  if (tree_int_cst_sgn (c) < 0)
6738 	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6739 	  if (sub_strict_overflow_p)
6740 	    *strict_overflow_p = true;
6741 	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6742 			      fold_convert (ctype, t2));
6743 	}
6744       break;
6745 
6746     case LSHIFT_EXPR:  case RSHIFT_EXPR:
6747       /* If the second operand is constant, this is a multiplication
6748 	 or floor division, by a power of two, so we can treat it that
6749 	 way unless the multiplier or divisor overflows.  Signed
6750 	 left-shift overflow is implementation-defined rather than
6751 	 undefined in C90, so do not convert signed left shift into
6752 	 multiplication.  */
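      /* For example, (unsigned) x << 3 is handled here as x * 8 and
	 x >> 2 as x / 4 using floor division.  */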
6753       if (TREE_CODE (op1) == INTEGER_CST
6754 	  && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6755 	  /* const_binop may not detect overflow correctly,
6756 	     so check for it explicitly here.  */
6757 	  && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
6758 			wi::to_wide (op1))
6759 	  && (t1 = fold_convert (ctype,
6760 				 const_binop (LSHIFT_EXPR, size_one_node,
6761 					      op1))) != 0
6762 	  && !TREE_OVERFLOW (t1))
6763 	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6764 				       ? MULT_EXPR : FLOOR_DIV_EXPR,
6765 				       ctype,
6766 				       fold_convert (ctype, op0),
6767 				       t1),
6768 			       c, code, wide_type, strict_overflow_p);
6769       break;
6770 
6771     case PLUS_EXPR:  case MINUS_EXPR:
6772       /* See if we can eliminate the operation on both sides.  If we can, we
6773 	 can return a new PLUS or MINUS.  If we can't, the only remaining
6774 	 cases where we can do anything are if the second operand is a
6775 	 constant.  */
6776       sub_strict_overflow_p = false;
6777       t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6778       t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6779       if (t1 != 0 && t2 != 0
6780 	  && TYPE_OVERFLOW_WRAPS (ctype)
6781 	  && (code == MULT_EXPR
6782 	      /* If not multiplication, we can only do this if both operands
6783 		 are divisible by c.  */
6784 	      || (multiple_of_p (ctype, op0, c)
6785 	          && multiple_of_p (ctype, op1, c))))
6786 	{
6787 	  if (sub_strict_overflow_p)
6788 	    *strict_overflow_p = true;
6789 	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6790 			      fold_convert (ctype, t2));
6791 	}
6792 
6793       /* If this was a subtraction, negate OP1 and set it to be an addition.
6794 	 This simplifies the logic below.  */
6795       if (tcode == MINUS_EXPR)
6796 	{
6797 	  tcode = PLUS_EXPR, op1 = negate_expr (op1);
6798 	  /* If OP1 was not easily negatable, the constant may be OP0.  */
6799 	  if (TREE_CODE (op0) == INTEGER_CST)
6800 	    {
6801 	      std::swap (op0, op1);
6802 	      std::swap (t1, t2);
6803 	    }
6804 	}
6805 
6806       if (TREE_CODE (op1) != INTEGER_CST)
6807 	break;
6808 
6809       /* If either OP1 or C are negative, this optimization is not safe for
6810 	 some of the division and remainder types while for others we need
6811 	 to change the code.  */
6812       if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6813 	{
6814 	  if (code == CEIL_DIV_EXPR)
6815 	    code = FLOOR_DIV_EXPR;
6816 	  else if (code == FLOOR_DIV_EXPR)
6817 	    code = CEIL_DIV_EXPR;
6818 	  else if (code != MULT_EXPR
6819 		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6820 	    break;
6821 	}
6822 
6823       /* If it's a multiply or a division/modulus operation of a multiple
6824          of our constant, do the operation and verify it doesn't overflow.  */
6825       if (code == MULT_EXPR
6826 	  || wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6827 				TYPE_SIGN (type)))
6828 	{
6829 	  op1 = const_binop (code, fold_convert (ctype, op1),
6830 			     fold_convert (ctype, c));
6831 	  /* We allow the constant to overflow with wrapping semantics.  */
6832 	  if (op1 == 0
6833 	      || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6834 	    break;
6835 	}
6836       else
6837 	break;
6838 
6839       /* If we have an unsigned type, we cannot widen the operation since it
6840 	 will change the result if the original computation overflowed.  */
6841       if (TYPE_UNSIGNED (ctype) && ctype != type)
6842 	break;
6843 
6844       /* The last case is if we are a multiply.  In that case, we can
6845 	 apply the distributive law to commute the multiply and addition
6846 	 if the multiplication of the constants doesn't overflow
6847 	 and overflow is defined.  With undefined overflow
6848 	 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.
6849 	 But fold_plusminus_mult_expr would factor back any power-of-two
6850 	 value so do not distribute in the first place in this case.  */
6851       if (code == MULT_EXPR
6852 	  && TYPE_OVERFLOW_WRAPS (ctype)
6853 	  && !(tree_fits_shwi_p (c) && pow2p_hwi (absu_hwi (tree_to_shwi (c)))))
6854 	return fold_build2 (tcode, ctype,
6855 			    fold_build2 (code, ctype,
6856 					 fold_convert (ctype, op0),
6857 					 fold_convert (ctype, c)),
6858 			    op1);
6859 
6860       break;
6861 
6862     case MULT_EXPR:
6863       /* We have a special case here if we are doing something like
6864 	 (C * 8) % 4 since we know that's zero.  */
6865       if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6866 	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6867 	  /* If the multiplication can overflow we cannot optimize this.  */
6868 	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6869 	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6870 	  && wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6871 				TYPE_SIGN (type)))
6872 	{
6873 	  *strict_overflow_p = true;
6874 	  return omit_one_operand (type, integer_zero_node, op0);
6875 	}
6876 
6877       /* ... fall through ...  */
6878 
6879     case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
6880     case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
6881       /* If we can extract our operation from the LHS, do so and return a
6882 	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
6883 	 do something only if the second operand is a constant.  */
6884       if (same_p
6885 	  && TYPE_OVERFLOW_WRAPS (ctype)
6886 	  && (t1 = extract_muldiv (op0, c, code, wide_type,
6887 				   strict_overflow_p)) != 0)
6888 	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6889 			    fold_convert (ctype, op1));
6890       else if (tcode == MULT_EXPR && code == MULT_EXPR
6891 	       && TYPE_OVERFLOW_WRAPS (ctype)
6892 	       && (t1 = extract_muldiv (op1, c, code, wide_type,
6893 					strict_overflow_p)) != 0)
6894 	return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6895 			    fold_convert (ctype, t1));
6896       else if (TREE_CODE (op1) != INTEGER_CST)
6897 	return 0;
6898 
6899       /* If these are the same operation types, we can associate them
6900 	 assuming no overflow.  */
6901       if (tcode == code)
6902 	{
6903 	  bool overflow_p = false;
6904 	  wi::overflow_type overflow_mul;
6905 	  signop sign = TYPE_SIGN (ctype);
6906 	  unsigned prec = TYPE_PRECISION (ctype);
6907 	  wide_int mul = wi::mul (wi::to_wide (op1, prec),
6908 				  wi::to_wide (c, prec),
6909 				  sign, &overflow_mul);
6910 	  overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6911 	  if (overflow_mul
6912 	      && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6913 	    overflow_p = true;
6914 	  if (!overflow_p)
6915 	    return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6916 				wide_int_to_tree (ctype, mul));
6917 	}
6918 
6919       /* If these operations "cancel" each other, we have the main
6920 	 optimizations of this pass, which occur when either constant is a
6921 	 multiple of the other, in which case we replace this with an
6922 	 operation of either CODE or TCODE.
6923 
6924 	 If we have an unsigned type, we cannot do this since it will change
6925 	 the result if the original computation overflowed.  */
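      /* For example, with undefined overflow (x * 12) / 4 simplifies to
	 x * 3, and (x * 4) / 12 simplifies to x / 3.  */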
6926       if (TYPE_OVERFLOW_UNDEFINED (ctype)
6927 	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6928 	      || (tcode == MULT_EXPR
6929 		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6930 		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6931 		  && code != MULT_EXPR)))
6932 	{
6933 	  if (wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6934 				 TYPE_SIGN (type)))
6935 	    {
6936 	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
6937 		*strict_overflow_p = true;
6938 	      return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6939 				  fold_convert (ctype,
6940 						const_binop (TRUNC_DIV_EXPR,
6941 							     op1, c)));
6942 	    }
6943 	  else if (wi::multiple_of_p (wi::to_wide (c), wi::to_wide (op1),
6944 				      TYPE_SIGN (type)))
6945 	    {
6946 	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
6947 		*strict_overflow_p = true;
6948 	      return fold_build2 (code, ctype, fold_convert (ctype, op0),
6949 				  fold_convert (ctype,
6950 						const_binop (TRUNC_DIV_EXPR,
6951 							     c, op1)));
6952 	    }
6953 	}
6954       break;
6955 
6956     default:
6957       break;
6958     }
6959 
6960   return 0;
6961 }
6962 
6963 /* Return a node which has the indicated constant VALUE (either 0 or
6964    1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6965    and is of the indicated TYPE.  */
6966 
6967 tree
6968 constant_boolean_node (bool value, tree type)
6969 {
6970   if (type == integer_type_node)
6971     return value ? integer_one_node : integer_zero_node;
6972   else if (type == boolean_type_node)
6973     return value ? boolean_true_node : boolean_false_node;
6974   else if (TREE_CODE (type) == VECTOR_TYPE)
6975     return build_vector_from_val (type,
6976 				  build_int_cst (TREE_TYPE (type),
6977 						 value ? -1 : 0));
6978   else
6979     return fold_convert (type, value ? integer_one_node : integer_zero_node);
6980 }
6981 
6982 
6983 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6984    Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
6985    CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6986    expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
6987    COND is the first argument to CODE; otherwise (as in the example
6988    given here), it is the second argument.  TYPE is the type of the
6989    original expression.  Return NULL_TREE if no simplification is
6990    possible.  */
6991 
6992 static tree
6993 fold_binary_op_with_conditional_arg (location_t loc,
6994 				     enum tree_code code,
6995 				     tree type, tree op0, tree op1,
6996 				     tree cond, tree arg, int cond_first_p)
6997 {
6998   tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6999   tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
7000   tree test, true_value, false_value;
7001   tree lhs = NULL_TREE;
7002   tree rhs = NULL_TREE;
7003   enum tree_code cond_code = COND_EXPR;
7004 
7005   /* Do not move possibly trapping operations into the conditional as this
7006      pessimizes code and causes gimplification issues when applied late.  */
7007   if (operation_could_trap_p (code, FLOAT_TYPE_P (type),
7008 			      ANY_INTEGRAL_TYPE_P (type)
7009 			      && TYPE_OVERFLOW_TRAPS (type), op1))
7010     return NULL_TREE;
7011 
7012   if (TREE_CODE (cond) == COND_EXPR
7013       || TREE_CODE (cond) == VEC_COND_EXPR)
7014     {
7015       test = TREE_OPERAND (cond, 0);
7016       true_value = TREE_OPERAND (cond, 1);
7017       false_value = TREE_OPERAND (cond, 2);
7018       /* If this arm is a void-typed expression (e.g. one that throws),
7019 	 it does not make sense to try to perform a logical or arithmetic
7020 	 operation involving it.  */
7021       if (VOID_TYPE_P (TREE_TYPE (true_value)))
7022 	lhs = true_value;
7023       if (VOID_TYPE_P (TREE_TYPE (false_value)))
7024 	rhs = false_value;
7025     }
7026   else if (!(TREE_CODE (type) != VECTOR_TYPE
7027 	     && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
7028     {
7029       tree testtype = TREE_TYPE (cond);
7030       test = cond;
7031       true_value = constant_boolean_node (true, testtype);
7032       false_value = constant_boolean_node (false, testtype);
7033     }
7034   else
7035     /* Detect the case of mixing vector and scalar types - bail out.  */
7036     return NULL_TREE;
7037 
7038   if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
7039     cond_code = VEC_COND_EXPR;
7040 
7041   /* This transformation is only worthwhile if we don't have to wrap ARG
7042      in a SAVE_EXPR and the operation can be simplified without recursing
7043      on at least one of the branches once it's pushed inside the COND_EXPR.  */
7044   if (!TREE_CONSTANT (arg)
7045       && (TREE_SIDE_EFFECTS (arg)
7046 	  || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
7047 	  || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
7048     return NULL_TREE;
7049 
7050   arg = fold_convert_loc (loc, arg_type, arg);
7051   if (lhs == 0)
7052     {
7053       true_value = fold_convert_loc (loc, cond_type, true_value);
7054       if (cond_first_p)
7055 	lhs = fold_build2_loc (loc, code, type, true_value, arg);
7056       else
7057 	lhs = fold_build2_loc (loc, code, type, arg, true_value);
7058     }
7059   if (rhs == 0)
7060     {
7061       false_value = fold_convert_loc (loc, cond_type, false_value);
7062       if (cond_first_p)
7063 	rhs = fold_build2_loc (loc, code, type, false_value, arg);
7064       else
7065 	rhs = fold_build2_loc (loc, code, type, arg, false_value);
7066     }
7067 
7068   /* Check that we have simplified at least one of the branches.  */
7069   if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
7070     return NULL_TREE;
7071 
7072   return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
7073 }
7074 
7075 
7076 /* Subroutine of fold() that checks for the addition of +/- 0.0.
7077 
7078    If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
7079    TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
7080    ADDEND is the same as X.
7081 
7082    X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
7083    and finite.  The problematic cases are when X is zero, and its mode
7084    has signed zeros.  In the case of rounding towards -infinity,
7085    X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
7086    modes, X + 0 is not the same as X because -0 + 0 is 0.  */
7087 
7088 bool
7089 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
7090 {
7091   if (!real_zerop (addend))
7092     return false;
7093 
7094   /* Don't allow the fold with -fsignaling-nans.  */
7095   if (HONOR_SNANS (type))
7096     return false;
7097 
7098   /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
7099   if (!HONOR_SIGNED_ZEROS (type))
7100     return true;
7101 
7102   /* There is no case that is safe for all rounding modes.  */
7103   if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
7104     return false;
7105 
7106   /* In a vector or complex, we would need to check the sign of all zeros.  */
7107   if (TREE_CODE (addend) == VECTOR_CST)
7108     addend = uniform_vector_p (addend);
7109   if (!addend || TREE_CODE (addend) != REAL_CST)
7110     return false;
7111 
7112   /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
7113   if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
7114     negate = !negate;
7115 
7116   /* The mode has signed zeros, and we have to honor their sign.
7117      In this situation, there is only one case we can return true for.
7118      X - 0 is the same as X with default rounding.  */
7119   return negate;
7120 }
7121 
7122 /* Subroutine of match.pd that optimizes comparisons of a division by
7123    a nonzero integer constant against an integer constant, i.e.
7124    X/C1 op C2.
7125 
7126    CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
7127    GE_EXPR or LE_EXPR.  C1 and C2 must be INTEGER_CSTs.  */
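/* As an illustration, with unsigned C1 = 9 and C2 = 2, X / 9 == 2 holds
   exactly when X is in [18, 26], so *LO is set to 18 and *HI to 26.  */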
7128 
7129 enum tree_code
7130 fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
7131 		  tree *hi, bool *neg_overflow)
7132 {
7133   tree prod, tmp, type = TREE_TYPE (c1);
7134   signop sign = TYPE_SIGN (type);
7135   wi::overflow_type overflow;
7136 
7137   /* We have to do this the hard way to detect unsigned overflow.
7138      prod = int_const_binop (MULT_EXPR, c1, c2);  */
7139   wide_int val = wi::mul (wi::to_wide (c1), wi::to_wide (c2), sign, &overflow);
7140   prod = force_fit_type (type, val, -1, overflow);
7141   *neg_overflow = false;
7142 
7143   if (sign == UNSIGNED)
7144     {
7145       tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7146       *lo = prod;
7147 
7148       /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp).  */
7149       val = wi::add (wi::to_wide (prod), wi::to_wide (tmp), sign, &overflow);
7150       *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
7151     }
7152   else if (tree_int_cst_sgn (c1) >= 0)
7153     {
7154       tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7155       switch (tree_int_cst_sgn (c2))
7156 	{
7157 	case -1:
7158 	  *neg_overflow = true;
7159 	  *lo = int_const_binop (MINUS_EXPR, prod, tmp);
7160 	  *hi = prod;
7161 	  break;
7162 
7163 	case 0:
7164 	  *lo = fold_negate_const (tmp, type);
7165 	  *hi = tmp;
7166 	  break;
7167 
7168 	case 1:
7169 	  *hi = int_const_binop (PLUS_EXPR, prod, tmp);
7170 	  *lo = prod;
7171 	  break;
7172 
7173 	default:
7174 	  gcc_unreachable ();
7175 	}
7176     }
7177   else
7178     {
7179       /* A negative divisor reverses the relational operators.  */
7180       code = swap_tree_comparison (code);
7181 
7182       tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
7183       switch (tree_int_cst_sgn (c2))
7184 	{
7185 	case -1:
7186 	  *hi = int_const_binop (MINUS_EXPR, prod, tmp);
7187 	  *lo = prod;
7188 	  break;
7189 
7190 	case 0:
7191 	  *hi = fold_negate_const (tmp, type);
7192 	  *lo = tmp;
7193 	  break;
7194 
7195 	case 1:
7196 	  *neg_overflow = true;
7197 	  *lo = int_const_binop (PLUS_EXPR, prod, tmp);
7198 	  *hi = prod;
7199 	  break;
7200 
7201 	default:
7202 	  gcc_unreachable ();
7203 	}
7204     }
7205 
7206   if (code != EQ_EXPR && code != NE_EXPR)
7207     return code;
7208 
7209   if (TREE_OVERFLOW (*lo)
7210       || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
7211     *lo = NULL_TREE;
7212   if (TREE_OVERFLOW (*hi)
7213       || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
7214     *hi = NULL_TREE;
7215 
7216   return code;
7217 }
7218 
7219 
7220 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7221    equality/inequality test, then return a simplified form of the test
7222    using a sign test.  Otherwise return NULL.  TYPE is the desired
7223    result type.  */
7224 
7225 static tree
7226 fold_single_bit_test_into_sign_test (location_t loc,
7227 				     enum tree_code code, tree arg0, tree arg1,
7228 				     tree result_type)
7229 {
7230   /* If this is testing a single bit, we can optimize the test.  */
7231   if ((code == NE_EXPR || code == EQ_EXPR)
7232       && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7233       && integer_pow2p (TREE_OPERAND (arg0, 1)))
7234     {
7235       /* If we have (A & C) != 0 where C is the sign bit of A, convert
7236 	 this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
7237       tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
7238 
7239       if (arg00 != NULL_TREE
7240 	  /* This is only a win if casting to a signed type is cheap,
7241 	     i.e. when arg00's type is not a partial mode.  */
7242 	  && type_has_mode_precision_p (TREE_TYPE (arg00)))
7243 	{
7244 	  tree stype = signed_type_for (TREE_TYPE (arg00));
7245 	  return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
7246 			      result_type,
7247 			      fold_convert_loc (loc, stype, arg00),
7248 			      build_int_cst (stype, 0));
7249 	}
7250     }
7251 
7252   return NULL_TREE;
7253 }
7254 
7255 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7256    equality/inequality test, then return a simplified form of
7257    the test using shifts and logical operations.  Otherwise return
7258    NULL.  TYPE is the desired result type.  */
7259 
7260 tree
7261 fold_single_bit_test (location_t loc, enum tree_code code,
7262 		      tree arg0, tree arg1, tree result_type)
7263 {
7264   /* If this is testing a single bit, we can optimize the test.  */
7265   if ((code == NE_EXPR || code == EQ_EXPR)
7266       && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7267       && integer_pow2p (TREE_OPERAND (arg0, 1)))
7268     {
7269       tree inner = TREE_OPERAND (arg0, 0);
7270       tree type = TREE_TYPE (arg0);
7271       int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
7272       scalar_int_mode operand_mode = SCALAR_INT_TYPE_MODE (type);
7273       int ops_unsigned;
7274       tree signed_type, unsigned_type, intermediate_type;
7275       tree tem, one;
7276 
7277       /* First, see if we can fold the single bit test into a sign-bit
7278 	 test.  */
7279       tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
7280 						 result_type);
7281       if (tem)
7282 	return tem;
7283 
7284       /* Otherwise we have (A & C) != 0 where C is a single bit;
7285 	 convert that into ((A >> C2) & 1), where C2 = log2(C).
7286 	 Similarly for (A & C) == 0.  */
7287 
7288       /* If INNER is a right shift of a constant and it plus BITNUM does
7289 	 not overflow, adjust BITNUM and INNER.  */
7290       if (TREE_CODE (inner) == RSHIFT_EXPR
7291 	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
7292 	  && bitnum < TYPE_PRECISION (type)
7293 	  && wi::ltu_p (wi::to_wide (TREE_OPERAND (inner, 1)),
7294 			TYPE_PRECISION (type) - bitnum))
7295 	{
7296 	  bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
7297 	  inner = TREE_OPERAND (inner, 0);
7298 	}
7299 
7300       /* If we are going to be able to omit the AND below, we must do our
7301 	 operations as unsigned.  If we must use the AND, we have a choice.
7302 	 Normally unsigned is faster, but for some machines signed is.  */
7303       ops_unsigned = (load_extend_op (operand_mode) == SIGN_EXTEND
7304 		      && !flag_syntax_only) ? 0 : 1;
7305 
7306       signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
7307       unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
7308       intermediate_type = ops_unsigned ? unsigned_type : signed_type;
7309       inner = fold_convert_loc (loc, intermediate_type, inner);
7310 
7311       if (bitnum != 0)
7312 	inner = build2 (RSHIFT_EXPR, intermediate_type,
7313 			inner, size_int (bitnum));
7314 
7315       one = build_int_cst (intermediate_type, 1);
7316 
7317       if (code == EQ_EXPR)
7318 	inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
7319 
7320       /* Put the AND last so it can combine with more things.  */
7321       inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
7322 
7323       /* Make sure to return the proper type.  */
7324       inner = fold_convert_loc (loc, result_type, inner);
7325 
7326       return inner;
7327     }
7328   return NULL_TREE;
7329 }
7330 
7331 /* Test whether it is preferable to swap two operands, ARG0 and
7332    ARG1, for example because ARG0 is an integer constant and ARG1
7333    isn't.  */
7334 
7335 bool
7336 tree_swap_operands_p (const_tree arg0, const_tree arg1)
7337 {
7338   if (CONSTANT_CLASS_P (arg1))
7339     return 0;
7340   if (CONSTANT_CLASS_P (arg0))
7341     return 1;
7342 
7343   STRIP_NOPS (arg0);
7344   STRIP_NOPS (arg1);
7345 
7346   if (TREE_CONSTANT (arg1))
7347     return 0;
7348   if (TREE_CONSTANT (arg0))
7349     return 1;
7350 
7351   /* It is preferable to swap two SSA_NAME to ensure a canonical form
7352      for commutative and comparison operators.  Ensuring a canonical
7353      form allows the optimizers to find additional redundancies without
7354      having to explicitly check for both orderings.  */
7355   if (TREE_CODE (arg0) == SSA_NAME
7356       && TREE_CODE (arg1) == SSA_NAME
7357       && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7358     return 1;
7359 
7360   /* Put SSA_NAMEs last.  */
7361   if (TREE_CODE (arg1) == SSA_NAME)
7362     return 0;
7363   if (TREE_CODE (arg0) == SSA_NAME)
7364     return 1;
7365 
7366   /* Put variables last.  */
7367   if (DECL_P (arg1))
7368     return 0;
7369   if (DECL_P (arg0))
7370     return 1;
7371 
7372   return 0;
7373 }
7374 
7375 
7376 /* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
7377    means A >= Y && A != MAX, but in this case we know that
7378    A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */
7379 
7380 static tree
7381 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7382 {
7383   tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7384 
7385   if (TREE_CODE (bound) == LT_EXPR)
7386     a = TREE_OPERAND (bound, 0);
7387   else if (TREE_CODE (bound) == GT_EXPR)
7388     a = TREE_OPERAND (bound, 1);
7389   else
7390     return NULL_TREE;
7391 
7392   typea = TREE_TYPE (a);
7393   if (!INTEGRAL_TYPE_P (typea)
7394       && !POINTER_TYPE_P (typea))
7395     return NULL_TREE;
7396 
7397   if (TREE_CODE (ineq) == LT_EXPR)
7398     {
7399       a1 = TREE_OPERAND (ineq, 1);
7400       y = TREE_OPERAND (ineq, 0);
7401     }
7402   else if (TREE_CODE (ineq) == GT_EXPR)
7403     {
7404       a1 = TREE_OPERAND (ineq, 0);
7405       y = TREE_OPERAND (ineq, 1);
7406     }
7407   else
7408     return NULL_TREE;
7409 
7410   if (TREE_TYPE (a1) != typea)
7411     return NULL_TREE;
7412 
7413   if (POINTER_TYPE_P (typea))
7414     {
7415       /* Convert the pointers to integers before taking the difference.  */
7416       tree ta = fold_convert_loc (loc, ssizetype, a);
7417       tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7418       diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7419     }
7420   else
7421     diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7422 
7423   if (!diff || !integer_onep (diff))
7424    return NULL_TREE;
7425 
7426   return fold_build2_loc (loc, GE_EXPR, type, a, y);
7427 }
7428 
7429 /* Fold a sum or difference of at least one multiplication.
7430    Returns the folded tree or NULL if no simplification could be made.  */
7431 
7432 static tree
7433 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7434 			  tree arg0, tree arg1)
7435 {
7436   tree arg00, arg01, arg10, arg11;
7437   tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7438 
7439   /* (A * C) +- (B * C) -> (A+-B) * C.
7440      (A * C) +- A -> A * (C+-1).
7441      We are most concerned about the case where C is a constant,
7442      but other combinations show up during loop reduction.  Since
7443      it is not difficult, try all four possibilities.  */
7444 
7445   if (TREE_CODE (arg0) == MULT_EXPR)
7446     {
7447       arg00 = TREE_OPERAND (arg0, 0);
7448       arg01 = TREE_OPERAND (arg0, 1);
7449     }
7450   else if (TREE_CODE (arg0) == INTEGER_CST)
7451     {
7452       arg00 = build_one_cst (type);
7453       arg01 = arg0;
7454     }
7455   else
7456     {
7457       /* We cannot generate constant 1 for fract.  */
7458       if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7459 	return NULL_TREE;
7460       arg00 = arg0;
7461       arg01 = build_one_cst (type);
7462     }
7463   if (TREE_CODE (arg1) == MULT_EXPR)
7464     {
7465       arg10 = TREE_OPERAND (arg1, 0);
7466       arg11 = TREE_OPERAND (arg1, 1);
7467     }
7468   else if (TREE_CODE (arg1) == INTEGER_CST)
7469     {
7470       arg10 = build_one_cst (type);
7471       /* As we canonicalize A - 2 to A + -2 get rid of that sign for
7472 	 the purpose of this canonicalization.  */
7473       if (wi::neg_p (wi::to_wide (arg1), TYPE_SIGN (TREE_TYPE (arg1)))
7474 	  && negate_expr_p (arg1)
7475 	  && code == PLUS_EXPR)
7476 	{
7477 	  arg11 = negate_expr (arg1);
7478 	  code = MINUS_EXPR;
7479 	}
7480       else
7481 	arg11 = arg1;
7482     }
7483   else
7484     {
7485       /* We cannot generate constant 1 for fract.  */
7486       if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7487 	return NULL_TREE;
7488       arg10 = arg1;
7489       arg11 = build_one_cst (type);
7490     }
7491   same = NULL_TREE;
7492 
7493   /* Prefer factoring a common non-constant.  */
7494   if (operand_equal_p (arg00, arg10, 0))
7495     same = arg00, alt0 = arg01, alt1 = arg11;
7496   else if (operand_equal_p (arg01, arg11, 0))
7497     same = arg01, alt0 = arg00, alt1 = arg10;
7498   else if (operand_equal_p (arg00, arg11, 0))
7499     same = arg00, alt0 = arg01, alt1 = arg10;
7500   else if (operand_equal_p (arg01, arg10, 0))
7501     same = arg01, alt0 = arg00, alt1 = arg11;
7502 
7503   /* No identical multiplicands; see if we can find a common
7504      power-of-two factor in non-power-of-two multiplies.  This
7505      can help in multi-dimensional array access.  */
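  /* For example, i * 12 + j * 4 is refactored below as (i * 3 + j) * 4.  */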
7506   else if (tree_fits_shwi_p (arg01) && tree_fits_shwi_p (arg11))
7507     {
7508       HOST_WIDE_INT int01 = tree_to_shwi (arg01);
7509       HOST_WIDE_INT int11 = tree_to_shwi (arg11);
7510       HOST_WIDE_INT tmp;
7511       bool swap = false;
7512       tree maybe_same;
7513 
7514       /* Move min of absolute values to int11.  */
7515       if (absu_hwi (int01) < absu_hwi (int11))
7516         {
7517 	  tmp = int01, int01 = int11, int11 = tmp;
7518 	  alt0 = arg00, arg00 = arg10, arg10 = alt0;
7519 	  maybe_same = arg01;
7520 	  swap = true;
7521 	}
7522       else
7523 	maybe_same = arg11;
7524 
7525       const unsigned HOST_WIDE_INT factor = absu_hwi (int11);
7526       if (factor > 1
7527 	  && pow2p_hwi (factor)
7528 	  && (int01 & (factor - 1)) == 0
7529 	  /* The remainder should not be a constant, otherwise we
7530 	     end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7531 	     increased the number of multiplications necessary.  */
7532 	  && TREE_CODE (arg10) != INTEGER_CST)
7533         {
7534 	  alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7535 			      build_int_cst (TREE_TYPE (arg00),
7536 					     int01 / int11));
7537 	  alt1 = arg10;
7538 	  same = maybe_same;
7539 	  if (swap)
7540 	    maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7541 	}
7542     }
7543 
7544   if (!same)
7545     return NULL_TREE;
7546 
7547   if (! ANY_INTEGRAL_TYPE_P (type)
7548       || TYPE_OVERFLOW_WRAPS (type)
7549       /* We are neither factoring zero nor minus one.  */
7550       || TREE_CODE (same) == INTEGER_CST)
7551     return fold_build2_loc (loc, MULT_EXPR, type,
7552 			fold_build2_loc (loc, code, type,
7553 				     fold_convert_loc (loc, type, alt0),
7554 				     fold_convert_loc (loc, type, alt1)),
7555 			fold_convert_loc (loc, type, same));
7556 
7557   /* Same may be zero and thus the operation 'code' may overflow.  Likewise
7558      same may be minus one and thus the multiplication may overflow.  Perform
7559      the sum operation in an unsigned type.  */
7560   tree utype = unsigned_type_for (type);
7561   tree tem = fold_build2_loc (loc, code, utype,
7562 			      fold_convert_loc (loc, utype, alt0),
7563 			      fold_convert_loc (loc, utype, alt1));
7564   /* If the sum evaluated to a constant that is not -INF, the
7565      multiplication cannot overflow.  */
7566   if (TREE_CODE (tem) == INTEGER_CST
7567       && (wi::to_wide (tem)
7568 	  != wi::min_value (TYPE_PRECISION (utype), SIGNED)))
7569     return fold_build2_loc (loc, MULT_EXPR, type,
7570 			    fold_convert (type, tem), same);
7571 
7572   /* Do not resort to unsigned multiplication because
7573      we lose the no-overflow property of the expression.  */
7574   return NULL_TREE;
7575 }
7576 
7577 /* Subroutine of native_encode_expr.  Encode the INTEGER_CST
7578    specified by EXPR into the buffer PTR of length LEN bytes.
7579    Return the number of bytes placed in the buffer, or zero
7580    upon failure.  */
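/* As an illustration, the 16-bit constant 0x1234 is encoded as the bytes
   { 0x34, 0x12 } on a little-endian target and { 0x12, 0x34 } on a
   big-endian one.  */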
7581 
7582 static int
7583 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7584 {
7585   tree type = TREE_TYPE (expr);
7586   int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7587   int byte, offset, word, words;
7588   unsigned char value;
7589 
7590   if ((off == -1 && total_bytes > len) || off >= total_bytes)
7591     return 0;
7592   if (off == -1)
7593     off = 0;
7594 
7595   if (ptr == NULL)
7596     /* Dry run.  */
7597     return MIN (len, total_bytes - off);
7598 
7599   words = total_bytes / UNITS_PER_WORD;
7600 
7601   for (byte = 0; byte < total_bytes; byte++)
7602     {
7603       int bitpos = byte * BITS_PER_UNIT;
7604       /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7605 	 number of bytes.  */
7606       value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7607 
7608       if (total_bytes > UNITS_PER_WORD)
7609 	{
7610 	  word = byte / UNITS_PER_WORD;
7611 	  if (WORDS_BIG_ENDIAN)
7612 	    word = (words - 1) - word;
7613 	  offset = word * UNITS_PER_WORD;
7614 	  if (BYTES_BIG_ENDIAN)
7615 	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7616 	  else
7617 	    offset += byte % UNITS_PER_WORD;
7618 	}
7619       else
7620 	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7621       if (offset >= off && offset - off < len)
7622 	ptr[offset - off] = value;
7623     }
7624   return MIN (len, total_bytes - off);
7625 }
7626 
7627 
7628 /* Subroutine of native_encode_expr.  Encode the FIXED_CST
7629    specified by EXPR into the buffer PTR of length LEN bytes.
7630    Return the number of bytes placed in the buffer, or zero
7631    upon failure.  */
7632 
7633 static int
7634 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7635 {
7636   tree type = TREE_TYPE (expr);
7637   scalar_mode mode = SCALAR_TYPE_MODE (type);
7638   int total_bytes = GET_MODE_SIZE (mode);
7639   FIXED_VALUE_TYPE value;
7640   tree i_value, i_type;
7641 
7642   if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7643     return 0;
7644 
7645   i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7646 
7647   if (NULL_TREE == i_type || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7648     return 0;
7649 
7650   value = TREE_FIXED_CST (expr);
7651   i_value = double_int_to_tree (i_type, value.data);
7652 
7653   return native_encode_int (i_value, ptr, len, off);
7654 }
7655 
7656 
7657 /* Subroutine of native_encode_expr.  Encode the REAL_CST
7658    specified by EXPR into the buffer PTR of length LEN bytes.
7659    Return the number of bytes placed in the buffer, or zero
7660    upon failure.  */
7661 
7662 static int
7663 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7664 {
7665   tree type = TREE_TYPE (expr);
7666   int total_bytes = GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type));
7667   int byte, offset, word, words, bitpos;
7668   unsigned char value;
7669 
7670   /* There are always 32 bits in each long, no matter the size of
7671      the host's long.  We handle floating point representations with
7672      up to 192 bits.  */
7673   long tmp[6];
7674 
7675   if ((off == -1 && total_bytes > len) || off >= total_bytes)
7676     return 0;
7677   if (off == -1)
7678     off = 0;
7679 
7680   if (ptr == NULL)
7681     /* Dry run.  */
7682     return MIN (len, total_bytes - off);
7683 
7684   words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7685 
7686   real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7687 
7688   for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7689        bitpos += BITS_PER_UNIT)
7690     {
7691       byte = (bitpos / BITS_PER_UNIT) & 3;
7692       value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7693 
7694       if (UNITS_PER_WORD < 4)
7695 	{
7696 	  word = byte / UNITS_PER_WORD;
7697 	  if (WORDS_BIG_ENDIAN)
7698 	    word = (words - 1) - word;
7699 	  offset = word * UNITS_PER_WORD;
7700 	  if (BYTES_BIG_ENDIAN)
7701 	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7702 	  else
7703 	    offset += byte % UNITS_PER_WORD;
7704 	}
7705       else
7706 	{
7707 	  offset = byte;
7708 	  if (BYTES_BIG_ENDIAN)
7709 	    {
7710 	      /* Reverse bytes within each long, or within the entire float
7711 		 if it's smaller than a long (for HFmode).  */
7712 	      offset = MIN (3, total_bytes - 1) - offset;
7713 	      gcc_assert (offset >= 0);
7714 	    }
7715 	}
7716       offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7717       if (offset >= off
7718 	  && offset - off < len)
7719 	ptr[offset - off] = value;
7720     }
7721   return MIN (len, total_bytes - off);
7722 }
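
/* Worked example for the routine above (a sketch, assuming SFmode with
   BITS_PER_UNIT == 8): real_to_target leaves the image of 1.0f in tmp[0]
   as 0x3f800000; on a little-endian target the loop then stores
   { 0x00, 0x00, 0x80, 0x3f }, while on a big-endian target the per-long
   byte reversal stores { 0x3f, 0x80, 0x00, 0x00 }.  */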
7723 
7724 /* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
7725    specified by EXPR into the buffer PTR of length LEN bytes.
7726    Return the number of bytes placed in the buffer, or zero
7727    upon failure.  */
7728 
7729 static int
7730 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7731 {
7732   int rsize, isize;
7733   tree part;
7734 
7735   part = TREE_REALPART (expr);
7736   rsize = native_encode_expr (part, ptr, len, off);
7737   if (off == -1 && rsize == 0)
7738     return 0;
7739   part = TREE_IMAGPART (expr);
7740   if (off != -1)
7741     off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
7742   isize = native_encode_expr (part, ptr ? ptr + rsize : NULL,
7743 			      len - rsize, off);
7744   if (off == -1 && isize != rsize)
7745     return 0;
7746   return rsize + isize;
7747 }
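
/* For example (a sketch, assuming a 4-byte float), a COMPLEX_CST of type
   complex float is laid out as the 4-byte real part followed by the
   4-byte imaginary part, so a successful full encoding returns 8.  */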
7748 
7749 /* Like native_encode_vector, but only encode the first COUNT elements.
7750    The other arguments are as for native_encode_vector.  */
7751 
7752 static int
7753 native_encode_vector_part (const_tree expr, unsigned char *ptr, int len,
7754 			   int off, unsigned HOST_WIDE_INT count)
7755 {
7756   tree itype = TREE_TYPE (TREE_TYPE (expr));
7757   if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (expr))
7758       && TYPE_PRECISION (itype) <= BITS_PER_UNIT)
7759     {
7760       /* This is the only case in which elements can be smaller than a byte.
7761 	 Element 0 is always in the lsb of the containing byte.  */
7762       unsigned int elt_bits = TYPE_PRECISION (itype);
7763       int total_bytes = CEIL (elt_bits * count, BITS_PER_UNIT);
7764       if ((off == -1 && total_bytes > len) || off >= total_bytes)
7765 	return 0;
7766 
7767       if (off == -1)
7768 	off = 0;
7769 
7770       /* Zero the buffer and then set bits later where necessary.  */
7771       int extract_bytes = MIN (len, total_bytes - off);
7772       if (ptr)
7773 	memset (ptr, 0, extract_bytes);
7774 
7775       unsigned int elts_per_byte = BITS_PER_UNIT / elt_bits;
7776       unsigned int first_elt = off * elts_per_byte;
7777       unsigned int extract_elts = extract_bytes * elts_per_byte;
7778       for (unsigned int i = 0; i < extract_elts; ++i)
7779 	{
7780 	  tree elt = VECTOR_CST_ELT (expr, first_elt + i);
7781 	  if (TREE_CODE (elt) != INTEGER_CST)
7782 	    return 0;
7783 
7784 	  if (ptr && wi::extract_uhwi (wi::to_wide (elt), 0, 1))
7785 	    {
7786 	      unsigned int bit = i * elt_bits;
7787 	      ptr[bit / BITS_PER_UNIT] |= 1 << (bit % BITS_PER_UNIT);
7788 	    }
7789 	}
7790       return extract_bytes;
7791     }
7792 
7793   int offset = 0;
7794   int size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
7795   for (unsigned HOST_WIDE_INT i = 0; i < count; i++)
7796     {
7797       if (off >= size)
7798 	{
7799 	  off -= size;
7800 	  continue;
7801 	}
7802       tree elem = VECTOR_CST_ELT (expr, i);
7803       int res = native_encode_expr (elem, ptr ? ptr + offset : NULL,
7804 				    len - offset, off);
7805       if ((off == -1 && res != size) || res == 0)
7806 	return 0;
7807       offset += res;
7808       if (offset >= len)
7809 	return (off == -1 && i < count - 1) ? 0 : offset;
7810       if (off != -1)
7811 	off = 0;
7812     }
7813   return offset;
7814 }
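
/* Worked example for the boolean-vector case above: with 1-bit elements,
   the eight elements { 1, 0, 1, 1, 0, 0, 0, 0 } pack into the single
   byte 0x0d, element 0 occupying the least significant bit.  */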
7815 
7816 /* Subroutine of native_encode_expr.  Encode the VECTOR_CST
7817    specified by EXPR into the buffer PTR of length LEN bytes.
7818    Return the number of bytes placed in the buffer, or zero
7819    upon failure.  */
7820 
7821 static int
7822 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7823 {
7824   unsigned HOST_WIDE_INT count;
7825   if (!VECTOR_CST_NELTS (expr).is_constant (&count))
7826     return 0;
7827   return native_encode_vector_part (expr, ptr, len, off, count);
7828 }
7829 
7830 
7831 /* Subroutine of native_encode_expr.  Encode the STRING_CST
7832    specified by EXPR into the buffer PTR of length LEN bytes.
7833    Return the number of bytes placed in the buffer, or zero
7834    upon failure.  */
7835 
7836 static int
7837 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7838 {
7839   tree type = TREE_TYPE (expr);
7840 
7841   /* Wide-char strings are encoded in target byte-order so encoding
7842      them natively is trivial.  */
7843   if (BITS_PER_UNIT != CHAR_BIT
7844       || TREE_CODE (type) != ARRAY_TYPE
7845       || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7846       || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7847     return 0;
7848 
7849   HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
7850   if ((off == -1 && total_bytes > len) || off >= total_bytes)
7851     return 0;
7852   if (off == -1)
7853     off = 0;
7854   len = MIN (total_bytes - off, len);
7855   if (ptr == NULL)
7856     /* Dry run.  */;
7857   else
7858     {
7859       int written = 0;
7860       if (off < TREE_STRING_LENGTH (expr))
7861 	{
7862 	  written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7863 	  memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7864 	}
7865       memset (ptr + written, 0, len - written);
7866     }
7867   return len;
7868 }
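
/* Worked example for the routine above: encoding the STRING_CST "hi"
   with array type char[8] copies the bytes 'h', 'i', '\0' and then
   zero-fills the remaining five bytes, returning 8 when OFF == -1
   and LEN >= 8.  */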
7869 
7870 
7871 /* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
7872    REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7873    buffer PTR of length LEN bytes.  If PTR is NULL, don't actually store
7874    anything, just do a dry run.  If OFF is not -1 then start
7875    the encoding at byte offset OFF and encode at most LEN bytes.
7876    Return the number of bytes placed in the buffer, or zero upon failure.  */
7877 
7878 int
7879 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7880 {
7881   /* We don't support starting at negative offset and -1 is special.  */
7882   if (off < -1)
7883     return 0;
7884 
7885   switch (TREE_CODE (expr))
7886     {
7887     case INTEGER_CST:
7888       return native_encode_int (expr, ptr, len, off);
7889 
7890     case REAL_CST:
7891       return native_encode_real (expr, ptr, len, off);
7892 
7893     case FIXED_CST:
7894       return native_encode_fixed (expr, ptr, len, off);
7895 
7896     case COMPLEX_CST:
7897       return native_encode_complex (expr, ptr, len, off);
7898 
7899     case VECTOR_CST:
7900       return native_encode_vector (expr, ptr, len, off);
7901 
7902     case STRING_CST:
7903       return native_encode_string (expr, ptr, len, off);
7904 
7905     default:
7906       return 0;
7907     }
7908 }
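
/* A minimal usage sketch for native_encode_expr (hypothetical caller):

     unsigned char buf[64];
     int len = native_encode_expr (expr, NULL, sizeof buf, -1);
     if (len > 0 && native_encode_expr (expr, buf, len, -1) == len)
       /* buf now holds the target memory image of EXPR.  */;

   Passing a NULL pointer requests a dry run that returns the encoded
   size without writing; a zero result means the constant cannot be
   encoded (unsupported tree code, or too large for the buffer).  */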
7909 
7910 /* Similar to native_encode_expr, but also handle CONSTRUCTORs, VCEs,
7911    NON_LVALUE_EXPRs and nops.  */
7912 
7913 int
7914 native_encode_initializer (tree init, unsigned char *ptr, int len,
7915 			   int off)
7916 {
7917   /* We don't support starting at negative offset and -1 is special.  */
7918   if (off < -1 || init == NULL_TREE)
7919     return 0;
7920 
7921   STRIP_NOPS (init);
7922   switch (TREE_CODE (init))
7923     {
7924     case VIEW_CONVERT_EXPR:
7925     case NON_LVALUE_EXPR:
7926       return native_encode_initializer (TREE_OPERAND (init, 0), ptr, len, off);
7927     default:
7928       return native_encode_expr (init, ptr, len, off);
7929     case CONSTRUCTOR:
7930       tree type = TREE_TYPE (init);
7931       HOST_WIDE_INT total_bytes = int_size_in_bytes (type);
7932       if (total_bytes < 0)
7933 	return 0;
7934       if ((off == -1 && total_bytes > len) || off >= total_bytes)
7935 	return 0;
7936       int o = off == -1 ? 0 : off;
7937       if (TREE_CODE (type) == ARRAY_TYPE)
7938 	{
7939 	  HOST_WIDE_INT min_index;
7940 	  unsigned HOST_WIDE_INT cnt;
7941 	  HOST_WIDE_INT curpos = 0, fieldsize;
7942 	  constructor_elt *ce;
7943 
7944 	  if (TYPE_DOMAIN (type) == NULL_TREE
7945 	      || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
7946 	    return 0;
7947 
7948 	  fieldsize = int_size_in_bytes (TREE_TYPE (type));
7949 	  if (fieldsize <= 0)
7950 	    return 0;
7951 
7952 	  min_index = tree_to_shwi (TYPE_MIN_VALUE (TYPE_DOMAIN (type)));
7953 	  if (ptr != NULL)
7954 	    memset (ptr, '\0', MIN (total_bytes - off, len));
7955 
7956 	  FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
7957 	    {
7958 	      tree val = ce->value;
7959 	      tree index = ce->index;
7960 	      HOST_WIDE_INT pos = curpos, count = 0;
7961 	      bool full = false;
7962 	      if (index && TREE_CODE (index) == RANGE_EXPR)
7963 		{
7964 		  if (!tree_fits_shwi_p (TREE_OPERAND (index, 0))
7965 		      || !tree_fits_shwi_p (TREE_OPERAND (index, 1)))
7966 		    return 0;
7967 		  pos = (tree_to_shwi (TREE_OPERAND (index, 0)) - min_index)
7968 			* fieldsize;
7969 		  count = (tree_to_shwi (TREE_OPERAND (index, 1))
7970 			   - tree_to_shwi (TREE_OPERAND (index, 0)));
7971 		}
7972 	      else if (index)
7973 		{
7974 		  if (!tree_fits_shwi_p (index))
7975 		    return 0;
7976 		  pos = (tree_to_shwi (index) - min_index) * fieldsize;
7977 		}
7978 
7979 	      curpos = pos;
7980 	      if (val)
7981 		do
7982 		  {
7983 		    if (off == -1
7984 			|| (curpos >= off
7985 			    && (curpos + fieldsize
7986 				<= (HOST_WIDE_INT) off + len)))
7987 		      {
7988 			if (full)
7989 			  {
7990 			    if (ptr)
7991 			      memcpy (ptr + (curpos - o), ptr + (pos - o),
7992 				      fieldsize);
7993 			  }
7994 			else if (!native_encode_initializer (val,
7995 							     ptr
7996 							     ? ptr + curpos - o
7997 							     : NULL,
7998 							     fieldsize,
7999 							     off == -1 ? -1
8000 								       : 0))
8001 			  return 0;
8002 			else
8003 			  {
8004 			    full = true;
8005 			    pos = curpos;
8006 			  }
8007 		      }
8008 		    else if (curpos + fieldsize > off
8009 			     && curpos < (HOST_WIDE_INT) off + len)
8010 		      {
8011 			/* Partial overlap.  */
8012 			unsigned char *p = NULL;
8013 			int no = 0;
8014 			int l;
8015 			if (curpos >= off)
8016 			  {
8017 			    if (ptr)
8018 			      p = ptr + curpos - off;
8019 			    l = MIN ((HOST_WIDE_INT) off + len - curpos,
8020 				     fieldsize);
8021 			  }
8022 			else
8023 			  {
8024 			    p = ptr;
8025 			    no = off - curpos;
8026 			    l = len;
8027 			  }
8028 			if (!native_encode_initializer (val, p, l, no))
8029 			  return 0;
8030 		      }
8031 		    curpos += fieldsize;
8032 		  }
8033 		while (count-- != 0);
8034 	    }
8035 	  return MIN (total_bytes - off, len);
8036 	}
8037       else if (TREE_CODE (type) == RECORD_TYPE
8038 	       || TREE_CODE (type) == UNION_TYPE)
8039 	{
8040 	  unsigned HOST_WIDE_INT cnt;
8041 	  constructor_elt *ce;
8042 
8043 	  if (ptr != NULL)
8044 	    memset (ptr, '\0', MIN (total_bytes - off, len));
8045 	  FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
8046 	    {
8047 	      tree field = ce->index;
8048 	      tree val = ce->value;
8049 	      HOST_WIDE_INT pos, fieldsize;
8050 
8051 	      if (field == NULL_TREE)
8052 		return 0;
8053 
8054 	      pos = int_byte_position (field);
8055 	      if (off != -1 && (HOST_WIDE_INT) off + len <= pos)
8056 		continue;
8057 
8058 	      if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
8059 		  && TYPE_DOMAIN (TREE_TYPE (field))
8060 		  && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
8061 		return 0;
8062 	      if (DECL_SIZE_UNIT (field) == NULL_TREE
8063 		  || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
8064 		return 0;
8065 	      fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
8066 	      if (fieldsize == 0)
8067 		continue;
8068 
8069 	      if (off != -1 && pos + fieldsize <= off)
8070 		continue;
8071 
8072 	      if (DECL_BIT_FIELD (field))
8073 		return 0;
8074 
8075 	      if (val == NULL_TREE)
8076 		continue;
8077 
8078 	      if (off == -1
8079 		  || (pos >= off
8080 		      && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8081 		{
8082 		  if (!native_encode_initializer (val, ptr ? ptr + pos - o
8083 							   : NULL,
8084 						  fieldsize,
8085 						  off == -1 ? -1 : 0))
8086 		    return 0;
8087 		}
8088 	      else
8089 		{
8090 		  /* Partial overlap.  */
8091 		  unsigned char *p = NULL;
8092 		  int no = 0;
8093 		  int l;
8094 		  if (pos >= off)
8095 		    {
8096 		      if (ptr)
8097 			p = ptr + pos - off;
8098 		      l = MIN ((HOST_WIDE_INT) off + len - pos,
8099 				fieldsize);
8100 		    }
8101 		  else
8102 		    {
8103 		      p = ptr;
8104 		      no = off - pos;
8105 		      l = len;
8106 		    }
8107 		  if (!native_encode_initializer (val, p, l, no))
8108 		    return 0;
8109 		}
8110 	    }
8111 	  return MIN (total_bytes - off, len);
8112 	}
8113       return 0;
8114     }
8115 }
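
/* For example, a CONSTRUCTOR for struct { int i; char c; } with value
   { 1, 'x' } is encoded by first zeroing the buffer (which clears any
   padding) and then writing each field at its byte position, so on a
   little-endian target with a 4-byte, 4-aligned int the result is
   { 0x01, 0x00, 0x00, 0x00, 'x', 0x00, 0x00, 0x00 } for the 8-byte
   struct with three bytes of tail padding (a sketch; the exact layout
   depends on the target ABI).  */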
8116 
8117 
8118 /* Subroutine of native_interpret_expr.  Interpret the contents of
8119    the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
8120    If the buffer cannot be interpreted, return NULL_TREE.  */
8121 
8122 static tree
8123 native_interpret_int (tree type, const unsigned char *ptr, int len)
8124 {
8125   int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
8126 
8127   if (total_bytes > len
8128       || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8129     return NULL_TREE;
8130 
8131   wide_int result = wi::from_buffer (ptr, total_bytes);
8132 
8133   return wide_int_to_tree (type, result);
8134 }
8135 
8136 
8137 /* Subroutine of native_interpret_expr.  Interpret the contents of
8138    the buffer PTR of length LEN as a FIXED_CST of type TYPE.
8139    If the buffer cannot be interpreted, return NULL_TREE.  */
8140 
8141 static tree
8142 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
8143 {
8144   scalar_mode mode = SCALAR_TYPE_MODE (type);
8145   int total_bytes = GET_MODE_SIZE (mode);
8146   double_int result;
8147   FIXED_VALUE_TYPE fixed_value;
8148 
8149   if (total_bytes > len
8150       || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8151     return NULL_TREE;
8152 
8153   result = double_int::from_buffer (ptr, total_bytes);
8154   fixed_value = fixed_from_double_int (result, mode);
8155 
8156   return build_fixed (type, fixed_value);
8157 }
8158 
8159 
8160 /* Subroutine of native_interpret_expr.  Interpret the contents of
8161    the buffer PTR of length LEN as a REAL_CST of type TYPE.
8162    If the buffer cannot be interpreted, return NULL_TREE.  */
8163 
8164 static tree
8165 native_interpret_real (tree type, const unsigned char *ptr, int len)
8166 {
8167   scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
8168   int total_bytes = GET_MODE_SIZE (mode);
8169   unsigned char value;
8170   /* There are always 32 bits in each long, no matter the size of
8171      the host's long.  We handle floating point representations with
8172      up to 192 bits.  */
8173   REAL_VALUE_TYPE r;
8174   long tmp[6];
8175 
8176   if (total_bytes > len || total_bytes > 24)
8177     return NULL_TREE;
8178   int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
8179 
8180   memset (tmp, 0, sizeof (tmp));
8181   for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
8182        bitpos += BITS_PER_UNIT)
8183     {
8184       /* Both OFFSET and BYTE index within a long;
8185 	 bitpos indexes the whole float.  */
8186       int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
8187       if (UNITS_PER_WORD < 4)
8188 	{
8189 	  int word = byte / UNITS_PER_WORD;
8190 	  if (WORDS_BIG_ENDIAN)
8191 	    word = (words - 1) - word;
8192 	  offset = word * UNITS_PER_WORD;
8193 	  if (BYTES_BIG_ENDIAN)
8194 	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
8195 	  else
8196 	    offset += byte % UNITS_PER_WORD;
8197 	}
8198       else
8199 	{
8200 	  offset = byte;
8201 	  if (BYTES_BIG_ENDIAN)
8202 	    {
8203 	      /* Reverse bytes within each long, or within the entire float
8204 		 if it's smaller than a long (for HFmode).  */
8205 	      offset = MIN (3, total_bytes - 1) - offset;
8206 	      gcc_assert (offset >= 0);
8207 	    }
8208 	}
8209       value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
8210 
8211       tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
8212     }
8213 
8214   real_from_target (&r, tmp, mode);
8215   return build_real (type, r);
8216 }
8217 
8218 
8219 /* Subroutine of native_interpret_expr.  Interpret the contents of
8220    the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
8221    If the buffer cannot be interpreted, return NULL_TREE.  */
8222 
8223 static tree
8224 native_interpret_complex (tree type, const unsigned char *ptr, int len)
8225 {
8226   tree etype, rpart, ipart;
8227   int size;
8228 
8229   etype = TREE_TYPE (type);
8230   size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8231   if (size * 2 > len)
8232     return NULL_TREE;
8233   rpart = native_interpret_expr (etype, ptr, size);
8234   if (!rpart)
8235     return NULL_TREE;
8236   ipart = native_interpret_expr (etype, ptr+size, size);
8237   if (!ipart)
8238     return NULL_TREE;
8239   return build_complex (type, rpart, ipart);
8240 }
8241 
8242 /* Read a vector of type TYPE from the target memory image given by BYTES,
8243    which contains LEN bytes.  The vector is known to be encodable using
8244    NPATTERNS interleaved patterns with NELTS_PER_PATTERN elements each.
8245 
8246    Return the vector on success, otherwise return null.  */
8247 
8248 static tree
8249 native_interpret_vector_part (tree type, const unsigned char *bytes,
8250 			      unsigned int len, unsigned int npatterns,
8251 			      unsigned int nelts_per_pattern)
8252 {
8253   tree elt_type = TREE_TYPE (type);
8254   if (VECTOR_BOOLEAN_TYPE_P (type)
8255       && TYPE_PRECISION (elt_type) <= BITS_PER_UNIT)
8256     {
8257       /* This is the only case in which elements can be smaller than a byte.
8258 	 Element 0 is always in the lsb of the containing byte.  */
8259       unsigned int elt_bits = TYPE_PRECISION (elt_type);
8260       if (elt_bits * npatterns * nelts_per_pattern > len * BITS_PER_UNIT)
8261 	return NULL_TREE;
8262 
8263       tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8264       for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8265 	{
8266 	  unsigned int bit_index = i * elt_bits;
8267 	  unsigned int byte_index = bit_index / BITS_PER_UNIT;
8268 	  unsigned int lsb = bit_index % BITS_PER_UNIT;
8269 	  builder.quick_push (bytes[byte_index] & (1 << lsb)
8270 			      ? build_all_ones_cst (elt_type)
8271 			      : build_zero_cst (elt_type));
8272 	}
8273       return builder.build ();
8274     }
8275 
8276   unsigned int elt_bytes = tree_to_uhwi (TYPE_SIZE_UNIT (elt_type));
8277   if (elt_bytes * npatterns * nelts_per_pattern > len)
8278     return NULL_TREE;
8279 
8280   tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8281   for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8282     {
8283       tree elt = native_interpret_expr (elt_type, bytes, elt_bytes);
8284       if (!elt)
8285 	return NULL_TREE;
8286       builder.quick_push (elt);
8287       bytes += elt_bytes;
8288     }
8289   return builder.build ();
8290 }
8291 
8292 /* Subroutine of native_interpret_expr.  Interpret the contents of
8293    the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
8294    If the buffer cannot be interpreted, return NULL_TREE.  */
8295 
8296 static tree
8297 native_interpret_vector (tree type, const unsigned char *ptr, unsigned int len)
8298 {
8299   tree etype;
8300   unsigned int size;
8301   unsigned HOST_WIDE_INT count;
8302 
8303   etype = TREE_TYPE (type);
8304   size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8305   if (!TYPE_VECTOR_SUBPARTS (type).is_constant (&count)
8306       || size * count > len)
8307     return NULL_TREE;
8308 
8309   return native_interpret_vector_part (type, ptr, len, count, 1);
8310 }
8311 
8312 
8313 /* Subroutine of fold_view_convert_expr.  Interpret the contents of
8314    the buffer PTR of length LEN as a constant of type TYPE.  For
8315    INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
8316    we return a REAL_CST, etc...  If the buffer cannot be interpreted,
8317    return NULL_TREE.  */
8318 
8319 tree
native_interpret_expr(tree type,const unsigned char * ptr,int len)8320 native_interpret_expr (tree type, const unsigned char *ptr, int len)
8321 {
8322   switch (TREE_CODE (type))
8323     {
8324     case INTEGER_TYPE:
8325     case ENUMERAL_TYPE:
8326     case BOOLEAN_TYPE:
8327     case POINTER_TYPE:
8328     case REFERENCE_TYPE:
8329       return native_interpret_int (type, ptr, len);
8330 
8331     case REAL_TYPE:
8332       return native_interpret_real (type, ptr, len);
8333 
8334     case FIXED_POINT_TYPE:
8335       return native_interpret_fixed (type, ptr, len);
8336 
8337     case COMPLEX_TYPE:
8338       return native_interpret_complex (type, ptr, len);
8339 
8340     case VECTOR_TYPE:
8341       return native_interpret_vector (type, ptr, len);
8342 
8343     default:
8344       return NULL_TREE;
8345     }
8346 }
8347 
8348 /* Returns true if we can interpret the contents of a native encoding
8349    as TYPE.  */
8350 
8351 bool
8352 can_native_interpret_type_p (tree type)
8353 {
8354   switch (TREE_CODE (type))
8355     {
8356     case INTEGER_TYPE:
8357     case ENUMERAL_TYPE:
8358     case BOOLEAN_TYPE:
8359     case POINTER_TYPE:
8360     case REFERENCE_TYPE:
8361     case FIXED_POINT_TYPE:
8362     case REAL_TYPE:
8363     case COMPLEX_TYPE:
8364     case VECTOR_TYPE:
8365       return true;
8366     default:
8367       return false;
8368     }
8369 }
8370 
8371 /* Routines for manipulation of native_encode_expr encoded data if the encoded
8372    or extracted constant positions and/or sizes aren't byte aligned.  */
8373 
8374 /* Shift left the bytes in PTR of SZ elements by AMNT bits, carrying over the
8375    bits between adjacent elements.  AMNT should be within
8376    [0, BITS_PER_UNIT).
8377    Example, AMNT = 2:
8378    00011111|11100000 << 2 = 01111111|10000000
8379    PTR[1]  | PTR[0]         PTR[1]  | PTR[0].  */
8380 
8381 void
8382 shift_bytes_in_array_left (unsigned char *ptr, unsigned int sz,
8383 			   unsigned int amnt)
8384 {
8385   if (amnt == 0)
8386     return;
8387 
8388   unsigned char carry_over = 0U;
8389   unsigned char carry_mask = (~0U) << (unsigned char) (BITS_PER_UNIT - amnt);
8390   unsigned char clear_mask = (~0U) << amnt;
8391 
8392   for (unsigned int i = 0; i < sz; i++)
8393     {
8394       unsigned prev_carry_over = carry_over;
8395       carry_over = (ptr[i] & carry_mask) >> (BITS_PER_UNIT - amnt);
8396 
8397       ptr[i] <<= amnt;
8398       if (i != 0)
8399 	{
8400 	  ptr[i] &= clear_mask;
8401 	  ptr[i] |= prev_carry_over;
8402 	}
8403     }
8404 }
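
/* Tracing the header comment's example with SZ == 2 and AMNT == 2:
   for PTR == { 0xe0, 0x1f } the first iteration saves the two top bits
   of PTR[0] as carry and leaves PTR[0] == 0x80; the second shifts
   PTR[1] to 0x7c and ORs in the saved carry, giving { 0x80, 0x7f }.  */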
8405 
8406 /* Like shift_bytes_in_array_left but for big-endian.
8407    Shift right the bytes in PTR of SZ elements by AMNT bits, carrying over the
8408    bits between adjacent elements.  AMNT should be within
8409    [0, BITS_PER_UNIT).
8410    Example, AMNT = 2:
8411    00011111|11100000 >> 2 = 00000111|11111000
8412    PTR[0]  | PTR[1]         PTR[0]  | PTR[1].  */
8413 
8414 void
8415 shift_bytes_in_array_right (unsigned char *ptr, unsigned int sz,
8416 			    unsigned int amnt)
8417 {
8418   if (amnt == 0)
8419     return;
8420 
8421   unsigned char carry_over = 0U;
8422   unsigned char carry_mask = ~(~0U << amnt);
8423 
8424   for (unsigned int i = 0; i < sz; i++)
8425     {
8426       unsigned prev_carry_over = carry_over;
8427       carry_over = ptr[i] & carry_mask;
8428 
8429       carry_over <<= (unsigned char) BITS_PER_UNIT - amnt;
8430       ptr[i] >>= amnt;
8431       ptr[i] |= prev_carry_over;
8432     }
8433 }
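
/* Tracing the header comment's example with SZ == 2 and AMNT == 2:
   for PTR == { 0x1f, 0xe0 } the first iteration saves the two low bits
   of PTR[0] as carry (shifted up to 0xc0) and leaves PTR[0] == 0x07;
   the second shifts PTR[1] to 0x38 and ORs in the carry, giving
   { 0x07, 0xf8 }.  */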
8434 
8435 /* Try to view-convert VECTOR_CST EXPR to VECTOR_TYPE TYPE by operating
8436    directly on the VECTOR_CST encoding, in a way that works for variable-
8437    length vectors.  Return the resulting VECTOR_CST on success or null
8438    on failure.  */
8439 
8440 static tree
8441 fold_view_convert_vector_encoding (tree type, tree expr)
8442 {
8443   tree expr_type = TREE_TYPE (expr);
8444   poly_uint64 type_bits, expr_bits;
8445   if (!poly_int_tree_p (TYPE_SIZE (type), &type_bits)
8446       || !poly_int_tree_p (TYPE_SIZE (expr_type), &expr_bits))
8447     return NULL_TREE;
8448 
8449   poly_uint64 type_units = TYPE_VECTOR_SUBPARTS (type);
8450   poly_uint64 expr_units = TYPE_VECTOR_SUBPARTS (expr_type);
8451   unsigned int type_elt_bits = vector_element_size (type_bits, type_units);
8452   unsigned int expr_elt_bits = vector_element_size (expr_bits, expr_units);
8453 
8454   /* We can only preserve the semantics of a stepped pattern if the new
8455      vector element is an integer of the same size.  */
8456   if (VECTOR_CST_STEPPED_P (expr)
8457       && (!INTEGRAL_TYPE_P (type) || type_elt_bits != expr_elt_bits))
8458     return NULL_TREE;
8459 
8460   /* The number of bits needed to encode one element from every pattern
8461      of the original vector.  */
8462   unsigned int expr_sequence_bits
8463     = VECTOR_CST_NPATTERNS (expr) * expr_elt_bits;
8464 
8465   /* The number of bits needed to encode one element from every pattern
8466      of the result.  */
8467   unsigned int type_sequence_bits
8468     = least_common_multiple (expr_sequence_bits, type_elt_bits);
8469 
8470   /* Don't try to read more bytes than are available, which can happen
8471      for constant-sized vectors if TYPE has larger elements than EXPR_TYPE.
8472      The general VIEW_CONVERT handling can cope with that case, so there's
8473      no point complicating things here.  */
8474   unsigned int nelts_per_pattern = VECTOR_CST_NELTS_PER_PATTERN (expr);
8475   unsigned int buffer_bytes = CEIL (nelts_per_pattern * type_sequence_bits,
8476 				    BITS_PER_UNIT);
8477   unsigned int buffer_bits = buffer_bytes * BITS_PER_UNIT;
8478   if (known_gt (buffer_bits, expr_bits))
8479     return NULL_TREE;
8480 
8481   /* Get enough bytes of EXPR to form the new encoding.  */
8482   auto_vec<unsigned char, 128> buffer (buffer_bytes);
8483   buffer.quick_grow (buffer_bytes);
8484   if (native_encode_vector_part (expr, buffer.address (), buffer_bytes, 0,
8485 				 buffer_bits / expr_elt_bits)
8486       != (int) buffer_bytes)
8487     return NULL_TREE;
8488 
8489   /* Reencode the bytes as TYPE.  */
8490   unsigned int type_npatterns = type_sequence_bits / type_elt_bits;
8491   return native_interpret_vector_part (type, &buffer[0], buffer.length (),
8492 				       type_npatterns, nelts_per_pattern);
8493 }
8494 
8495 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
8496    TYPE at compile-time.  If we're unable to perform the conversion
8497    return NULL_TREE.  */
8498 
8499 static tree
8500 fold_view_convert_expr (tree type, tree expr)
8501 {
8502   /* We support up to 512-bit values (for V8DFmode).  */
8503   unsigned char buffer[64];
8504   int len;
8505 
8506   /* Check that the host and target are sane.  */
8507   if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
8508     return NULL_TREE;
8509 
8510   if (VECTOR_TYPE_P (type) && TREE_CODE (expr) == VECTOR_CST)
8511     if (tree res = fold_view_convert_vector_encoding (type, expr))
8512       return res;
8513 
8514   len = native_encode_expr (expr, buffer, sizeof (buffer));
8515   if (len == 0)
8516     return NULL_TREE;
8517 
8518   return native_interpret_expr (type, buffer, len);
8519 }
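
/* For instance, folding VIEW_CONVERT_EXPR<int>(1.0f) encodes the
   REAL_CST into its four target bytes and reinterprets them as an
   INTEGER_CST, yielding 0x3f800000 (a sketch, assuming a 32-bit int
   and IEEE single precision on the target).  */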
8520 
8521 /* Build an expression for the address of T.  Folds away INDIRECT_REF
8522    to avoid confusing the gimplify process.  */
8523 
8524 tree
8525 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
8526 {
8527   /* The size of the object is not relevant when talking about its address.  */
8528   if (TREE_CODE (t) == WITH_SIZE_EXPR)
8529     t = TREE_OPERAND (t, 0);
8530 
8531   if (TREE_CODE (t) == INDIRECT_REF)
8532     {
8533       t = TREE_OPERAND (t, 0);
8534 
8535       if (TREE_TYPE (t) != ptrtype)
8536 	t = build1_loc (loc, NOP_EXPR, ptrtype, t);
8537     }
8538   else if (TREE_CODE (t) == MEM_REF
8539 	   && integer_zerop (TREE_OPERAND (t, 1)))
8540     {
8541       t = TREE_OPERAND (t, 0);
8542 
8543       if (TREE_TYPE (t) != ptrtype)
8544 	t = fold_convert_loc (loc, ptrtype, t);
8545     }
8546   else if (TREE_CODE (t) == MEM_REF
8547 	   && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
8548     return fold_binary (POINTER_PLUS_EXPR, ptrtype,
8549 			TREE_OPERAND (t, 0),
8550 			convert_to_ptrofftype (TREE_OPERAND (t, 1)));
8551   else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
8552     {
8553       t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
8554 
8555       if (TREE_TYPE (t) != ptrtype)
8556 	t = fold_convert_loc (loc, ptrtype, t);
8557     }
8558   else
8559     t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
8560 
8561   return t;
8562 }
8563 
8564 /* Build an expression for the address of T.  */
8565 
8566 tree
8567 build_fold_addr_expr_loc (location_t loc, tree t)
8568 {
8569   tree ptrtype = build_pointer_type (TREE_TYPE (t));
8570 
8571   return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
8572 }
8573 
8574 /* Fold a unary expression of code CODE and type TYPE with operand
8575    OP0.  Return the folded expression if folding is successful.
8576    Otherwise, return NULL_TREE.  */
8577 
8578 tree
8579 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
8580 {
8581   tree tem;
8582   tree arg0;
8583   enum tree_code_class kind = TREE_CODE_CLASS (code);
8584 
8585   gcc_assert (IS_EXPR_CODE_CLASS (kind)
8586 	      && TREE_CODE_LENGTH (code) == 1);
8587 
8588   arg0 = op0;
8589   if (arg0)
8590     {
8591       if (CONVERT_EXPR_CODE_P (code)
8592 	  || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
8593 	{
8594 	  /* Don't use STRIP_NOPS, because signedness of argument type
8595 	     matters.  */
8596 	  STRIP_SIGN_NOPS (arg0);
8597 	}
8598       else
8599 	{
8600 	  /* Strip any conversions that don't change the mode.  This
8601 	     is safe for every expression, except for a comparison
8602 	     expression because its signedness is derived from its
8603 	     operands.
8604 
8605 	     Note that this is done as an internal manipulation within
8606 	     the constant folder, in order to find the simplest
8607 	     representation of the arguments so that their form can be
8608 	     studied.  In any case, the appropriate type conversions
8609 	     should be put back in the tree that will get out of the
8610 	     constant folder.  */
8611 	  STRIP_NOPS (arg0);
8612 	}
8613 
8614       if (CONSTANT_CLASS_P (arg0))
8615 	{
8616 	  tree tem = const_unop (code, type, arg0);
8617 	  if (tem)
8618 	    {
8619 	      if (TREE_TYPE (tem) != type)
8620 		tem = fold_convert_loc (loc, type, tem);
8621 	      return tem;
8622 	    }
8623 	}
8624     }
8625 
8626   tem = generic_simplify (loc, code, type, op0);
8627   if (tem)
8628     return tem;
8629 
8630   if (TREE_CODE_CLASS (code) == tcc_unary)
8631     {
8632       if (TREE_CODE (arg0) == COMPOUND_EXPR)
8633 	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8634 		       fold_build1_loc (loc, code, type,
8635 				    fold_convert_loc (loc, TREE_TYPE (op0),
8636 						      TREE_OPERAND (arg0, 1))));
8637       else if (TREE_CODE (arg0) == COND_EXPR)
8638 	{
8639 	  tree arg01 = TREE_OPERAND (arg0, 1);
8640 	  tree arg02 = TREE_OPERAND (arg0, 2);
8641 	  if (! VOID_TYPE_P (TREE_TYPE (arg01)))
8642 	    arg01 = fold_build1_loc (loc, code, type,
8643 				 fold_convert_loc (loc,
8644 						   TREE_TYPE (op0), arg01));
8645 	  if (! VOID_TYPE_P (TREE_TYPE (arg02)))
8646 	    arg02 = fold_build1_loc (loc, code, type,
8647 				 fold_convert_loc (loc,
8648 						   TREE_TYPE (op0), arg02));
8649 	  tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
8650 			     arg01, arg02);
8651 
8652 	  /* If this was a conversion, and all we did was to move it
8653 	     inside the COND_EXPR, bring it back out.  But leave it if
8654 	     it is a conversion from integer to integer and the
8655 	     result precision is no wider than a word since such a
8656 	     conversion is cheap and may be optimized away by combine,
8657 	     while it couldn't if it were outside the COND_EXPR.  Then return
8658 	     so we don't get into an infinite recursion loop taking the
8659 	     conversion out and then back in.  */
8660 
8661 	  if ((CONVERT_EXPR_CODE_P (code)
8662 	       || code == NON_LVALUE_EXPR)
8663 	      && TREE_CODE (tem) == COND_EXPR
8664 	      && TREE_CODE (TREE_OPERAND (tem, 1)) == code
8665 	      && TREE_CODE (TREE_OPERAND (tem, 2)) == code
8666 	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
8667 	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
8668 	      && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
8669 		  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
8670 	      && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8671 		     && (INTEGRAL_TYPE_P
8672 			 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
8673 		     && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
8674 		  || flag_syntax_only))
8675 	    tem = build1_loc (loc, code, type,
8676 			      build3 (COND_EXPR,
8677 				      TREE_TYPE (TREE_OPERAND
8678 						 (TREE_OPERAND (tem, 1), 0)),
8679 				      TREE_OPERAND (tem, 0),
8680 				      TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
8681 				      TREE_OPERAND (TREE_OPERAND (tem, 2),
8682 						    0)));
8683 	  return tem;
8684 	}
8685    }
8686 
8687   switch (code)
8688     {
8689     case NON_LVALUE_EXPR:
8690       if (!maybe_lvalue_p (op0))
8691 	return fold_convert_loc (loc, type, op0);
8692       return NULL_TREE;
8693 
8694     CASE_CONVERT:
8695     case FLOAT_EXPR:
8696     case FIX_TRUNC_EXPR:
8697       if (COMPARISON_CLASS_P (op0))
8698 	{
8699 	  /* If we have (type) (a CMP b) and type is an integral type, return
8700 	     new expression involving the new type.  Canonicalize
8701 	     (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
8702 	     non-integral type.
8703 	     Do not fold the result as that would not simplify further, also
8704 	     folding again results in recursions.  */
8705 	  if (TREE_CODE (type) == BOOLEAN_TYPE)
8706 	    return build2_loc (loc, TREE_CODE (op0), type,
8707 			       TREE_OPERAND (op0, 0),
8708 			       TREE_OPERAND (op0, 1));
8709 	  else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
8710 		   && TREE_CODE (type) != VECTOR_TYPE)
8711 	    return build3_loc (loc, COND_EXPR, type, op0,
8712 			       constant_boolean_node (true, type),
8713 			       constant_boolean_node (false, type));
8714 	}
8715 
8716       /* Handle (T *)&A.B.C for A being of type T and B and C
8717 	 living at offset zero.  This occurs frequently in
8718 	 C++ upcasting and then accessing the base.  */
8719       if (TREE_CODE (op0) == ADDR_EXPR
8720 	  && POINTER_TYPE_P (type)
8721 	  && handled_component_p (TREE_OPERAND (op0, 0)))
8722         {
8723 	  poly_int64 bitsize, bitpos;
8724 	  tree offset;
8725 	  machine_mode mode;
8726 	  int unsignedp, reversep, volatilep;
8727 	  tree base
8728 	    = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
8729 				   &offset, &mode, &unsignedp, &reversep,
8730 				   &volatilep);
8731 	  /* If the reference was to a (constant) zero offset, we can use
8732 	     the address of the base if it has the same base type
8733 	     as the result type and the pointer type is unqualified.  */
8734 	  if (!offset
8735 	      && known_eq (bitpos, 0)
8736 	      && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
8737 		  == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8738 	      && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
8739 	    return fold_convert_loc (loc, type,
8740 				     build_fold_addr_expr_loc (loc, base));
8741         }
8742 
8743       if (TREE_CODE (op0) == MODIFY_EXPR
8744 	  && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8745 	  /* Detect assigning a bitfield.  */
8746 	  && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8747 	       && DECL_BIT_FIELD
8748 	       (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8749 	{
8750 	  /* Don't leave an assignment inside a conversion
8751 	     unless assigning a bitfield.  */
8752 	  tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8753 	  /* First do the assignment, then return converted constant.  */
8754 	  tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8755 	  TREE_NO_WARNING (tem) = 1;
8756 	  TREE_USED (tem) = 1;
8757 	  return tem;
8758 	}
8759 
8760       /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8761 	 constants (if x has signed type, the sign bit cannot be set
8762 	 in c).  This folds extension into the BIT_AND_EXPR.
8763 	 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8764 	 very likely don't have maximal range for their precision and this
8765 	 transformation effectively doesn't preserve non-maximal ranges.  */
8766       if (TREE_CODE (type) == INTEGER_TYPE
8767 	  && TREE_CODE (op0) == BIT_AND_EXPR
8768 	  && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8769 	{
8770 	  tree and_expr = op0;
8771 	  tree and0 = TREE_OPERAND (and_expr, 0);
8772 	  tree and1 = TREE_OPERAND (and_expr, 1);
8773 	  int change = 0;
8774 
8775 	  if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8776 	      || (TYPE_PRECISION (type)
8777 		  <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8778 	    change = 1;
8779 	  else if (TYPE_PRECISION (TREE_TYPE (and1))
8780 		   <= HOST_BITS_PER_WIDE_INT
8781 		   && tree_fits_uhwi_p (and1))
8782 	    {
8783 	      unsigned HOST_WIDE_INT cst;
8784 
8785 	      cst = tree_to_uhwi (and1);
8786 	      cst &= HOST_WIDE_INT_M1U
8787 		     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8788 	      change = (cst == 0);
8789 	      if (change
8790 		  && !flag_syntax_only
8791 		  && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
8792 		      == ZERO_EXTEND))
8793 		{
8794 		  tree uns = unsigned_type_for (TREE_TYPE (and0));
8795 		  and0 = fold_convert_loc (loc, uns, and0);
8796 		  and1 = fold_convert_loc (loc, uns, and1);
8797 		}
8798 	    }
8799 	  if (change)
8800 	    {
8801 	      tem = force_fit_type (type, wi::to_widest (and1), 0,
8802 				    TREE_OVERFLOW (and1));
8803 	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
8804 				      fold_convert_loc (loc, type, and0), tem);
8805 	    }
8806 	}
8807 
8808       /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
8809 	 cast (T1)X will fold away.  We assume that this happens when X itself
8810 	 is a cast.  */
8811       if (POINTER_TYPE_P (type)
8812 	  && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8813 	  && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
8814 	{
8815 	  tree arg00 = TREE_OPERAND (arg0, 0);
8816 	  tree arg01 = TREE_OPERAND (arg0, 1);
8817 
8818 	  return fold_build_pointer_plus_loc
8819 		   (loc, fold_convert_loc (loc, type, arg00), arg01);
8820 	}
8821 
8822       /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8823 	 of the same precision, and X is an integer type not narrower than
8824 	 types T1 or T2, i.e. the cast (T2)X isn't an extension.  */
8825       if (INTEGRAL_TYPE_P (type)
8826 	  && TREE_CODE (op0) == BIT_NOT_EXPR
8827 	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8828 	  && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8829 	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8830 	{
8831 	  tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8832 	  if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8833 	      && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8834 	    return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8835 				fold_convert_loc (loc, type, tem));
8836 	}
8837 
8838       /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8839 	 type of X and Y (integer types only).  */
8840       if (INTEGRAL_TYPE_P (type)
8841 	  && TREE_CODE (op0) == MULT_EXPR
8842 	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8843 	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8844 	{
8845 	  /* Be careful not to introduce new overflows.  */
8846 	  tree mult_type;
8847           if (TYPE_OVERFLOW_WRAPS (type))
8848 	    mult_type = type;
8849 	  else
8850 	    mult_type = unsigned_type_for (type);
8851 
8852 	  if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8853 	    {
8854 	      tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8855 				 fold_convert_loc (loc, mult_type,
8856 						   TREE_OPERAND (op0, 0)),
8857 				 fold_convert_loc (loc, mult_type,
8858 						   TREE_OPERAND (op0, 1)));
8859 	      return fold_convert_loc (loc, type, tem);
8860 	    }
8861 	}
8862 
8863       return NULL_TREE;
8864 
8865     case VIEW_CONVERT_EXPR:
8866       if (TREE_CODE (op0) == MEM_REF)
8867         {
8868 	  if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
8869 	    type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
8870 	  tem = fold_build2_loc (loc, MEM_REF, type,
8871 				 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8872 	  REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
8873 	  return tem;
8874 	}
8875 
8876       return NULL_TREE;
8877 
8878     case NEGATE_EXPR:
8879       tem = fold_negate_expr (loc, arg0);
8880       if (tem)
8881 	return fold_convert_loc (loc, type, tem);
8882       return NULL_TREE;
8883 
8884     case ABS_EXPR:
8885       /* Convert fabs((double)float) into (double)fabsf(float).  */
8886       if (TREE_CODE (arg0) == NOP_EXPR
8887 	  && TREE_CODE (type) == REAL_TYPE)
8888 	{
8889 	  tree targ0 = strip_float_extensions (arg0);
8890 	  if (targ0 != arg0)
8891 	    return fold_convert_loc (loc, type,
8892 				     fold_build1_loc (loc, ABS_EXPR,
8893 						  TREE_TYPE (targ0),
8894 						  targ0));
8895 	}
8896       return NULL_TREE;
8897 
8898     case BIT_NOT_EXPR:
8899       /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
8900       if (TREE_CODE (arg0) == BIT_XOR_EXPR
8901 	  && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8902 				    fold_convert_loc (loc, type,
8903 						      TREE_OPERAND (arg0, 0)))))
8904 	return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8905 				fold_convert_loc (loc, type,
8906 						  TREE_OPERAND (arg0, 1)));
8907       else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8908 	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8909 			       	     fold_convert_loc (loc, type,
8910 						       TREE_OPERAND (arg0, 1)))))
8911 	return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8912 			    fold_convert_loc (loc, type,
8913 					      TREE_OPERAND (arg0, 0)), tem);
8914 
8915       return NULL_TREE;
8916 
8917     case TRUTH_NOT_EXPR:
8918       /* Note that the operand of this must be an int
8919 	 and its values must be 0 or 1.
8920 	 ("true" is a fixed value perhaps depending on the language,
8921 	 but we don't handle values other than 1 correctly yet.)  */
8922       tem = fold_truth_not_expr (loc, arg0);
8923       if (!tem)
8924 	return NULL_TREE;
8925       return fold_convert_loc (loc, type, tem);
8926 
8927     case INDIRECT_REF:
8928       /* Fold *&X to X if X is an lvalue.  */
8929       if (TREE_CODE (op0) == ADDR_EXPR)
8930 	{
8931 	  tree op00 = TREE_OPERAND (op0, 0);
8932 	  if ((VAR_P (op00)
8933 	       || TREE_CODE (op00) == PARM_DECL
8934 	       || TREE_CODE (op00) == RESULT_DECL)
8935 	      && !TREE_READONLY (op00))
8936 	    return op00;
8937 	}
8938       return NULL_TREE;
8939 
8940     default:
8941       return NULL_TREE;
8942     } /* switch (code) */
8943 }
8944 
8945 
8946 /* If the operation was a conversion do _not_ mark a resulting constant
8947    with TREE_OVERFLOW if the original constant was not.  These conversions
8948    have implementation defined behavior and retaining the TREE_OVERFLOW
8949    flag here would confuse later passes such as VRP.  */
8950 tree
8951 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8952 				tree type, tree op0)
8953 {
8954   tree res = fold_unary_loc (loc, code, type, op0);
8955   if (res
8956       && TREE_CODE (res) == INTEGER_CST
8957       && TREE_CODE (op0) == INTEGER_CST
8958       && CONVERT_EXPR_CODE_P (code))
8959     TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8960 
8961   return res;
8962 }
8963 
8964 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8965    operands OP0 and OP1.  LOC is the location of the resulting expression.
8966    ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
8967    Return the folded expression if folding is successful.  Otherwise,
8968    return NULL_TREE.  */
8969 static tree
8970 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8971 		  tree arg0, tree arg1, tree op0, tree op1)
8972 {
8973   tree tem;
8974 
8975   /* We only do these simplifications if we are optimizing.  */
8976   if (!optimize)
8977     return NULL_TREE;
8978 
8979   /* Check for things like (A || B) && (A || C).  We can convert this
8980      to A || (B && C).  Note that either operator can be any of the four
8981      truth and/or operations and the transformation will still be
8982      valid.   Also note that we only care about order for the
8983      ANDIF and ORIF operators.  If B contains side effects, this
8984      might change the truth-value of A.  */
8985   if (TREE_CODE (arg0) == TREE_CODE (arg1)
8986       && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8987 	  || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8988 	  || TREE_CODE (arg0) == TRUTH_AND_EXPR
8989 	  || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8990       && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8991     {
8992       tree a00 = TREE_OPERAND (arg0, 0);
8993       tree a01 = TREE_OPERAND (arg0, 1);
8994       tree a10 = TREE_OPERAND (arg1, 0);
8995       tree a11 = TREE_OPERAND (arg1, 1);
8996       int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8997 			  || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8998 			 && (code == TRUTH_AND_EXPR
8999 			     || code == TRUTH_OR_EXPR));
9000 
9001       if (operand_equal_p (a00, a10, 0))
9002 	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9003 			    fold_build2_loc (loc, code, type, a01, a11));
9004       else if (commutative && operand_equal_p (a00, a11, 0))
9005 	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9006 			    fold_build2_loc (loc, code, type, a01, a10));
9007       else if (commutative && operand_equal_p (a01, a10, 0))
9008 	return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
9009 			    fold_build2_loc (loc, code, type, a00, a11));
9010 
9011       /* This case is tricky because we must either have commutative
9012 	 operators or else A10 must not have side-effects.  */
9013 
9014       else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
9015 	       && operand_equal_p (a01, a11, 0))
9016 	return fold_build2_loc (loc, TREE_CODE (arg0), type,
9017 			    fold_build2_loc (loc, code, type, a00, a10),
9018 			    a01);
9019     }
9020 
9021   /* See if we can build a range comparison.  */
9022   if ((tem = fold_range_test (loc, code, type, op0, op1)) != 0)
9023     return tem;
9024 
9025   if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
9026       || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
9027     {
9028       tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
9029       if (tem)
9030 	return fold_build2_loc (loc, code, type, tem, arg1);
9031     }
9032 
9033   if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
9034       || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
9035     {
9036       tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
9037       if (tem)
9038 	return fold_build2_loc (loc, code, type, arg0, tem);
9039     }
9040 
9041   /* Check for the possibility of merging component references.  If our
9042      lhs is another similar operation, try to merge its rhs with our
9043      rhs.  Then try to merge our lhs and rhs.  */
9044   if (TREE_CODE (arg0) == code
9045       && (tem = fold_truth_andor_1 (loc, code, type,
9046 				    TREE_OPERAND (arg0, 1), arg1)) != 0)
9047     return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9048 
9049   if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
9050     return tem;
9051 
9052   bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
9053   if (param_logical_op_non_short_circuit != -1)
9054     logical_op_non_short_circuit
9055       = param_logical_op_non_short_circuit;
9056   if (logical_op_non_short_circuit
9057       && !flag_sanitize_coverage
9058       && (code == TRUTH_AND_EXPR
9059           || code == TRUTH_ANDIF_EXPR
9060           || code == TRUTH_OR_EXPR
9061           || code == TRUTH_ORIF_EXPR))
9062     {
9063       enum tree_code ncode, icode;
9064 
9065       ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
9066 	      ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
9067       icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
9068 
9069       /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
9070 	 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
9071 	 We don't want to pack more than two leaves into a non-IF AND/OR
9072 	 expression.
9073 	 If the tree code of the left-hand operand isn't an AND/OR-IF code
9074 	 and isn't equal to IF-CODE, then we don't want to add the
9075 	 right-hand operand.  If the inner right-hand side of the left-hand
9076 	 operand has side-effects, or isn't simple, then we can't add to it,
9077 	 as otherwise we might destroy the if-sequence.  */
9078       if (TREE_CODE (arg0) == icode
9079 	  && simple_operand_p_2 (arg1)
9080 	  /* Needed for sequence points to handle trappings, and
9081 	     side-effects.  */
9082 	  && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
9083 	{
9084 	  tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
9085 				 arg1);
9086 	  return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
9087 				  tem);
9088 	}
9089 	/* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
9090 	   or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C).  */
9091       else if (TREE_CODE (arg1) == icode
9092 	  && simple_operand_p_2 (arg0)
9093 	  /* Needed for sequence points to handle trappings, and
9094 	     side-effects.  */
9095 	  && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
9096 	{
9097 	  tem = fold_build2_loc (loc, ncode, type,
9098 				 arg0, TREE_OPERAND (arg1, 0));
9099 	  return fold_build2_loc (loc, icode, type, tem,
9100 				  TREE_OPERAND (arg1, 1));
9101 	}
9102       /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
9103 	 into (A OR B).
9104 	 For sequence point consistency, we need to check for trapping,
9105 	 and side-effects.  */
9106       else if (code == icode && simple_operand_p_2 (arg0)
9107                && simple_operand_p_2 (arg1))
9108 	return fold_build2_loc (loc, ncode, type, arg0, arg1);
9109     }
9110 
9111   return NULL_TREE;
9112 }
9113 
9114 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
9115    by changing CODE to reduce the magnitude of constants involved in
9116    ARG0 of the comparison.
9117    Returns a canonicalized comparison tree if a simplification was
9118    possible, otherwise returns NULL_TREE.
9119    Set *STRICT_OVERFLOW_P to true if the canonicalization is only
9120    valid if signed overflow is undefined.  */
9121 
9122 static tree
9123 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
9124 				 tree arg0, tree arg1,
9125 				 bool *strict_overflow_p)
9126 {
9127   enum tree_code code0 = TREE_CODE (arg0);
9128   tree t, cst0 = NULL_TREE;
9129   int sgn0;
9130 
9131   /* Match A +- CST code arg1.  We can change this only if overflow
9132      is undefined.  */
9133   if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9134 	 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
9135 	/* In principle pointers also have undefined overflow behavior,
9136 	   but that causes problems elsewhere.  */
9137 	&& !POINTER_TYPE_P (TREE_TYPE (arg0))
9138 	&& (code0 == MINUS_EXPR
9139 	    || code0 == PLUS_EXPR)
9140 	&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
9141     return NULL_TREE;
9142 
9143   /* Identify the constant in arg0 and its sign.  */
9144   cst0 = TREE_OPERAND (arg0, 1);
9145   sgn0 = tree_int_cst_sgn (cst0);
9146 
9147   /* Overflowed constants and zero will cause problems.  */
9148   if (integer_zerop (cst0)
9149       || TREE_OVERFLOW (cst0))
9150     return NULL_TREE;
9151 
9152   /* See if we can reduce the magnitude of the constant in
9153      arg0 by changing the comparison code.  */
9154   /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
9155   if (code == LT_EXPR
9156       && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9157     code = LE_EXPR;
9158   /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
9159   else if (code == GT_EXPR
9160 	   && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9161     code = GE_EXPR;
9162   /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
9163   else if (code == LE_EXPR
9164 	   && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9165     code = LT_EXPR;
9166   /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
9167   else if (code == GE_EXPR
9168 	   && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9169     code = GT_EXPR;
9170   else
9171     return NULL_TREE;
9172   *strict_overflow_p = true;
9173 
9174   /* Now build the constant reduced in magnitude.  But not if that
9175      would produce one outside of its types range.  */
9176   if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
9177       && ((sgn0 == 1
9178 	   && TYPE_MIN_VALUE (TREE_TYPE (cst0))
9179 	   && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
9180 	  || (sgn0 == -1
9181 	      && TYPE_MAX_VALUE (TREE_TYPE (cst0))
9182 	      && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
9183     return NULL_TREE;
9184 
9185   t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
9186 		       cst0, build_int_cst (TREE_TYPE (cst0), 1));
9187   t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
9188   t = fold_convert (TREE_TYPE (arg1), t);
9189 
9190   return fold_build2_loc (loc, code, type, t, arg1);
9191 }
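
/* For example, with signed A where overflow is undefined, A - 10 < B
   is canonicalized to A - 9 <= B: the LT_EXPR becomes LE_EXPR and the
   constant is reduced in magnitude by one, which cannot change the
   result in the absence of overflow.  */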
9192 
9193 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
9194    overflow further.  Try to decrease the magnitude of constants involved
9195    by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
9196    and put sole constants at the second argument position.
9197    Returns the canonicalized tree if changed, otherwise NULL_TREE.  */
9198 
9199 static tree
9200 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
9201 			       tree arg0, tree arg1)
9202 {
9203   tree t;
9204   bool strict_overflow_p;
9205   const char * const warnmsg = G_("assuming signed overflow does not occur "
9206 				  "when reducing constant in comparison");
9207 
9208   /* Try canonicalization by simplifying arg0.  */
9209   strict_overflow_p = false;
9210   t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
9211 				       &strict_overflow_p);
9212   if (t)
9213     {
9214       if (strict_overflow_p)
9215 	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9216       return t;
9217     }
9218 
9219   /* Try canonicalization by simplifying arg1 using the swapped
9220      comparison.  */
9221   code = swap_tree_comparison (code);
9222   strict_overflow_p = false;
9223   t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
9224 				       &strict_overflow_p);
9225   if (t && strict_overflow_p)
9226     fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9227   return t;
9228 }
9229 
9230 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
9231    space.  This is used to avoid issuing overflow warnings for
9232    expressions like &p->x which cannot wrap.  */
9233 
9234 static bool
9235 pointer_may_wrap_p (tree base, tree offset, poly_int64 bitpos)
9236 {
9237   if (!POINTER_TYPE_P (TREE_TYPE (base)))
9238     return true;
9239 
9240   if (maybe_lt (bitpos, 0))
9241     return true;
9242 
9243   poly_wide_int wi_offset;
9244   int precision = TYPE_PRECISION (TREE_TYPE (base));
9245   if (offset == NULL_TREE)
9246     wi_offset = wi::zero (precision);
9247   else if (!poly_int_tree_p (offset) || TREE_OVERFLOW (offset))
9248     return true;
9249   else
9250     wi_offset = wi::to_poly_wide (offset);
9251 
9252   wi::overflow_type overflow;
9253   poly_wide_int units = wi::shwi (bits_to_bytes_round_down (bitpos),
9254 				  precision);
9255   poly_wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
9256   if (overflow)
9257     return true;
9258 
9259   poly_uint64 total_hwi, size;
9260   if (!total.to_uhwi (&total_hwi)
9261       || !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (base))),
9262 			   &size)
9263       || known_eq (size, 0U))
9264     return true;
9265 
9266   if (known_le (total_hwi, size))
9267     return false;
9268 
9269   /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
9270      array.  */
9271   if (TREE_CODE (base) == ADDR_EXPR
9272       && poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (base, 0))),
9273 			  &size)
9274       && maybe_ne (size, 0U)
9275       && known_le (total_hwi, size))
9276     return false;
9277 
9278   return true;
9279 }
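/* Editor-added sketch in plain C (hypothetical names, not GCC code):
   the test above boils down to "does the total byte offset stay
   within the size of the object BASE points to?"; inside the object,
   including one past the end, the address computation cannot wrap.  */
#if 0
#include <stdbool.h>
#include <stdint.h>

static bool
may_wrap (uint64_t offset_bytes, uint64_t object_size)
{
  if (object_size == 0)		/* unknown or empty object: no guarantee */
    return true;
  return offset_bytes > object_size;	/* one-past-the-end is still fine */
}
#endif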
9280 
9281 /* Return a positive integer when the symbol DECL is known to have
9282    a nonzero address, zero when it's known not to (e.g., it's a weak
9283    symbol), and a negative integer when the symbol is not yet in the
9284    symbol table and so whether or not its address is zero is unknown.
9285    For function-local objects, always return a positive integer.  */
9286 static int
9287 maybe_nonzero_address (tree decl)
9288 {
9289   if (DECL_P (decl) && decl_in_symtab_p (decl))
9290     if (struct symtab_node *symbol = symtab_node::get_create (decl))
9291       return symbol->nonzero_address ();
9292 
9293   /* Function local objects are never NULL.  */
9294   if (DECL_P (decl)
9295       && (DECL_CONTEXT (decl)
9296       && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
9297       && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
9298     return 1;
9299 
9300   return -1;
9301 }
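/* Editor-added illustration: a weak symbol may resolve to address
   zero, so its address test must not be folded away, whereas the
   address of a function-local object is always nonzero.  Standalone
   sketch for GCC/ELF semantics, guarded with #if 0.  */
#if 0
extern int w __attribute__ ((weak));

int
weak_may_be_null (void)
{
  return &w != 0;	/* kept: evaluates to 0 if 'w' is never defined */
}
#endif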
9302 
9303 /* Subroutine of fold_binary.  This routine performs all of the
9304    transformations that are common to the equality/inequality
9305    operators (EQ_EXPR and NE_EXPR) and the ordering operators
9306    (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
9307    fold_binary should call fold_binary instead.  Fold a comparison with
9308    tree code CODE and type TYPE with operands OP0 and OP1.  Return
9309    the folded comparison or NULL_TREE.  */
9310 
9311 static tree
9312 fold_comparison (location_t loc, enum tree_code code, tree type,
9313 		 tree op0, tree op1)
9314 {
9315   const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
9316   tree arg0, arg1, tem;
9317 
9318   arg0 = op0;
9319   arg1 = op1;
9320 
9321   STRIP_SIGN_NOPS (arg0);
9322   STRIP_SIGN_NOPS (arg1);
9323 
9324   /* For comparisons of pointers we can decompose it to a compile time
9325      comparison of the base objects and the offsets into the object.
9326      This requires at least one operand being an ADDR_EXPR or a
9327      POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
9328   if (POINTER_TYPE_P (TREE_TYPE (arg0))
9329       && (TREE_CODE (arg0) == ADDR_EXPR
9330 	  || TREE_CODE (arg1) == ADDR_EXPR
9331 	  || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9332 	  || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9333     {
9334       tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9335       poly_int64 bitsize, bitpos0 = 0, bitpos1 = 0;
9336       machine_mode mode;
9337       int volatilep, reversep, unsignedp;
9338       bool indirect_base0 = false, indirect_base1 = false;
9339 
9340       /* Get base and offset for the access.  Strip ADDR_EXPR for
9341 	 get_inner_reference, but put it back by stripping INDIRECT_REF
9342 	 off the base object if possible.  indirect_baseN will be true
9343 	 if baseN is not an address but refers to the object itself.  */
9344       base0 = arg0;
9345       if (TREE_CODE (arg0) == ADDR_EXPR)
9346 	{
9347 	  base0
9348 	    = get_inner_reference (TREE_OPERAND (arg0, 0),
9349 				   &bitsize, &bitpos0, &offset0, &mode,
9350 				   &unsignedp, &reversep, &volatilep);
9351 	  if (TREE_CODE (base0) == INDIRECT_REF)
9352 	    base0 = TREE_OPERAND (base0, 0);
9353 	  else
9354 	    indirect_base0 = true;
9355 	}
9356       else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9357 	{
9358 	  base0 = TREE_OPERAND (arg0, 0);
9359 	  STRIP_SIGN_NOPS (base0);
9360 	  if (TREE_CODE (base0) == ADDR_EXPR)
9361 	    {
9362 	      base0
9363 		= get_inner_reference (TREE_OPERAND (base0, 0),
9364 				       &bitsize, &bitpos0, &offset0, &mode,
9365 				       &unsignedp, &reversep, &volatilep);
9366 	      if (TREE_CODE (base0) == INDIRECT_REF)
9367 		base0 = TREE_OPERAND (base0, 0);
9368 	      else
9369 		indirect_base0 = true;
9370 	    }
9371 	  if (offset0 == NULL_TREE || integer_zerop (offset0))
9372 	    offset0 = TREE_OPERAND (arg0, 1);
9373 	  else
9374 	    offset0 = size_binop (PLUS_EXPR, offset0,
9375 				  TREE_OPERAND (arg0, 1));
9376 	  if (poly_int_tree_p (offset0))
9377 	    {
9378 	      poly_offset_int tem = wi::sext (wi::to_poly_offset (offset0),
9379 					      TYPE_PRECISION (sizetype));
9380 	      tem <<= LOG2_BITS_PER_UNIT;
9381 	      tem += bitpos0;
9382 	      if (tem.to_shwi (&bitpos0))
9383 		offset0 = NULL_TREE;
9384 	    }
9385 	}
9386 
9387       base1 = arg1;
9388       if (TREE_CODE (arg1) == ADDR_EXPR)
9389 	{
9390 	  base1
9391 	    = get_inner_reference (TREE_OPERAND (arg1, 0),
9392 				   &bitsize, &bitpos1, &offset1, &mode,
9393 				   &unsignedp, &reversep, &volatilep);
9394 	  if (TREE_CODE (base1) == INDIRECT_REF)
9395 	    base1 = TREE_OPERAND (base1, 0);
9396 	  else
9397 	    indirect_base1 = true;
9398 	}
9399       else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9400 	{
9401 	  base1 = TREE_OPERAND (arg1, 0);
9402 	  STRIP_SIGN_NOPS (base1);
9403 	  if (TREE_CODE (base1) == ADDR_EXPR)
9404 	    {
9405 	      base1
9406 		= get_inner_reference (TREE_OPERAND (base1, 0),
9407 				       &bitsize, &bitpos1, &offset1, &mode,
9408 				       &unsignedp, &reversep, &volatilep);
9409 	      if (TREE_CODE (base1) == INDIRECT_REF)
9410 		base1 = TREE_OPERAND (base1, 0);
9411 	      else
9412 		indirect_base1 = true;
9413 	    }
9414 	  if (offset1 == NULL_TREE || integer_zerop (offset1))
9415 	    offset1 = TREE_OPERAND (arg1, 1);
9416 	  else
9417 	    offset1 = size_binop (PLUS_EXPR, offset1,
9418 				  TREE_OPERAND (arg1, 1));
9419 	  if (poly_int_tree_p (offset1))
9420 	    {
9421 	      poly_offset_int tem = wi::sext (wi::to_poly_offset (offset1),
9422 					      TYPE_PRECISION (sizetype));
9423 	      tem <<= LOG2_BITS_PER_UNIT;
9424 	      tem += bitpos1;
9425 	      if (tem.to_shwi (&bitpos1))
9426 		offset1 = NULL_TREE;
9427 	    }
9428 	}
9429 
9430       /* If we have equivalent bases we might be able to simplify.  */
9431       if (indirect_base0 == indirect_base1
9432 	  && operand_equal_p (base0, base1,
9433 			      indirect_base0 ? OEP_ADDRESS_OF : 0))
9434 	{
9435 	  /* We can fold this expression to a constant if the non-constant
9436 	     offset parts are equal.  */
9437 	  if ((offset0 == offset1
9438 	       || (offset0 && offset1
9439 		   && operand_equal_p (offset0, offset1, 0)))
9440 	      && (equality_code
9441 		  || (indirect_base0
9442 		      && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
9443 		  || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
9444 	    {
9445 	      if (!equality_code
9446 		  && maybe_ne (bitpos0, bitpos1)
9447 		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
9448 		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
9449 		fold_overflow_warning (("assuming pointer wraparound does not "
9450 					"occur when comparing P +- C1 with "
9451 					"P +- C2"),
9452 				       WARN_STRICT_OVERFLOW_CONDITIONAL);
9453 
9454 	      switch (code)
9455 		{
9456 		case EQ_EXPR:
9457 		  if (known_eq (bitpos0, bitpos1))
9458 		    return constant_boolean_node (true, type);
9459 		  if (known_ne (bitpos0, bitpos1))
9460 		    return constant_boolean_node (false, type);
9461 		  break;
9462 		case NE_EXPR:
9463 		  if (known_ne (bitpos0, bitpos1))
9464 		    return constant_boolean_node (true, type);
9465 		  if (known_eq (bitpos0, bitpos1))
9466 		    return constant_boolean_node (false, type);
9467 		  break;
9468 		case LT_EXPR:
9469 		  if (known_lt (bitpos0, bitpos1))
9470 		    return constant_boolean_node (true, type);
9471 		  if (known_ge (bitpos0, bitpos1))
9472 		    return constant_boolean_node (false, type);
9473 		  break;
9474 		case LE_EXPR:
9475 		  if (known_le (bitpos0, bitpos1))
9476 		    return constant_boolean_node (true, type);
9477 		  if (known_gt (bitpos0, bitpos1))
9478 		    return constant_boolean_node (false, type);
9479 		  break;
9480 		case GE_EXPR:
9481 		  if (known_ge (bitpos0, bitpos1))
9482 		    return constant_boolean_node (true, type);
9483 		  if (known_lt (bitpos0, bitpos1))
9484 		    return constant_boolean_node (false, type);
9485 		  break;
9486 		case GT_EXPR:
9487 		  if (known_gt (bitpos0, bitpos1))
9488 		    return constant_boolean_node (true, type);
9489 		  if (known_le (bitpos0, bitpos1))
9490 		    return constant_boolean_node (false, type);
9491 		  break;
9492 		default:;
9493 		}
9494 	    }
9495 	  /* We can simplify the comparison to a comparison of the variable
9496 	     offset parts if the constant offset parts are equal.
9497 	     Be careful to use signed sizetype here because otherwise we
9498 	     mess with array offsets in the wrong way.  This is possible
9499 	     because pointer arithmetic is required to remain within an
9500 	     object and overflow on pointer differences is undefined as of
9501 	     6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
9502 	  else if (known_eq (bitpos0, bitpos1)
9503 		   && (equality_code
9504 		       || (indirect_base0
9505 			   && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
9506 		       || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
9507 	    {
9508 	      /* By converting to signed sizetype we cover middle-end pointer
9509 	         arithmetic which operates on unsigned pointer types of size
9510 	         type size and ARRAY_REF offsets which are properly sign or
9511 	         zero extended from their type in case it is narrower than
9512 	         sizetype.  */
9513 	      if (offset0 == NULL_TREE)
9514 		offset0 = build_int_cst (ssizetype, 0);
9515 	      else
9516 		offset0 = fold_convert_loc (loc, ssizetype, offset0);
9517 	      if (offset1 == NULL_TREE)
9518 		offset1 = build_int_cst (ssizetype, 0);
9519 	      else
9520 		offset1 = fold_convert_loc (loc, ssizetype, offset1);
9521 
9522 	      if (!equality_code
9523 		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
9524 		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
9525 		fold_overflow_warning (("assuming pointer wraparound does not "
9526 					"occur when comparing P +- C1 with "
9527 					"P +- C2"),
9528 				       WARN_STRICT_OVERFLOW_COMPARISON);
9529 
9530 	      return fold_build2_loc (loc, code, type, offset0, offset1);
9531 	    }
9532 	}
9533       /* For equal offsets we can simplify to a comparison of the
9534 	 base addresses.  */
9535       else if (known_eq (bitpos0, bitpos1)
9536 	       && (indirect_base0
9537 		   ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9538 	       && (indirect_base1
9539 		   ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9540 	       && ((offset0 == offset1)
9541 		   || (offset0 && offset1
9542 		       && operand_equal_p (offset0, offset1, 0))))
9543 	{
9544 	  if (indirect_base0)
9545 	    base0 = build_fold_addr_expr_loc (loc, base0);
9546 	  if (indirect_base1)
9547 	    base1 = build_fold_addr_expr_loc (loc, base1);
9548 	  return fold_build2_loc (loc, code, type, base0, base1);
9549 	}
9550       /* Comparison between an ordinary (non-weak) symbol and a null
9551 	 pointer can be eliminated since such symbols must have a
9552 	 non-null address.  In C, relational expressions between pointers
9553 	 to objects and null pointers are undefined.  The results
9554 	 below follow the C++ rules with the additional property that
9555 	 every object pointer compares greater than a null pointer.
9556       */
9557       else if (((DECL_P (base0)
9558 		 && maybe_nonzero_address (base0) > 0
9559 		 /* Avoid folding references to struct members at offset 0 to
9560 		    prevent tests like '&ptr->firstmember == 0' from getting
9561 		    eliminated.  When ptr is null, although the -> expression
9562 		    is strictly speaking invalid, GCC retains it as a matter
9563 		    of QoI.  See PR c/44555. */
9564 		 && (offset0 == NULL_TREE && known_ne (bitpos0, 0)))
9565 		|| CONSTANT_CLASS_P (base0))
9566 	       && indirect_base0
9567 	       /* The caller guarantees that when one of the arguments is
9568 		  constant (i.e., null in this case) it is second.  */
9569 	       && integer_zerop (arg1))
9570 	{
9571 	  switch (code)
9572 	    {
9573 	    case EQ_EXPR:
9574 	    case LE_EXPR:
9575 	    case LT_EXPR:
9576 	      return constant_boolean_node (false, type);
9577 	    case GE_EXPR:
9578 	    case GT_EXPR:
9579 	    case NE_EXPR:
9580 	      return constant_boolean_node (true, type);
9581 	    default:
9582 	      gcc_unreachable ();
9583 	    }
9584 	}
9585     }
9586 
9587   /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9588      X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
9589      the resulting offset is smaller in absolute value than the
9590      original one and has the same sign.  */
9591   if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9592       && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9593       && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9594       && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9595 	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9596       && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9597       && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9598 	  && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9599     {
9600       tree const1 = TREE_OPERAND (arg0, 1);
9601       tree const2 = TREE_OPERAND (arg1, 1);
9602       tree variable1 = TREE_OPERAND (arg0, 0);
9603       tree variable2 = TREE_OPERAND (arg1, 0);
9604       tree cst;
9605       const char * const warnmsg = G_("assuming signed overflow does not "
9606 				      "occur when combining constants around "
9607 				      "a comparison");
9608 
9609       /* Put the constant on the side where it doesn't overflow and is
9610 	 of lower absolute value and of the same sign as before.  */
9611       cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9612 			     ? MINUS_EXPR : PLUS_EXPR,
9613 			     const2, const1);
9614       if (!TREE_OVERFLOW (cst)
9615 	  && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
9616 	  && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
9617 	{
9618 	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9619 	  return fold_build2_loc (loc, code, type,
9620 				  variable1,
9621 				  fold_build2_loc (loc, TREE_CODE (arg1),
9622 						   TREE_TYPE (arg1),
9623 						   variable2, cst));
9624 	}
9625 
9626       cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9627 			     ? MINUS_EXPR : PLUS_EXPR,
9628 			     const1, const2);
9629       if (!TREE_OVERFLOW (cst)
9630 	  && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
9631 	  && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
9632 	{
9633 	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9634 	  return fold_build2_loc (loc, code, type,
9635 				  fold_build2_loc (loc, TREE_CODE (arg0),
9636 						   TREE_TYPE (arg0),
9637 						   variable1, cst),
9638 				  variable2);
9639 	}
9640     }
9641 
9642   tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9643   if (tem)
9644     return tem;
9645 
9646   /* If we are comparing an expression that just has comparisons
9647      of two integer values, arithmetic expressions of those comparisons,
9648      and constants, we can simplify it.  There are only three cases
9649      to check: the two values can either be equal, the first can be
9650      greater, or the second can be greater.  Fold the expression for
9651      those three values.  Since each value must be 0 or 1, we have
9652      eight possibilities, each of which corresponds to the constant 0
9653      or 1 or one of the six possible comparisons.
9654 
9655      This handles common cases like (a > b) == 0 but also handles
9656      expressions like  ((x > y) - (y > x)) > 0, which supposedly
9657      occur in macroized code.  */
9658 
9659   if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9660     {
9661       tree cval1 = 0, cval2 = 0;
9662 
9663       if (twoval_comparison_p (arg0, &cval1, &cval2)
9664 	  /* Don't handle degenerate cases here; they should already
9665 	     have been handled anyway.  */
9666 	  && cval1 != 0 && cval2 != 0
9667 	  && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9668 	  && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9669 	  && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9670 	  && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9671 	  && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9672 	  && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9673 				TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9674 	{
9675 	  tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9676 	  tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9677 
9678 	  /* We can't just pass T to eval_subst in case cval1 or cval2
9679 	     was the same as ARG1.  */
9680 
9681 	  tree high_result
9682 		= fold_build2_loc (loc, code, type,
9683 			       eval_subst (loc, arg0, cval1, maxval,
9684 					   cval2, minval),
9685 			       arg1);
9686 	  tree equal_result
9687 		= fold_build2_loc (loc, code, type,
9688 			       eval_subst (loc, arg0, cval1, maxval,
9689 					   cval2, maxval),
9690 			       arg1);
9691 	  tree low_result
9692 		= fold_build2_loc (loc, code, type,
9693 			       eval_subst (loc, arg0, cval1, minval,
9694 					   cval2, maxval),
9695 			       arg1);
9696 
9697 	  /* All three of these results should be 0 or 1.  Confirm they are.
9698 	     Then use those values to select the proper code to use.  */
9699 
9700 	  if (TREE_CODE (high_result) == INTEGER_CST
9701 	      && TREE_CODE (equal_result) == INTEGER_CST
9702 	      && TREE_CODE (low_result) == INTEGER_CST)
9703 	    {
9704 	      /* Make a 3-bit mask with the high-order bit being the
9705 		 value for `>', the next for '=', and the low for '<'.  */
9706 	      switch ((integer_onep (high_result) * 4)
9707 		      + (integer_onep (equal_result) * 2)
9708 		      + integer_onep (low_result))
9709 		{
9710 		case 0:
9711 		  /* Always false.  */
9712 		  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9713 		case 1:
9714 		  code = LT_EXPR;
9715 		  break;
9716 		case 2:
9717 		  code = EQ_EXPR;
9718 		  break;
9719 		case 3:
9720 		  code = LE_EXPR;
9721 		  break;
9722 		case 4:
9723 		  code = GT_EXPR;
9724 		  break;
9725 		case 5:
9726 		  code = NE_EXPR;
9727 		  break;
9728 		case 6:
9729 		  code = GE_EXPR;
9730 		  break;
9731 		case 7:
9732 		  /* Always true.  */
9733 		  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9734 		}
9735 
9736 	      return fold_build2_loc (loc, code, type, cval1, cval2);
9737 	    }
9738 	}
9739     }
9740 
9741   return NULL_TREE;
9742 }
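/* Editor-added illustration of two of the folds above, as a
   standalone sketch guarded with #if 0.  In always_true, both
   addresses decompose to base 'a' with byte offsets 12 and 4, so the
   comparison folds to a constant.  In combine_constants, assuming
   signed overflow is undefined, "x + 2 < y + 5" becomes "x < y + 3":
   the combined constant 3 is smaller in magnitude than 5 and has the
   same sign.  */
#if 0
int a[8];

int
always_true (void)
{
  return &a[3] > &a[1];		/* folds to 1 */
}

int
combine_constants (int x, int y)
{
  return x + 2 < y + 5;		/* folds to x < y + 3 */
}
#endif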
9743 
9744 
9745 /* Subroutine of fold_binary.  Optimize complex multiplications of the
9746    form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
9747    argument EXPR represents the expression "z" of type TYPE.  */
9748 
9749 static tree
9750 fold_mult_zconjz (location_t loc, tree type, tree expr)
9751 {
9752   tree itype = TREE_TYPE (type);
9753   tree rpart, ipart, tem;
9754 
9755   if (TREE_CODE (expr) == COMPLEX_EXPR)
9756     {
9757       rpart = TREE_OPERAND (expr, 0);
9758       ipart = TREE_OPERAND (expr, 1);
9759     }
9760   else if (TREE_CODE (expr) == COMPLEX_CST)
9761     {
9762       rpart = TREE_REALPART (expr);
9763       ipart = TREE_IMAGPART (expr);
9764     }
9765   else
9766     {
9767       expr = save_expr (expr);
9768       rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9769       ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9770     }
9771 
9772   rpart = save_expr (rpart);
9773   ipart = save_expr (ipart);
9774   tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9775 		     fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9776 		     fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9777   return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9778 			  build_zero_cst (itype));
9779 }
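/* Editor-added illustration: for z = a + b*i, z * conj(z) equals
   a*a + b*b with a zero imaginary part, which is exactly the
   COMPLEX_EXPR built above.  Standalone sketch, guarded with #if 0.  */
#if 0
#include <complex.h>
#include <stdio.h>

int
main (void)
{
  double complex z = 3.0 + 4.0 * I;
  double complex p = z * conj (z);
  printf ("%g + %gi\n", creal (p), cimag (p));	/* prints "25 + 0i" */
  return 0;
}
#endif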
9780 
9781 
9782 /* Helper function for fold_vec_perm.  Store elements of VECTOR_CST or
9783    CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
9784    true if successful.  */
9785 
9786 static bool
9787 vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
9788 {
9789   unsigned HOST_WIDE_INT i, nunits;
9790 
9791   if (TREE_CODE (arg) == VECTOR_CST
9792       && VECTOR_CST_NELTS (arg).is_constant (&nunits))
9793     {
9794       for (i = 0; i < nunits; ++i)
9795 	elts[i] = VECTOR_CST_ELT (arg, i);
9796     }
9797   else if (TREE_CODE (arg) == CONSTRUCTOR)
9798     {
9799       constructor_elt *elt;
9800 
9801       FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9802 	if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9803 	  return false;
9804 	else
9805 	  elts[i] = elt->value;
9806     }
9807   else
9808     return false;
9809   for (; i < nelts; i++)
9810     elts[i]
9811       = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9812   return true;
9813 }
9814 
9815 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9816    selector.  Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9817    NULL_TREE otherwise.  */
9818 
9819 tree
9820 fold_vec_perm (tree type, tree arg0, tree arg1, const vec_perm_indices &sel)
9821 {
9822   unsigned int i;
9823   unsigned HOST_WIDE_INT nelts;
9824   bool need_ctor = false;
9825 
9826   if (!sel.length ().is_constant (&nelts))
9827     return NULL_TREE;
9828   gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type), nelts)
9829 	      && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)), nelts)
9830 	      && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)), nelts));
9831   if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9832       || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9833     return NULL_TREE;
9834 
9835   tree *in_elts = XALLOCAVEC (tree, nelts * 2);
9836   if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
9837       || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
9838     return NULL_TREE;
9839 
9840   tree_vector_builder out_elts (type, nelts, 1);
9841   for (i = 0; i < nelts; i++)
9842     {
9843       HOST_WIDE_INT index;
9844       if (!sel[i].is_constant (&index))
9845 	return NULL_TREE;
9846       if (!CONSTANT_CLASS_P (in_elts[index]))
9847 	need_ctor = true;
9848       out_elts.quick_push (unshare_expr (in_elts[index]));
9849     }
9850 
9851   if (need_ctor)
9852     {
9853       vec<constructor_elt, va_gc> *v;
9854       vec_alloc (v, nelts);
9855       for (i = 0; i < nelts; i++)
9856 	CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, out_elts[i]);
9857       return build_constructor (type, v);
9858     }
9859   else
9860     return out_elts.build ();
9861 }
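/* Editor-added sketch of the selection rule used above: SEL indexes
   into the concatenation of ARG0 and ARG1, so index I < NELTS picks
   element I of ARG0 and index I >= NELTS picks element I - NELTS of
   ARG1.  Scalar model, guarded with #if 0.  */
#if 0
#include <stdio.h>

int
main (void)
{
  int in[8] = { 10, 11, 12, 13,	  /* arg0 */
		20, 21, 22, 23 }; /* arg1 */
  int sel[4] = { 0, 4, 1, 5 };	  /* interleave the two low halves */
  for (int i = 0; i < 4; i++)
    printf ("%d ", in[sel[i]]);	  /* prints "10 20 11 21" */
  printf ("\n");
  return 0;
}
#endif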
9862 
9863 /* Try to fold a pointer difference of type TYPE between two address
9864    expressions of array references AREF0 and AREF1, at location LOC.  Return a
9865    simplified expression for the difference or NULL_TREE.  */
9866 
9867 static tree
9868 fold_addr_of_array_ref_difference (location_t loc, tree type,
9869 				   tree aref0, tree aref1,
9870 				   bool use_pointer_diff)
9871 {
9872   tree base0 = TREE_OPERAND (aref0, 0);
9873   tree base1 = TREE_OPERAND (aref1, 0);
9874   tree base_offset = build_int_cst (type, 0);
9875 
9876   /* If the bases are array references as well, recurse.  If the bases
9877      are pointer indirections compute the difference of the pointers.
9878      If the bases are equal, we are set.  */
9879   if ((TREE_CODE (base0) == ARRAY_REF
9880        && TREE_CODE (base1) == ARRAY_REF
9881        && (base_offset
9882 	   = fold_addr_of_array_ref_difference (loc, type, base0, base1,
9883 						use_pointer_diff)))
9884       || (INDIRECT_REF_P (base0)
9885 	  && INDIRECT_REF_P (base1)
9886 	  && (base_offset
9887 	        = use_pointer_diff
9888 		  ? fold_binary_loc (loc, POINTER_DIFF_EXPR, type,
9889 				     TREE_OPERAND (base0, 0),
9890 				     TREE_OPERAND (base1, 0))
9891 		  : fold_binary_loc (loc, MINUS_EXPR, type,
9892 				     fold_convert (type,
9893 						   TREE_OPERAND (base0, 0)),
9894 				     fold_convert (type,
9895 						   TREE_OPERAND (base1, 0)))))
9896       || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
9897     {
9898       tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9899       tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9900       tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9901       tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
9902       return fold_build2_loc (loc, PLUS_EXPR, type,
9903 			      base_offset,
9904 			      fold_build2_loc (loc, MULT_EXPR, type,
9905 					       diff, esz));
9906     }
9907   return NULL_TREE;
9908 }
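/* Editor-added illustration: with a common base, the difference of
   two element addresses reduces to the index difference scaled by the
   element size, which is the PLUS/MULT expression built above; the
   scaling then cancels against the pointer division.  Standalone
   sketch, guarded with #if 0.  */
#if 0
#include <stddef.h>

extern double a[100];

ptrdiff_t
elt_diff (long i, long j)
{
  return &a[i] - &a[j];		/* folds to i - j */
}
#endif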
9909 
9910 /* If the real or vector real constant CST of type TYPE has an exact
9911    inverse, return it, else return NULL.  */
9912 
9913 tree
9914 exact_inverse (tree type, tree cst)
9915 {
9916   REAL_VALUE_TYPE r;
9917   tree unit_type;
9918   machine_mode mode;
9919 
9920   switch (TREE_CODE (cst))
9921     {
9922     case REAL_CST:
9923       r = TREE_REAL_CST (cst);
9924 
9925       if (exact_real_inverse (TYPE_MODE (type), &r))
9926 	return build_real (type, r);
9927 
9928       return NULL_TREE;
9929 
9930     case VECTOR_CST:
9931       {
9932 	unit_type = TREE_TYPE (type);
9933 	mode = TYPE_MODE (unit_type);
9934 
9935 	tree_vector_builder elts;
9936 	if (!elts.new_unary_operation (type, cst, false))
9937 	  return NULL_TREE;
9938 	unsigned int count = elts.encoded_nelts ();
9939 	for (unsigned int i = 0; i < count; ++i)
9940 	  {
9941 	    r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9942 	    if (!exact_real_inverse (mode, &r))
9943 	      return NULL_TREE;
9944 	    elts.quick_push (build_real (unit_type, r));
9945 	  }
9946 
9947 	return elts.build ();
9948       }
9949 
9950     default:
9951       return NULL_TREE;
9952     }
9953 }
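/* Editor-added illustration: only constants whose reciprocal is
   exactly representable (essentially in-range powers of two) have an
   exact inverse; that is what allows a division such as x / 8.0 to be
   rewritten as x * 0.125 without changing any result.  Standalone
   sketch, guarded with #if 0.  */
#if 0
#include <stdio.h>

int
main (void)
{
  printf ("%d\n", 1.0 / 8.0 == 0.125);	/* 1: exact inverse exists */
  printf ("%.17g\n", 1.0 / 3.0);	/* rounded: no exact inverse */
  return 0;
}
#endif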
9954 
9955 /*  Mask out the tz least significant bits of X of type TYPE where
9956     tz is the number of trailing zeroes in Y.  */
9957 static wide_int
9958 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9959 {
9960   int tz = wi::ctz (y);
9961   if (tz > 0)
9962     return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9963   return x;
9964 }
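/* Editor-added sketch in plain C (hypothetical demo, not GCC code):
   in folds of shapes like (T * Y) & X, a Y with TZ trailing zero bits
   forces the low TZ bits of the product to zero, so those bits of the
   mask constant X are irrelevant and may be cleared, which is what
   the helper above does.  */
#if 0
#include <stdio.h>

static unsigned
mask_with_tz_demo (unsigned x, unsigned y)
{
  int tz = __builtin_ctz (y);	/* trailing zeros of y; y must be nonzero */
  return tz > 0 ? x & ~((1u << tz) - 1u) : x;
}

int
main (void)
{
  /* y = 8 has three trailing zeros, so bits 0-2 of x are dropped.  */
  printf ("%#x\n", mask_with_tz_demo (0xff, 8));	/* prints "0xf8" */
  return 0;
}
#endif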
9965 
9966 /* Return true when T is an address and is known to be nonzero.
9967    For floating point we further ensure that T is not denormal.
9968    Similar logic is present in nonzero_address in rtlanal.h.
9969 
9970    If the return value is based on the assumption that signed overflow
9971    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9972    change *STRICT_OVERFLOW_P.  */
9973 
9974 static bool
9975 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9976 {
9977   tree type = TREE_TYPE (t);
9978   enum tree_code code;
9979 
9980   /* Doing something useful for floating point would need more work.  */
9981   if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9982     return false;
9983 
9984   code = TREE_CODE (t);
9985   switch (TREE_CODE_CLASS (code))
9986     {
9987     case tcc_unary:
9988       return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9989 					      strict_overflow_p);
9990     case tcc_binary:
9991     case tcc_comparison:
9992       return tree_binary_nonzero_warnv_p (code, type,
9993 					       TREE_OPERAND (t, 0),
9994 					       TREE_OPERAND (t, 1),
9995 					       strict_overflow_p);
9996     case tcc_constant:
9997     case tcc_declaration:
9998     case tcc_reference:
9999       return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10000 
10001     default:
10002       break;
10003     }
10004 
10005   switch (code)
10006     {
10007     case TRUTH_NOT_EXPR:
10008       return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10009 					      strict_overflow_p);
10010 
10011     case TRUTH_AND_EXPR:
10012     case TRUTH_OR_EXPR:
10013     case TRUTH_XOR_EXPR:
10014       return tree_binary_nonzero_warnv_p (code, type,
10015 					       TREE_OPERAND (t, 0),
10016 					       TREE_OPERAND (t, 1),
10017 					       strict_overflow_p);
10018 
10019     case COND_EXPR:
10020     case CONSTRUCTOR:
10021     case OBJ_TYPE_REF:
10022     case ASSERT_EXPR:
10023     case ADDR_EXPR:
10024     case WITH_SIZE_EXPR:
10025     case SSA_NAME:
10026       return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10027 
10028     case COMPOUND_EXPR:
10029     case MODIFY_EXPR:
10030     case BIND_EXPR:
10031       return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
10032 					strict_overflow_p);
10033 
10034     case SAVE_EXPR:
10035       return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
10036 					strict_overflow_p);
10037 
10038     case CALL_EXPR:
10039       {
10040 	tree fndecl = get_callee_fndecl (t);
10041 	if (!fndecl) return false;
10042 	if (flag_delete_null_pointer_checks && !flag_check_new
10043 	    && DECL_IS_OPERATOR_NEW_P (fndecl)
10044 	    && !TREE_NOTHROW (fndecl))
10045 	  return true;
10046 	if (flag_delete_null_pointer_checks
10047 	    && lookup_attribute ("returns_nonnull",
10048 		 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
10049 	  return true;
10050 	return alloca_call_p (t);
10051       }
10052 
10053     default:
10054       break;
10055     }
10056   return false;
10057 }
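/* Editor-added illustration: with -fdelete-null-pointer-checks (the
   default when optimizing), a call to a function declared
   returns_nonnull is known nonzero by the CALL_EXPR case above, so
   the comparison below can fold to true.  The declaration is
   hypothetical; standalone sketch, guarded with #if 0.  */
#if 0
extern void *my_alloc (unsigned long)
  __attribute__ ((returns_nonnull));

int
never_null (void)
{
  return my_alloc (16) != 0;	/* foldable to 1 */
}
#endif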
10058 
10059 /* Return true when T is an address and is known to be nonzero.
10060    Handle warnings about undefined signed overflow.  */
10061 
10062 bool
10063 tree_expr_nonzero_p (tree t)
10064 {
10065   bool ret, strict_overflow_p;
10066 
10067   strict_overflow_p = false;
10068   ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
10069   if (strict_overflow_p)
10070     fold_overflow_warning (("assuming signed overflow does not occur when "
10071 			    "determining that expression is always "
10072 			    "non-zero"),
10073 			   WARN_STRICT_OVERFLOW_MISC);
10074   return ret;
10075 }
10076 
10077 /* Return true if T is known not to be equal to an integer W.  */
10078 
10079 bool
10080 expr_not_equal_to (tree t, const wide_int &w)
10081 {
10082   wide_int min, max, nz;
10083   value_range_kind rtype;
10084   switch (TREE_CODE (t))
10085     {
10086     case INTEGER_CST:
10087       return wi::to_wide (t) != w;
10088 
10089     case SSA_NAME:
10090       if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
10091 	return false;
10092       rtype = get_range_info (t, &min, &max);
10093       if (rtype == VR_RANGE)
10094 	{
10095 	  if (wi::lt_p (max, w, TYPE_SIGN (TREE_TYPE (t))))
10096 	    return true;
10097 	  if (wi::lt_p (w, min, TYPE_SIGN (TREE_TYPE (t))))
10098 	    return true;
10099 	}
10100       else if (rtype == VR_ANTI_RANGE
10101 	       && wi::le_p (min, w, TYPE_SIGN (TREE_TYPE (t)))
10102 	       && wi::le_p (w, max, TYPE_SIGN (TREE_TYPE (t))))
10103 	return true;
10104       /* If T has some known zero bits and W has any of those bits set,
10105 	 then T is known not to be equal to W.  */
10106       if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
10107 			      TYPE_PRECISION (TREE_TYPE (t))), 0))
10108 	return true;
10109       return false;
10110 
10111     default:
10112       return false;
10113     }
10114 }
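/* Editor-added illustration of the "known zero bits" test above: a
   value known to have bit 0 clear (e.g. T = X << 1) can never equal
   an odd constant W, because W then has a bit set outside T's
   possible nonzero bits.  Standalone sketch, guarded with #if 0.  */
#if 0
#include <stdio.h>

int
main (void)
{
  unsigned nonzero_bits = ~1u;	/* bit 0 of t is known to be zero */
  unsigned w = 5u;		/* odd constant */
  printf ("%d\n", (w & ~nonzero_bits) != 0);	/* 1: t != w is known */
  return 0;
}
#endif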
10115 
10116 /* Fold a binary expression of code CODE and type TYPE with operands
10117    OP0 and OP1.  LOC is the location of the resulting expression.
10118    Return the folded expression if folding is successful.  Otherwise,
10119    return NULL_TREE.  */
10120 
10121 tree
10122 fold_binary_loc (location_t loc, enum tree_code code, tree type,
10123 		 tree op0, tree op1)
10124 {
10125   enum tree_code_class kind = TREE_CODE_CLASS (code);
10126   tree arg0, arg1, tem;
10127   tree t1 = NULL_TREE;
10128   bool strict_overflow_p;
10129   unsigned int prec;
10130 
10131   gcc_assert (IS_EXPR_CODE_CLASS (kind)
10132 	      && TREE_CODE_LENGTH (code) == 2
10133 	      && op0 != NULL_TREE
10134 	      && op1 != NULL_TREE);
10135 
10136   arg0 = op0;
10137   arg1 = op1;
10138 
10139   /* Strip any conversions that don't change the mode.  This is
10140      safe for every expression, except for a comparison expression
10141      because its signedness is derived from its operands.  So, in
10142      the latter case, only strip conversions that don't change the
10143      signedness.  MIN_EXPR/MAX_EXPR also need signedness of arguments
10144      preserved.
10145 
10146      Note that this is done as an internal manipulation within the
10147      constant folder, in order to find the simplest representation
10148      of the arguments so that their form can be studied.  In any
10149      cases, the appropriate type conversions should be put back in
10150      the tree that will get out of the constant folder.  */
10151 
10152   if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
10153     {
10154       STRIP_SIGN_NOPS (arg0);
10155       STRIP_SIGN_NOPS (arg1);
10156     }
10157   else
10158     {
10159       STRIP_NOPS (arg0);
10160       STRIP_NOPS (arg1);
10161     }
10162 
10163   /* Note that TREE_CONSTANT isn't enough: static var addresses are
10164      constant but we can't do arithmetic on them.  */
10165   if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
10166     {
10167       tem = const_binop (code, type, arg0, arg1);
10168       if (tem != NULL_TREE)
10169 	{
10170 	  if (TREE_TYPE (tem) != type)
10171 	    tem = fold_convert_loc (loc, type, tem);
10172 	  return tem;
10173 	}
10174     }
10175 
10176   /* If this is a commutative operation, and ARG0 is a constant, move it
10177      to ARG1 to reduce the number of tests below.  */
10178   if (commutative_tree_code (code)
10179       && tree_swap_operands_p (arg0, arg1))
10180     return fold_build2_loc (loc, code, type, op1, op0);
10181 
10182   /* Likewise if this is a comparison, and ARG0 is a constant, move it
10183      to ARG1 to reduce the number of tests below.  */
10184   if (kind == tcc_comparison
10185       && tree_swap_operands_p (arg0, arg1))
10186     return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
10187 
10188   tem = generic_simplify (loc, code, type, op0, op1);
10189   if (tem)
10190     return tem;
10191 
10192   /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10193 
10194      First check for cases where an arithmetic operation is applied to a
10195      compound, conditional, or comparison operation.  Push the arithmetic
10196      operation inside the compound or conditional to see if any folding
10197      can then be done.  Convert comparison to conditional for this purpose.
10198      This also optimizes non-constant cases that used to be done in
10199      expand_expr.
10200 
10201      Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
10202      where one of the operands is a comparison and the other is a comparison, a
10203      BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
10204      code below would make the expression more complex.  Change it to a
10205      TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
10206      TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */
10207 
10208   if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10209        || code == EQ_EXPR || code == NE_EXPR)
10210       && !VECTOR_TYPE_P (TREE_TYPE (arg0))
10211       && ((truth_value_p (TREE_CODE (arg0))
10212 	   && (truth_value_p (TREE_CODE (arg1))
10213 	       || (TREE_CODE (arg1) == BIT_AND_EXPR
10214 		   && integer_onep (TREE_OPERAND (arg1, 1)))))
10215 	  || (truth_value_p (TREE_CODE (arg1))
10216 	      && (truth_value_p (TREE_CODE (arg0))
10217 		  || (TREE_CODE (arg0) == BIT_AND_EXPR
10218 		      && integer_onep (TREE_OPERAND (arg0, 1)))))))
10219     {
10220       tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10221 			 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10222 			 : TRUTH_XOR_EXPR,
10223 			 boolean_type_node,
10224 			 fold_convert_loc (loc, boolean_type_node, arg0),
10225 			 fold_convert_loc (loc, boolean_type_node, arg1));
10226 
10227       if (code == EQ_EXPR)
10228 	tem = invert_truthvalue_loc (loc, tem);
10229 
10230       return fold_convert_loc (loc, type, tem);
10231     }
10232 
10233   if (TREE_CODE_CLASS (code) == tcc_binary
10234       || TREE_CODE_CLASS (code) == tcc_comparison)
10235     {
10236       if (TREE_CODE (arg0) == COMPOUND_EXPR)
10237 	{
10238 	  tem = fold_build2_loc (loc, code, type,
10239 			     fold_convert_loc (loc, TREE_TYPE (op0),
10240 					       TREE_OPERAND (arg0, 1)), op1);
10241 	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10242 			     tem);
10243 	}
10244       if (TREE_CODE (arg1) == COMPOUND_EXPR)
10245 	{
10246 	  tem = fold_build2_loc (loc, code, type, op0,
10247 			     fold_convert_loc (loc, TREE_TYPE (op1),
10248 					       TREE_OPERAND (arg1, 1)));
10249 	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10250 			     tem);
10251 	}
10252 
10253       if (TREE_CODE (arg0) == COND_EXPR
10254 	  || TREE_CODE (arg0) == VEC_COND_EXPR
10255 	  || COMPARISON_CLASS_P (arg0))
10256 	{
10257 	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10258 						     arg0, arg1,
10259 						     /*cond_first_p=*/1);
10260 	  if (tem != NULL_TREE)
10261 	    return tem;
10262 	}
10263 
10264       if (TREE_CODE (arg1) == COND_EXPR
10265 	  || TREE_CODE (arg1) == VEC_COND_EXPR
10266 	  || COMPARISON_CLASS_P (arg1))
10267 	{
10268 	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10269 						     arg1, arg0,
10270 					             /*cond_first_p=*/0);
10271 	  if (tem != NULL_TREE)
10272 	    return tem;
10273 	}
10274     }
10275 
10276   switch (code)
10277     {
10278     case MEM_REF:
10279       /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2].  */
10280       if (TREE_CODE (arg0) == ADDR_EXPR
10281 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10282 	{
10283 	  tree iref = TREE_OPERAND (arg0, 0);
10284 	  return fold_build2 (MEM_REF, type,
10285 			      TREE_OPERAND (iref, 0),
10286 			      int_const_binop (PLUS_EXPR, arg1,
10287 					       TREE_OPERAND (iref, 1)));
10288 	}
10289 
10290       /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2].  */
10291       if (TREE_CODE (arg0) == ADDR_EXPR
10292 	  && handled_component_p (TREE_OPERAND (arg0, 0)))
10293 	{
10294 	  tree base;
10295 	  poly_int64 coffset;
10296 	  base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10297 						&coffset);
10298 	  if (!base)
10299 	    return NULL_TREE;
10300 	  return fold_build2 (MEM_REF, type,
10301 			      build1 (ADDR_EXPR, TREE_TYPE (arg0), base),
10302 			      int_const_binop (PLUS_EXPR, arg1,
10303 					       size_int (coffset)));
10304 	}
10305 
10306       return NULL_TREE;
10307 
10308     case POINTER_PLUS_EXPR:
10309       /* INT +p INT -> (PTR)(INT + INT).  Stripping types allows for this. */
10310       if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10311 	   && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10312         return fold_convert_loc (loc, type,
10313 				 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10314 					      fold_convert_loc (loc, sizetype,
10315 								arg1),
10316 					      fold_convert_loc (loc, sizetype,
10317 								arg0)));
10318 
10319       return NULL_TREE;
10320 
10321     case PLUS_EXPR:
10322       if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10323 	{
10324 	  /* X + (X / CST) * -CST is X % CST.  */
10325 	  if (TREE_CODE (arg1) == MULT_EXPR
10326 	      && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10327 	      && operand_equal_p (arg0,
10328 				  TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10329 	    {
10330 	      tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10331 	      tree cst1 = TREE_OPERAND (arg1, 1);
10332 	      tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10333 				      cst1, cst0);
10334 	      if (sum && integer_zerop (sum))
10335 		return fold_convert_loc (loc, type,
10336 					 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10337 						      TREE_TYPE (arg0), arg0,
10338 						      cst0));
10339 	    }
10340 	}
10341 
10342       /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10343 	 one.  Make sure the type is not saturating and has the signedness of
10344 	 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10345 	 ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
10346       if ((TREE_CODE (arg0) == MULT_EXPR
10347 	   || TREE_CODE (arg1) == MULT_EXPR)
10348 	  && !TYPE_SATURATING (type)
10349 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10350 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10351 	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
10352         {
10353 	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10354 	  if (tem)
10355 	    return tem;
10356 	}
10357 
10358       if (! FLOAT_TYPE_P (type))
10359 	{
10360 	  /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10361 	     (plus (plus (mult) (mult)) (foo)) so that we can
10362 	     take advantage of the factoring cases below.  */
10363 	  if (ANY_INTEGRAL_TYPE_P (type)
10364 	      && TYPE_OVERFLOW_WRAPS (type)
10365 	      && (((TREE_CODE (arg0) == PLUS_EXPR
10366 		    || TREE_CODE (arg0) == MINUS_EXPR)
10367 		   && TREE_CODE (arg1) == MULT_EXPR)
10368 		  || ((TREE_CODE (arg1) == PLUS_EXPR
10369 		       || TREE_CODE (arg1) == MINUS_EXPR)
10370 		      && TREE_CODE (arg0) == MULT_EXPR)))
10371 	    {
10372 	      tree parg0, parg1, parg, marg;
10373 	      enum tree_code pcode;
10374 
10375 	      if (TREE_CODE (arg1) == MULT_EXPR)
10376 		parg = arg0, marg = arg1;
10377 	      else
10378 		parg = arg1, marg = arg0;
10379 	      pcode = TREE_CODE (parg);
10380 	      parg0 = TREE_OPERAND (parg, 0);
10381 	      parg1 = TREE_OPERAND (parg, 1);
10382 	      STRIP_NOPS (parg0);
10383 	      STRIP_NOPS (parg1);
10384 
10385 	      if (TREE_CODE (parg0) == MULT_EXPR
10386 		  && TREE_CODE (parg1) != MULT_EXPR)
10387 		return fold_build2_loc (loc, pcode, type,
10388 				    fold_build2_loc (loc, PLUS_EXPR, type,
10389 						 fold_convert_loc (loc, type,
10390 								   parg0),
10391 						 fold_convert_loc (loc, type,
10392 								   marg)),
10393 				    fold_convert_loc (loc, type, parg1));
10394 	      if (TREE_CODE (parg0) != MULT_EXPR
10395 		  && TREE_CODE (parg1) == MULT_EXPR)
10396 		return
10397 		  fold_build2_loc (loc, PLUS_EXPR, type,
10398 			       fold_convert_loc (loc, type, parg0),
10399 			       fold_build2_loc (loc, pcode, type,
10400 					    fold_convert_loc (loc, type, marg),
10401 					    fold_convert_loc (loc, type,
10402 							      parg1)));
10403 	    }
10404 	}
10405       else
10406 	{
10407 	  /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10408 	     to __complex__ ( x, y ).  This is not the same for SNaNs or
10409 	     if signed zeros are involved.  */
10410 	  if (!HONOR_SNANS (element_mode (arg0))
10411               && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10412 	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10413 	    {
10414 	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10415 	      tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10416 	      tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10417 	      bool arg0rz = false, arg0iz = false;
10418 	      if ((arg0r && (arg0rz = real_zerop (arg0r)))
10419 		  || (arg0i && (arg0iz = real_zerop (arg0i))))
10420 		{
10421 		  tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10422 		  tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10423 		  if (arg0rz && arg1i && real_zerop (arg1i))
10424 		    {
10425 		      tree rp = arg1r ? arg1r
10426 				  : build1 (REALPART_EXPR, rtype, arg1);
10427 		      tree ip = arg0i ? arg0i
10428 				  : build1 (IMAGPART_EXPR, rtype, arg0);
10429 		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10430 		    }
10431 		  else if (arg0iz && arg1r && real_zerop (arg1r))
10432 		    {
10433 		      tree rp = arg0r ? arg0r
10434 				  : build1 (REALPART_EXPR, rtype, arg0);
10435 		      tree ip = arg1i ? arg1i
10436 				  : build1 (IMAGPART_EXPR, rtype, arg1);
10437 		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10438 		    }
10439 		}
10440 	    }
10441 
10442           /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10443              We associate floats only if the user has specified
10444              -fassociative-math.  */
10445           if (flag_associative_math
10446               && TREE_CODE (arg1) == PLUS_EXPR
10447               && TREE_CODE (arg0) != MULT_EXPR)
10448             {
10449               tree tree10 = TREE_OPERAND (arg1, 0);
10450               tree tree11 = TREE_OPERAND (arg1, 1);
10451               if (TREE_CODE (tree11) == MULT_EXPR
10452 		  && TREE_CODE (tree10) == MULT_EXPR)
10453                 {
10454                   tree tree0;
10455                   tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10456                   return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10457                 }
10458             }
10459           /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10460              We associate floats only if the user has specified
10461              -fassociative-math.  */
10462           if (flag_associative_math
10463               && TREE_CODE (arg0) == PLUS_EXPR
10464               && TREE_CODE (arg1) != MULT_EXPR)
10465             {
10466               tree tree00 = TREE_OPERAND (arg0, 0);
10467               tree tree01 = TREE_OPERAND (arg0, 1);
10468               if (TREE_CODE (tree01) == MULT_EXPR
10469 		  && TREE_CODE (tree00) == MULT_EXPR)
10470                 {
10471                   tree tree0;
10472                   tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10473                   return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10474                 }
10475             }
10476 	}
10477 
10478      bit_rotate:
10479       /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10480 	 is a rotate of A by C1 bits.  */
10481       /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10482 	 is a rotate of A by B bits.
10483 	 Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
10484 	 though in this case CODE must be | and not + or ^, otherwise
10485 	 it doesn't return A when B is 0.  */
10486       {
10487 	enum tree_code code0, code1;
10488 	tree rtype;
10489 	code0 = TREE_CODE (arg0);
10490 	code1 = TREE_CODE (arg1);
10491 	if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10492 	     || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10493 	    && operand_equal_p (TREE_OPERAND (arg0, 0),
10494 			        TREE_OPERAND (arg1, 0), 0)
10495 	    && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10496 	        TYPE_UNSIGNED (rtype))
10497 	    /* Only create rotates in complete modes.  Other cases are not
10498 	       expanded properly.  */
10499 	    && (element_precision (rtype)
10500 		== GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
10501 	  {
10502 	    tree tree01, tree11;
10503 	    tree orig_tree01, orig_tree11;
10504 	    enum tree_code code01, code11;
10505 
10506 	    tree01 = orig_tree01 = TREE_OPERAND (arg0, 1);
10507 	    tree11 = orig_tree11 = TREE_OPERAND (arg1, 1);
10508 	    STRIP_NOPS (tree01);
10509 	    STRIP_NOPS (tree11);
10510 	    code01 = TREE_CODE (tree01);
10511 	    code11 = TREE_CODE (tree11);
10512 	    if (code11 != MINUS_EXPR
10513 		&& (code01 == MINUS_EXPR || code01 == BIT_AND_EXPR))
10514 	      {
10515 		std::swap (code0, code1);
10516 		std::swap (code01, code11);
10517 		std::swap (tree01, tree11);
10518 		std::swap (orig_tree01, orig_tree11);
10519 	      }
10520 	    if (code01 == INTEGER_CST
10521 		&& code11 == INTEGER_CST
10522 		&& (wi::to_widest (tree01) + wi::to_widest (tree11)
10523 		    == element_precision (rtype)))
10524 	      {
10525 		tem = build2_loc (loc, LROTATE_EXPR,
10526 				  rtype, TREE_OPERAND (arg0, 0),
10527 				  code0 == LSHIFT_EXPR
10528 				  ? orig_tree01 : orig_tree11);
10529 		return fold_convert_loc (loc, type, tem);
10530 	      }
10531 	    else if (code11 == MINUS_EXPR)
10532 	      {
10533 		tree tree110, tree111;
10534 		tree110 = TREE_OPERAND (tree11, 0);
10535 		tree111 = TREE_OPERAND (tree11, 1);
10536 		STRIP_NOPS (tree110);
10537 		STRIP_NOPS (tree111);
10538 		if (TREE_CODE (tree110) == INTEGER_CST
10539 		    && compare_tree_int (tree110,
10540 					 element_precision (rtype)) == 0
10541 		    && operand_equal_p (tree01, tree111, 0))
10542 		  {
10543 		    tem = build2_loc (loc, (code0 == LSHIFT_EXPR
10544 					    ? LROTATE_EXPR : RROTATE_EXPR),
10545 				      rtype, TREE_OPERAND (arg0, 0),
10546 				      orig_tree01);
10547 		    return fold_convert_loc (loc, type, tem);
10548 		  }
10549 	      }
10550 	    else if (code == BIT_IOR_EXPR
10551 		     && code11 == BIT_AND_EXPR
10552 		     && pow2p_hwi (element_precision (rtype)))
10553 	      {
10554 		tree tree110, tree111;
10555 		tree110 = TREE_OPERAND (tree11, 0);
10556 		tree111 = TREE_OPERAND (tree11, 1);
10557 		STRIP_NOPS (tree110);
10558 		STRIP_NOPS (tree111);
10559 		if (TREE_CODE (tree110) == NEGATE_EXPR
10560 		    && TREE_CODE (tree111) == INTEGER_CST
10561 		    && compare_tree_int (tree111,
10562 					 element_precision (rtype) - 1) == 0
10563 		    && operand_equal_p (tree01, TREE_OPERAND (tree110, 0), 0))
10564 		  {
10565 		    tem = build2_loc (loc, (code0 == LSHIFT_EXPR
10566 					    ? LROTATE_EXPR : RROTATE_EXPR),
10567 				      rtype, TREE_OPERAND (arg0, 0),
10568 				      orig_tree01);
10569 		    return fold_convert_loc (loc, type, tem);
10570 		  }
10571 	      }
10572 	  }
10573       }
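      /* Editor-added illustration: the BIT_AND_EXPR form above is the
	 portable rotate idiom.  "-b & 31" equals "(32 - b) % 32", so the
	 expression is well defined even for b == 0, and the whole body
	 can be folded to a single LROTATE_EXPR.  Standalone sketch,
	 guarded with #if 0.  */
#if 0
static unsigned
rotl32 (unsigned x, unsigned b)
{
  return (x << b) | (x >> (-b & 31));
}
#endif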
10574 
10575     associate:
10576       /* In most languages, we can't reassociate operations on floats
10577 	 across parentheses.  Rather than remember where the parentheses were, we
10578 	 don't associate floats at all, unless the user has specified
10579 	 -fassociative-math.
10580 	 And, we need to make sure type is not saturating.  */
10581 
10582       if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10583 	  && !TYPE_SATURATING (type))
10584 	{
10585 	  tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
10586 	  tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
10587 	  tree atype = type;
10588 	  bool ok = true;
10589 
10590 	  /* Split both trees into variables, constants, and literals.  Then
10591 	     associate each group together, the constants with literals,
10592 	     then the result with variables.  This increases the chances of
10593 	     literals being recombined later and of generating relocatable
10594 	     expressions for the sum of a constant and literal.  */
10595 	  var0 = split_tree (arg0, type, code,
10596 			     &minus_var0, &con0, &minus_con0,
10597 			     &lit0, &minus_lit0, 0);
10598 	  var1 = split_tree (arg1, type, code,
10599 			     &minus_var1, &con1, &minus_con1,
10600 			     &lit1, &minus_lit1, code == MINUS_EXPR);
10601 
10602 	  /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
10603 	  if (code == MINUS_EXPR)
10604 	    code = PLUS_EXPR;
10605 
10606 	  /* With undefined overflow prefer doing association in a type
10607 	     which wraps on overflow, if that is one of the operand types.  */
10608 	  if ((POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
10609 	      && !TYPE_OVERFLOW_WRAPS (type))
10610 	    {
10611 	      if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10612 		  && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10613 		atype = TREE_TYPE (arg0);
10614 	      else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10615 		       && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10616 		atype = TREE_TYPE (arg1);
10617 	      gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10618 	    }
10619 
10620 	  /* With undefined overflow we can only associate constants with one
10621 	     variable, and constants whose association doesn't overflow.  */
10622 	  if ((POINTER_TYPE_P (atype) || INTEGRAL_TYPE_P (atype))
10623 	      && !TYPE_OVERFLOW_WRAPS (atype))
10624 	    {
10625 	      if ((var0 && var1) || (minus_var0 && minus_var1))
10626 		{
10627 		  /* ???  If split_tree would handle NEGATE_EXPR we could
10628 		     simply reject these cases and the allowed cases would
10629 		     be the var0/minus_var1 ones.  */
10630 		  tree tmp0 = var0 ? var0 : minus_var0;
10631 		  tree tmp1 = var1 ? var1 : minus_var1;
10632 		  bool one_neg = false;
10633 
10634 		  if (TREE_CODE (tmp0) == NEGATE_EXPR)
10635 		    {
10636 		      tmp0 = TREE_OPERAND (tmp0, 0);
10637 		      one_neg = !one_neg;
10638 		    }
10639 		  if (CONVERT_EXPR_P (tmp0)
10640 		      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10641 		      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10642 			  <= TYPE_PRECISION (atype)))
10643 		    tmp0 = TREE_OPERAND (tmp0, 0);
10644 		  if (TREE_CODE (tmp1) == NEGATE_EXPR)
10645 		    {
10646 		      tmp1 = TREE_OPERAND (tmp1, 0);
10647 		      one_neg = !one_neg;
10648 		    }
10649 		  if (CONVERT_EXPR_P (tmp1)
10650 		      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10651 		      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10652 			  <= TYPE_PRECISION (atype)))
10653 		    tmp1 = TREE_OPERAND (tmp1, 0);
10654 		  /* The only case we can still associate with two variables
10655 		     is if they cancel out.  */
10656 		  if (!one_neg
10657 		      || !operand_equal_p (tmp0, tmp1, 0))
10658 		    ok = false;
10659 		}
10660 	      else if ((var0 && minus_var1
10661 			&& ! operand_equal_p (var0, minus_var1, 0))
10662 		       || (minus_var0 && var1
10663 			   && ! operand_equal_p (minus_var0, var1, 0)))
10664 		ok = false;
10665 	    }
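	  /* Illustrative case: (x + 5) - (x + 3) may still be associated,
	     since the two variable parts cancel, whereas (x + 5) + (y + 3)
	     is left alone when overflow is undefined.  */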
10666 
10667 	  /* Only do something if we found more than two objects.  Otherwise,
10668 	     nothing has changed and we risk infinite recursion.  */
10669 	  if (ok
10670 	      && ((var0 != 0) + (var1 != 0)
10671 		  + (minus_var0 != 0) + (minus_var1 != 0)
10672 		  + (con0 != 0) + (con1 != 0)
10673 		  + (minus_con0 != 0) + (minus_con1 != 0)
10674 		  + (lit0 != 0) + (lit1 != 0)
10675 		  + (minus_lit0 != 0) + (minus_lit1 != 0)) > 2)
10676 	    {
10677 	      var0 = associate_trees (loc, var0, var1, code, atype);
10678 	      minus_var0 = associate_trees (loc, minus_var0, minus_var1,
10679 					    code, atype);
10680 	      con0 = associate_trees (loc, con0, con1, code, atype);
10681 	      minus_con0 = associate_trees (loc, minus_con0, minus_con1,
10682 					    code, atype);
10683 	      lit0 = associate_trees (loc, lit0, lit1, code, atype);
10684 	      minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10685 					    code, atype);
10686 
10687 	      if (minus_var0 && var0)
10688 		{
10689 		  var0 = associate_trees (loc, var0, minus_var0,
10690 					  MINUS_EXPR, atype);
10691 		  minus_var0 = 0;
10692 		}
10693 	      if (minus_con0 && con0)
10694 		{
10695 		  con0 = associate_trees (loc, con0, minus_con0,
10696 					  MINUS_EXPR, atype);
10697 		  minus_con0 = 0;
10698 		}
10699 
10700 	      /* Preserve the MINUS_EXPR if the negative part of the literal is
10701 		 greater than the positive part.  Otherwise, the multiplicative
10702 		 folding code (i.e. extract_muldiv) may be fooled when unsigned
10703 		 constants are subtracted, as in the following example:
10704 		 ((X*2 + 4) - 8U)/2.  */
10705 	      if (minus_lit0 && lit0)
10706 		{
10707 		  if (TREE_CODE (lit0) == INTEGER_CST
10708 		      && TREE_CODE (minus_lit0) == INTEGER_CST
10709 		      && tree_int_cst_lt (lit0, minus_lit0)
10710 		      /* But avoid ending up with only negated parts.  */
10711 		      && (var0 || con0))
10712 		    {
10713 		      minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10714 						    MINUS_EXPR, atype);
10715 		      lit0 = 0;
10716 		    }
10717 		  else
10718 		    {
10719 		      lit0 = associate_trees (loc, lit0, minus_lit0,
10720 					      MINUS_EXPR, atype);
10721 		      minus_lit0 = 0;
10722 		    }
10723 		}
10724 
10725 	      /* Don't introduce overflows through reassociation.  */
10726 	      if ((lit0 && TREE_OVERFLOW_P (lit0))
10727 		  || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
10728 		return NULL_TREE;
10729 
10730 	      /* Fold lit0 and minus_lit0 into con0 and minus_con0.  */
10731 	      con0 = associate_trees (loc, con0, lit0, code, atype);
10732 	      lit0 = 0;
10733 	      minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
10734 					    code, atype);
10735 	      minus_lit0 = 0;
10736 
10737 	      /* Eliminate minus_con0.  */
10738 	      if (minus_con0)
10739 		{
10740 		  if (con0)
10741 		    con0 = associate_trees (loc, con0, minus_con0,
10742 					    MINUS_EXPR, atype);
10743 		  else if (var0)
10744 		    var0 = associate_trees (loc, var0, minus_con0,
10745 					    MINUS_EXPR, atype);
10746 		  else
10747 		    gcc_unreachable ();
10748 		  minus_con0 = 0;
10749 		}
10750 
10751 	      /* Eliminate minus_var0.  */
10752 	      if (minus_var0)
10753 		{
10754 		  if (con0)
10755 		    con0 = associate_trees (loc, con0, minus_var0,
10756 					    MINUS_EXPR, atype);
10757 		  else
10758 		    gcc_unreachable ();
10759 		  minus_var0 = 0;
10760 		}
10761 
10762 	      return
10763 		fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10764 							      code, atype));
10765 	    }
10766 	}
10767 
10768       return NULL_TREE;
10769 
10770     case POINTER_DIFF_EXPR:
10771     case MINUS_EXPR:
10772       /* Fold &a[i] - &a[j] to i-j.  */
10773       if (TREE_CODE (arg0) == ADDR_EXPR
10774 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10775 	  && TREE_CODE (arg1) == ADDR_EXPR
10776 	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10777         {
10778 	  tree tem = fold_addr_of_array_ref_difference (loc, type,
10779 							TREE_OPERAND (arg0, 0),
10780 							TREE_OPERAND (arg1, 0),
10781 							code
10782 							== POINTER_DIFF_EXPR);
10783 	  if (tem)
10784 	    return tem;
10785 	}
10786 
10787       /* Further transformations are not for pointers.  */
10788       if (code == POINTER_DIFF_EXPR)
10789 	return NULL_TREE;
10790 
10791       /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
10792       if (TREE_CODE (arg0) == NEGATE_EXPR
10793 	  && negate_expr_p (op1)
10794 	  /* If arg0 is e.g. unsigned int and type is int, then this could
10795 	     introduce UB, because if A is INT_MIN at runtime, the original
10796 	     expression can be well defined while the latter is not.
10797 	     See PR83269.  */
10798 	  && !(ANY_INTEGRAL_TYPE_P (type)
10799 	       && TYPE_OVERFLOW_UNDEFINED (type)
10800 	       && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10801 	       && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10802 	return fold_build2_loc (loc, MINUS_EXPR, type, negate_expr (op1),
10803 			        fold_convert_loc (loc, type,
10804 						  TREE_OPERAND (arg0, 0)));
10805 
10806       /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10807 	 __complex__ ( x, -y ).  This is not the same for SNaNs or if
10808 	 signed zeros are involved.  */
10809       if (!HONOR_SNANS (element_mode (arg0))
10810 	  && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10811 	  && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10812         {
10813 	  tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10814 	  tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10815 	  tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10816 	  bool arg0rz = false, arg0iz = false;
10817 	  if ((arg0r && (arg0rz = real_zerop (arg0r)))
10818 	      || (arg0i && (arg0iz = real_zerop (arg0i))))
10819 	    {
10820 	      tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10821 	      tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10822 	      if (arg0rz && arg1i && real_zerop (arg1i))
10823 	        {
10824 		  tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10825 					 arg1r ? arg1r
10826 					 : build1 (REALPART_EXPR, rtype, arg1));
10827 		  tree ip = arg0i ? arg0i
10828 		    : build1 (IMAGPART_EXPR, rtype, arg0);
10829 		  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10830 		}
10831 	      else if (arg0iz && arg1r && real_zerop (arg1r))
10832 	        {
10833 		  tree rp = arg0r ? arg0r
10834 		    : build1 (REALPART_EXPR, rtype, arg0);
10835 		  tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10836 					 arg1i ? arg1i
10837 					 : build1 (IMAGPART_EXPR, rtype, arg1));
10838 		  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10839 		}
10840 	    }
10841 	}
10842 
10843       /* A - B -> A + (-B) if B is easily negatable.  */
10844       if (negate_expr_p (op1)
10845 	  && ! TYPE_OVERFLOW_SANITIZED (type)
10846 	  && ((FLOAT_TYPE_P (type)
10847                /* Avoid this transformation if B is a positive REAL_CST.  */
10848 	       && (TREE_CODE (op1) != REAL_CST
10849 		   || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
10850 	      || INTEGRAL_TYPE_P (type)))
10851 	return fold_build2_loc (loc, PLUS_EXPR, type,
10852 				fold_convert_loc (loc, type, arg0),
10853 				negate_expr (op1));
10854 
10855       /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10856 	 one.  Make sure the type is not saturating and has the signedness of
10857 	 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10858 	 ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
10859       if ((TREE_CODE (arg0) == MULT_EXPR
10860 	   || TREE_CODE (arg1) == MULT_EXPR)
10861 	  && !TYPE_SATURATING (type)
10862 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10863 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10864 	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
10865         {
10866 	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10867 	  if (tem)
10868 	    return tem;
10869 	}
10870 
10871       goto associate;
10872 
10873     case MULT_EXPR:
10874       if (! FLOAT_TYPE_P (type))
10875 	{
10876 	  /* Transform x * -C into -x * C if x is easily negatable.  */
10877 	  if (TREE_CODE (op1) == INTEGER_CST
10878 	      && tree_int_cst_sgn (op1) == -1
10879 	      && negate_expr_p (op0)
10880 	      && negate_expr_p (op1)
10881 	      && (tem = negate_expr (op1)) != op1
10882 	      && ! TREE_OVERFLOW (tem))
10883 	    return fold_build2_loc (loc, MULT_EXPR, type,
10884 				    fold_convert_loc (loc, type,
10885 						      negate_expr (op0)), tem);
10886 
10887 	  strict_overflow_p = false;
10888 	  if (TREE_CODE (arg1) == INTEGER_CST
10889 	      && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10890 					&strict_overflow_p)) != 0)
10891 	    {
10892 	      if (strict_overflow_p)
10893 		fold_overflow_warning (("assuming signed overflow does not "
10894 					"occur when simplifying "
10895 					"multiplication"),
10896 				       WARN_STRICT_OVERFLOW_MISC);
10897 	      return fold_convert_loc (loc, type, tem);
10898 	    }
10899 
10900 	  /* Optimize z * conj(z) for integer complex numbers.  */
10901 	  if (TREE_CODE (arg0) == CONJ_EXPR
10902 	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10903 	    return fold_mult_zconjz (loc, type, arg1);
10904 	  if (TREE_CODE (arg1) == CONJ_EXPR
10905 	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10906 	    return fold_mult_zconjz (loc, type, arg0);
10907 	}
10908       else
10909 	{
10910 	  /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10911 	     This is not the same for NaNs or if signed zeros are
10912 	     involved.  */
10913 	  if (!HONOR_NANS (arg0)
10914               && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10915 	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10916 	      && TREE_CODE (arg1) == COMPLEX_CST
10917 	      && real_zerop (TREE_REALPART (arg1)))
10918 	    {
10919 	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10920 	      if (real_onep (TREE_IMAGPART (arg1)))
10921 		return
10922 		  fold_build2_loc (loc, COMPLEX_EXPR, type,
10923 			       negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10924 							     rtype, arg0)),
10925 			       fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10926 	      else if (real_minus_onep (TREE_IMAGPART (arg1)))
10927 		return
10928 		  fold_build2_loc (loc, COMPLEX_EXPR, type,
10929 			       fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10930 			       negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10931 							     rtype, arg0)));
10932 	    }
10933 
10934 	  /* Optimize z * conj(z) for floating point complex numbers.
10935 	     Guarded by flag_unsafe_math_optimizations as non-finite
10936 	     imaginary components don't produce scalar results.  */
10937 	  if (flag_unsafe_math_optimizations
10938 	      && TREE_CODE (arg0) == CONJ_EXPR
10939 	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10940 	    return fold_mult_zconjz (loc, type, arg1);
10941 	  if (flag_unsafe_math_optimizations
10942 	      && TREE_CODE (arg1) == CONJ_EXPR
10943 	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10944 	    return fold_mult_zconjz (loc, type, arg0);
10945 	}
10946       goto associate;
10947 
10948     case BIT_IOR_EXPR:
10949       /* Canonicalize (X & C1) | C2.  */
10950       if (TREE_CODE (arg0) == BIT_AND_EXPR
10951 	  && TREE_CODE (arg1) == INTEGER_CST
10952 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10953 	{
10954 	  int width = TYPE_PRECISION (type), w;
10955 	  wide_int c1 = wi::to_wide (TREE_OPERAND (arg0, 1));
10956 	  wide_int c2 = wi::to_wide (arg1);
10957 
10958 	  /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
10959 	  if ((c1 & c2) == c1)
10960 	    return omit_one_operand_loc (loc, type, arg1,
10961 					 TREE_OPERAND (arg0, 0));
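	  /* E.g. (illustrative): (X & 0x3) | 0x7 becomes plain 0x7, since
	     every bit X can contribute through C1 is already set in C2.  */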
10962 
10963 	  wide_int msk = wi::mask (width, false,
10964 				   TYPE_PRECISION (TREE_TYPE (arg1)));
10965 
10966 	  /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
10967 	  if (wi::bit_and_not (msk, c1 | c2) == 0)
10968 	    {
10969 	      tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10970 	      return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
10971 	    }
10972 
10973 	  /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10974 	     unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10975 	     mode which allows further optimizations.  */
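	  /* E.g. (illustrative): (X & 0xf) | 0x6 becomes (X & 0x9) | 0x6;
	     the bits that C2 forces to 1 are dropped from the mask C1.  */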
10976 	  c1 &= msk;
10977 	  c2 &= msk;
10978 	  wide_int c3 = wi::bit_and_not (c1, c2);
10979 	  for (w = BITS_PER_UNIT; w <= width; w <<= 1)
10980 	    {
10981 	      wide_int mask = wi::mask (w, false,
10982 					TYPE_PRECISION (type));
10983 	      if (((c1 | c2) & mask) == mask
10984 		  && wi::bit_and_not (c1, mask) == 0)
10985 		{
10986 		  c3 = mask;
10987 		  break;
10988 		}
10989 	    }
10990 
10991 	  if (c3 != c1)
10992 	    {
10993 	      tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10994 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
10995 				     wide_int_to_tree (type, c3));
10996 	      return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
10997 	    }
10998 	}
10999 
11000       /* See if this can be simplified into a rotate first.  If that
11001 	 is unsuccessful, continue in the association code.  */
11002       goto bit_rotate;
11003 
11004     case BIT_XOR_EXPR:
11005       /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
11006       if (TREE_CODE (arg0) == BIT_AND_EXPR
11007 	  && INTEGRAL_TYPE_P (type)
11008 	  && integer_onep (TREE_OPERAND (arg0, 1))
11009 	  && integer_onep (arg1))
11010 	return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11011 				build_zero_cst (TREE_TYPE (arg0)));
11012 
11013       /* See if this can be simplified into a rotate first.  If that
11014 	 is unsuccessful, continue in the association code.  */
11015       goto bit_rotate;
11016 
11017     case BIT_AND_EXPR:
11018       /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
11019       if (TREE_CODE (arg0) == BIT_XOR_EXPR
11020 	  && INTEGRAL_TYPE_P (type)
11021 	  && integer_onep (TREE_OPERAND (arg0, 1))
11022 	  && integer_onep (arg1))
11023 	{
11024 	  tree tem2;
11025 	  tem = TREE_OPERAND (arg0, 0);
11026 	  tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11027 	  tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11028 				  tem, tem2);
11029 	  return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11030 				  build_zero_cst (TREE_TYPE (tem)));
11031 	}
11032       /* Fold ~X & 1 as (X & 1) == 0.  */
11033       if (TREE_CODE (arg0) == BIT_NOT_EXPR
11034 	  && INTEGRAL_TYPE_P (type)
11035 	  && integer_onep (arg1))
11036 	{
11037 	  tree tem2;
11038 	  tem = TREE_OPERAND (arg0, 0);
11039 	  tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11040 	  tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11041 				  tem, tem2);
11042 	  return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11043 				  build_zero_cst (TREE_TYPE (tem)));
11044 	}
11045       /* Fold !X & 1 as X == 0.  */
11046       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11047 	  && integer_onep (arg1))
11048 	{
11049 	  tem = TREE_OPERAND (arg0, 0);
11050 	  return fold_build2_loc (loc, EQ_EXPR, type, tem,
11051 				  build_zero_cst (TREE_TYPE (tem)));
11052 	}
11053 
11054       /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11055          multiple of 1 << CST.  */
11056       if (TREE_CODE (arg1) == INTEGER_CST)
11057 	{
11058 	  wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
11059 	  wide_int ncst1 = -cst1;
11060 	  if ((cst1 & ncst1) == ncst1
11061 	      && multiple_of_p (type, arg0,
11062 				wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11063 	    return fold_convert_loc (loc, type, arg0);
11064 	}
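      /* E.g. (illustrative): (X * 4) & -4 is just X * 4; the product is
	 always a multiple of 4, so the low bits cleared by the mask are
	 already zero.  */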
11065 
11066       /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11067          bits from CST2.  */
11068       if (TREE_CODE (arg1) == INTEGER_CST
11069 	  && TREE_CODE (arg0) == MULT_EXPR
11070 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11071 	{
11072 	  wi::tree_to_wide_ref warg1 = wi::to_wide (arg1);
11073 	  wide_int masked
11074 	    = mask_with_tz (type, warg1, wi::to_wide (TREE_OPERAND (arg0, 1)));
11075 
11076 	  if (masked == 0)
11077 	    return omit_two_operands_loc (loc, type, build_zero_cst (type),
11078 	                                  arg0, arg1);
11079 	  else if (masked != warg1)
11080 	    {
11081 	      /* Avoid the transform if arg1 is a mask of some
11082 	         mode which allows further optimizations.  */
11083 	      int pop = wi::popcount (warg1);
11084 	      if (!(pop >= BITS_PER_UNIT
11085 		    && pow2p_hwi (pop)
11086 		    && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11087 		return fold_build2_loc (loc, code, type, op0,
11088 					wide_int_to_tree (type, masked));
11089 	    }
11090 	}
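      /* E.g. (illustrative): (X * 16) & 7 is always 0, while (X * 16) & 0x13
	 drops the known-zero low bits and becomes (X * 16) & 0x10.  */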
11091 
11092       /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
11093       if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11094 	  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11095 	{
11096 	  prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11097 
11098 	  wide_int mask = wide_int::from (wi::to_wide (arg1), prec, UNSIGNED);
11099 	  if (mask == -1)
11100 	    return
11101 	      fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11102 	}
11103 
11104       goto associate;
11105 
11106     case RDIV_EXPR:
11107       /* Don't touch a floating-point divide by zero unless the mode
11108 	 of the constant can represent infinity.  */
11109       if (TREE_CODE (arg1) == REAL_CST
11110 	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11111 	  && real_zerop (arg1))
11112 	return NULL_TREE;
11113 
11114       /* (-A) / (-B) -> A / B  */
11115       if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11116 	return fold_build2_loc (loc, RDIV_EXPR, type,
11117 			    TREE_OPERAND (arg0, 0),
11118 			    negate_expr (arg1));
11119       if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11120 	return fold_build2_loc (loc, RDIV_EXPR, type,
11121 			    negate_expr (arg0),
11122 			    TREE_OPERAND (arg1, 0));
11123       return NULL_TREE;
11124 
11125     case TRUNC_DIV_EXPR:
11126       /* Fall through */
11127 
11128     case FLOOR_DIV_EXPR:
11129       /* Simplify A / (B << N) where A and B are positive and B is
11130 	 a power of 2, to A >> (N + log2(B)).  */
11131       strict_overflow_p = false;
11132       if (TREE_CODE (arg1) == LSHIFT_EXPR
11133 	  && (TYPE_UNSIGNED (type)
11134 	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11135 	{
11136 	  tree sval = TREE_OPERAND (arg1, 0);
11137 	  if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11138 	    {
11139 	      tree sh_cnt = TREE_OPERAND (arg1, 1);
11140 	      tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
11141 					 wi::exact_log2 (wi::to_wide (sval)));
11142 
11143 	      if (strict_overflow_p)
11144 		fold_overflow_warning (("assuming signed overflow does not "
11145 					"occur when simplifying A / (B << N)"),
11146 				       WARN_STRICT_OVERFLOW_MISC);
11147 
11148 	      sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11149 					sh_cnt, pow2);
11150 	      return fold_build2_loc (loc, RSHIFT_EXPR, type,
11151 				      fold_convert_loc (loc, type, arg0), sh_cnt);
11152 	    }
11153 	}
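      /* E.g. (illustrative): for unsigned A, A / (4 << N) becomes
	 A >> (N + 2).  */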
11154 
11155       /* Fall through */
11156 
11157     case ROUND_DIV_EXPR:
11158     case CEIL_DIV_EXPR:
11159     case EXACT_DIV_EXPR:
11160       if (integer_zerop (arg1))
11161 	return NULL_TREE;
11162 
11163       /* Convert -A / -B to A / B when the type is signed and overflow is
11164 	 undefined.  */
11165       if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11166 	  && TREE_CODE (op0) == NEGATE_EXPR
11167 	  && negate_expr_p (op1))
11168 	{
11169 	  if (ANY_INTEGRAL_TYPE_P (type))
11170 	    fold_overflow_warning (("assuming signed overflow does not occur "
11171 				    "when distributing negation across "
11172 				    "division"),
11173 				   WARN_STRICT_OVERFLOW_MISC);
11174 	  return fold_build2_loc (loc, code, type,
11175 				  fold_convert_loc (loc, type,
11176 						    TREE_OPERAND (arg0, 0)),
11177 				  negate_expr (op1));
11178 	}
11179       if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11180 	  && TREE_CODE (arg1) == NEGATE_EXPR
11181 	  && negate_expr_p (op0))
11182 	{
11183 	  if (ANY_INTEGRAL_TYPE_P (type))
11184 	    fold_overflow_warning (("assuming signed overflow does not occur "
11185 				    "when distributing negation across "
11186 				    "division"),
11187 				   WARN_STRICT_OVERFLOW_MISC);
11188 	  return fold_build2_loc (loc, code, type,
11189 				  negate_expr (op0),
11190 				  fold_convert_loc (loc, type,
11191 						    TREE_OPERAND (arg1, 0)));
11192 	}
11193 
11194       /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11195 	 operation, EXACT_DIV_EXPR.
11196 
11197 	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11198 	 At one time others generated faster code; it's not clear whether they
11199 	 still do after the last round of changes to the DIV code in expmed.c.  */
11200       if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11201 	  && multiple_of_p (type, arg0, arg1))
11202 	return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
11203 				fold_convert (type, arg0),
11204 				fold_convert (type, arg1));
11205 
11206       strict_overflow_p = false;
11207       if (TREE_CODE (arg1) == INTEGER_CST
11208 	  && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11209 				    &strict_overflow_p)) != 0)
11210 	{
11211 	  if (strict_overflow_p)
11212 	    fold_overflow_warning (("assuming signed overflow does not occur "
11213 				    "when simplifying division"),
11214 				   WARN_STRICT_OVERFLOW_MISC);
11215 	  return fold_convert_loc (loc, type, tem);
11216 	}
11217 
11218       return NULL_TREE;
11219 
11220     case CEIL_MOD_EXPR:
11221     case FLOOR_MOD_EXPR:
11222     case ROUND_MOD_EXPR:
11223     case TRUNC_MOD_EXPR:
11224       strict_overflow_p = false;
11225       if (TREE_CODE (arg1) == INTEGER_CST
11226 	  && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11227 				    &strict_overflow_p)) != 0)
11228 	{
11229 	  if (strict_overflow_p)
11230 	    fold_overflow_warning (("assuming signed overflow does not occur "
11231 				    "when simplifying modulus"),
11232 				   WARN_STRICT_OVERFLOW_MISC);
11233 	  return fold_convert_loc (loc, type, tem);
11234 	}
11235 
11236       return NULL_TREE;
11237 
11238     case LROTATE_EXPR:
11239     case RROTATE_EXPR:
11240     case RSHIFT_EXPR:
11241     case LSHIFT_EXPR:
11242       /* Since negative shift count is not well-defined,
11243 	 don't try to compute it in the compiler.  */
11244       if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11245 	return NULL_TREE;
11246 
11247       prec = element_precision (type);
11248 
11249       /* If we have a rotate of a bit operation with the rotate count and
11250 	 the second operand of the bit operation both constant,
11251 	 permute the two operations.  */
11252       if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11253 	  && (TREE_CODE (arg0) == BIT_AND_EXPR
11254 	      || TREE_CODE (arg0) == BIT_IOR_EXPR
11255 	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
11256 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11257 	{
11258 	  tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11259 	  tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11260 	  return fold_build2_loc (loc, TREE_CODE (arg0), type,
11261 				  fold_build2_loc (loc, code, type,
11262 						   arg00, arg1),
11263 				  fold_build2_loc (loc, code, type,
11264 						   arg01, arg1));
11265 	}
11266 
11267       /* Two consecutive rotates adding up to some integer multiple
11268 	 of the precision of the type can be ignored.  */
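      /* E.g. (illustrative): on a 32-bit type, (X r>> 5) r>> 27 is X,
	 because 5 + 27 is exactly the precision.  */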
11269       if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11270 	  && TREE_CODE (arg0) == RROTATE_EXPR
11271 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11272 	  && wi::umod_trunc (wi::to_wide (arg1)
11273 			     + wi::to_wide (TREE_OPERAND (arg0, 1)),
11274 			     prec) == 0)
11275 	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11276 
11277       return NULL_TREE;
11278 
11279     case MIN_EXPR:
11280     case MAX_EXPR:
11281       goto associate;
11282 
11283     case TRUTH_ANDIF_EXPR:
11284       /* Note that the operands of this must be ints
11285 	 and their values must be 0 or 1.
11286 	 ("true" is a fixed value perhaps depending on the language.)  */
11287       /* If first arg is constant zero, return it.  */
11288       if (integer_zerop (arg0))
11289 	return fold_convert_loc (loc, type, arg0);
11290       /* FALLTHRU */
11291     case TRUTH_AND_EXPR:
11292       /* If either arg is constant true, drop it.  */
11293       if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11294 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11295       if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11296 	  /* Preserve sequence points.  */
11297 	  && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11298 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11299       /* If second arg is constant zero, result is zero, but first arg
11300 	 must be evaluated.  */
11301       if (integer_zerop (arg1))
11302 	return omit_one_operand_loc (loc, type, arg1, arg0);
11303       /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11304 	 case will be handled here.  */
11305       if (integer_zerop (arg0))
11306 	return omit_one_operand_loc (loc, type, arg0, arg1);
11307 
11308       /* !X && X is always false.  */
11309       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11310 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11311 	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11312       /* X && !X is always false.  */
11313       if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11314 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11315 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11316 
11317       /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
11318 	 means A >= Y && A != MAX, but in this case we know that
11319 	 A < X <= MAX.  */
11320 
11321       if (!TREE_SIDE_EFFECTS (arg0)
11322 	  && !TREE_SIDE_EFFECTS (arg1))
11323 	{
11324 	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
11325 	  if (tem && !operand_equal_p (tem, arg0, 0))
11326 	    return fold_build2_loc (loc, code, type, tem, arg1);
11327 
11328 	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
11329 	  if (tem && !operand_equal_p (tem, arg1, 0))
11330 	    return fold_build2_loc (loc, code, type, arg0, tem);
11331 	}
11332 
11333       if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
11334           != NULL_TREE)
11335         return tem;
11336 
11337       return NULL_TREE;
11338 
11339     case TRUTH_ORIF_EXPR:
11340       /* Note that the operands of this must be ints
11341 	 and their values must be 0 or true.
11342 	 ("true" is a fixed value perhaps depending on the language.)  */
11343       /* If first arg is constant true, return it.  */
11344       if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11345 	return fold_convert_loc (loc, type, arg0);
11346       /* FALLTHRU */
11347     case TRUTH_OR_EXPR:
11348       /* If either arg is constant zero, drop it.  */
11349       if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11350 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11351       if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11352 	  /* Preserve sequence points.  */
11353 	  && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11354 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11355       /* If second arg is constant true, result is true, but we must
11356 	 evaluate first arg.  */
11357       if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11358 	return omit_one_operand_loc (loc, type, arg1, arg0);
11359       /* Likewise for first arg, but note this only occurs here for
11360 	 TRUTH_OR_EXPR.  */
11361       if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11362 	return omit_one_operand_loc (loc, type, arg0, arg1);
11363 
11364       /* !X || X is always true.  */
11365       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11366 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11367 	return omit_one_operand_loc (loc, type, integer_one_node, arg1);
11368       /* X || !X is always true.  */
11369       if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11370 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11371 	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
11372 
11373       /* (X && !Y) || (!X && Y) is X ^ Y */
11374       if (TREE_CODE (arg0) == TRUTH_AND_EXPR
11375 	  && TREE_CODE (arg1) == TRUTH_AND_EXPR)
11376         {
11377 	  tree a0, a1, l0, l1, n0, n1;
11378 
11379 	  a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11380 	  a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11381 
11382 	  l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11383 	  l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11384 
11385 	  n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
11386 	  n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
11387 
11388 	  if ((operand_equal_p (n0, a0, 0)
11389 	       && operand_equal_p (n1, a1, 0))
11390 	      || (operand_equal_p (n0, a1, 0)
11391 		  && operand_equal_p (n1, a0, 0)))
11392 	    return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
11393 	}
11394 
11395       if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
11396           != NULL_TREE)
11397         return tem;
11398 
11399       return NULL_TREE;
11400 
11401     case TRUTH_XOR_EXPR:
11402       /* If the second arg is constant zero, drop it.  */
11403       if (integer_zerop (arg1))
11404 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11405       /* If the second arg is constant true, this is a logical inversion.  */
11406       if (integer_onep (arg1))
11407 	{
11408 	  tem = invert_truthvalue_loc (loc, arg0);
11409 	  return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
11410 	}
11411       /* Identical arguments cancel to zero.  */
11412       if (operand_equal_p (arg0, arg1, 0))
11413 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11414 
11415       /* !X ^ X is always true.  */
11416       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11417 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11418 	return omit_one_operand_loc (loc, type, integer_one_node, arg1);
11419 
11420       /* X ^ !X is always true.  */
11421       if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11422 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11423 	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
11424 
11425       return NULL_TREE;
11426 
11427     case EQ_EXPR:
11428     case NE_EXPR:
11429       STRIP_NOPS (arg0);
11430       STRIP_NOPS (arg1);
11431 
11432       tem = fold_comparison (loc, code, type, op0, op1);
11433       if (tem != NULL_TREE)
11434 	return tem;
11435 
11436       /* bool_var != 1 becomes !bool_var. */
11437       if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11438           && code == NE_EXPR)
11439         return fold_convert_loc (loc, type,
11440 				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
11441 						  TREE_TYPE (arg0), arg0));
11442 
11443       /* bool_var == 0 becomes !bool_var. */
11444       if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11445           && code == EQ_EXPR)
11446         return fold_convert_loc (loc, type,
11447 				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
11448 						  TREE_TYPE (arg0), arg0));
11449 
11450       /* !exp != 0 becomes !exp */
11451       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
11452 	  && code == NE_EXPR)
11453         return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11454 
11455       /* If this is an EQ or NE comparison with zero and ARG0 is
11456 	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
11457 	 two operations, but the latter can be done in one less insn
11458 	 on machines that have only two-operand insns or on which a
11459 	 constant cannot be the first operand.  */
11460       if (TREE_CODE (arg0) == BIT_AND_EXPR
11461 	  && integer_zerop (arg1))
11462 	{
11463 	  tree arg00 = TREE_OPERAND (arg0, 0);
11464 	  tree arg01 = TREE_OPERAND (arg0, 1);
11465 	  if (TREE_CODE (arg00) == LSHIFT_EXPR
11466 	      && integer_onep (TREE_OPERAND (arg00, 0)))
11467 	    {
11468 	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
11469 				      arg01, TREE_OPERAND (arg00, 1));
11470 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11471 				 build_int_cst (TREE_TYPE (arg0), 1));
11472 	      return fold_build2_loc (loc, code, type,
11473 				  fold_convert_loc (loc, TREE_TYPE (arg1), tem),
11474 				  arg1);
11475 	    }
11476 	  else if (TREE_CODE (arg01) == LSHIFT_EXPR
11477 		   && integer_onep (TREE_OPERAND (arg01, 0)))
11478 	    {
11479 	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
11480 				      arg00, TREE_OPERAND (arg01, 1));
11481 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11482 				 build_int_cst (TREE_TYPE (arg0), 1));
11483 	      return fold_build2_loc (loc, code, type,
11484 				  fold_convert_loc (loc, TREE_TYPE (arg1), tem),
11485 				  arg1);
11486 	    }
11487 	}
11488 
11489       /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
11490 	 C1 is a valid shift constant, and C2 is a power of two, i.e.
11491 	 a single bit.  */
11492       if (TREE_CODE (arg0) == BIT_AND_EXPR
11493 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
11494 	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11495 	     == INTEGER_CST
11496 	  && integer_pow2p (TREE_OPERAND (arg0, 1))
11497 	  && integer_zerop (arg1))
11498 	{
11499 	  tree itype = TREE_TYPE (arg0);
11500 	  tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11501 	  prec = TYPE_PRECISION (itype);
11502 
11503 	  /* Check for a valid shift count.  */
11504 	  if (wi::ltu_p (wi::to_wide (arg001), prec))
11505 	    {
11506 	      tree arg01 = TREE_OPERAND (arg0, 1);
11507 	      tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11508 	      unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
11509 	      /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
11510 		 can be rewritten as (X & (C2 << C1)) != 0.  */
11511 	      if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
11512 		{
11513 		  tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
11514 		  tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
11515 		  return fold_build2_loc (loc, code, type, tem,
11516 					  fold_convert_loc (loc, itype, arg1));
11517 		}
11518 	      /* Otherwise, for signed (arithmetic) shifts,
11519 		 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
11520 		 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
11521 	      else if (!TYPE_UNSIGNED (itype))
11522 		return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
11523 				    arg000, build_int_cst (itype, 0));
11524 	      /* Otherwise, for unsigned (logical) shifts,
11525 		 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
11526 		 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
11527 	      else
11528 		return omit_one_operand_loc (loc, type,
11529 					 code == EQ_EXPR ? integer_one_node
11530 							 : integer_zero_node,
11531 					 arg000);
11532 	    }
11533 	}
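      /* E.g. (illustrative): for a 32-bit int X, ((X >> 3) & 4) != 0
	 becomes (X & 32) != 0, while ((X >> 31) & 2) != 0 becomes
	 X < 0.  */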
11534 
11535       /* If this is a comparison of a field, we may be able to simplify it.  */
11536       if ((TREE_CODE (arg0) == COMPONENT_REF
11537 	   || TREE_CODE (arg0) == BIT_FIELD_REF)
11538 	  /* Handle the constant case even without -O
11539 	     to make sure the warnings are given.  */
11540 	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
11541 	{
11542 	  t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
11543 	  if (t1)
11544 	    return t1;
11545 	}
11546 
11547       /* Optimize comparisons of strlen vs zero to a compare of the
11548 	 first character of the string vs zero.  To wit,
11549 		strlen(ptr) == 0   =>  *ptr == 0
11550 		strlen(ptr) != 0   =>  *ptr != 0
11551 	 Other cases should reduce to one of these two (or a constant)
11552 	 due to the return value of strlen being unsigned.  */
11553       if (TREE_CODE (arg0) == CALL_EXPR && integer_zerop (arg1))
11554 	{
11555 	  tree fndecl = get_callee_fndecl (arg0);
11556 
11557 	  if (fndecl
11558 	      && fndecl_built_in_p (fndecl, BUILT_IN_STRLEN)
11559 	      && call_expr_nargs (arg0) == 1
11560 	      && (TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
11561 		  == POINTER_TYPE))
11562 	    {
11563 	      tree ptrtype
11564 		= build_pointer_type (build_qualified_type (char_type_node,
11565 							    TYPE_QUAL_CONST));
11566 	      tree ptr = fold_convert_loc (loc, ptrtype,
11567 					   CALL_EXPR_ARG (arg0, 0));
11568 	      tree iref = build_fold_indirect_ref_loc (loc, ptr);
11569 	      return fold_build2_loc (loc, code, type, iref,
11570 				      build_int_cst (TREE_TYPE (iref), 0));
11571 	    }
11572 	}
11573 
11574       /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
11575 	 of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
11576       if (TREE_CODE (arg0) == RSHIFT_EXPR
11577 	  && integer_zerop (arg1)
11578 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11579 	{
11580 	  tree arg00 = TREE_OPERAND (arg0, 0);
11581 	  tree arg01 = TREE_OPERAND (arg0, 1);
11582 	  tree itype = TREE_TYPE (arg00);
11583 	  if (wi::to_wide (arg01) == element_precision (itype) - 1)
11584 	    {
11585 	      if (TYPE_UNSIGNED (itype))
11586 		{
11587 		  itype = signed_type_for (itype);
11588 		  arg00 = fold_convert_loc (loc, itype, arg00);
11589 		}
11590 	      return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
11591 				  type, arg00, build_zero_cst (itype));
11592 	    }
11593 	}
11594 
11595       /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
11596 	 (X & C) == 0 when C is a single bit.  */
11597       if (TREE_CODE (arg0) == BIT_AND_EXPR
11598 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
11599 	  && integer_zerop (arg1)
11600 	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
11601 	{
11602 	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
11603 				 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
11604 				 TREE_OPERAND (arg0, 1));
11605 	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
11606 				  type, tem,
11607 				  fold_convert_loc (loc, TREE_TYPE (arg0),
11608 						    arg1));
11609 	}
11610 
11611       /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
11612 	 constant C is a power of two, i.e. a single bit.  */
11613       if (TREE_CODE (arg0) == BIT_XOR_EXPR
11614 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11615 	  && integer_zerop (arg1)
11616 	  && integer_pow2p (TREE_OPERAND (arg0, 1))
11617 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11618 			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11619 	{
11620 	  tree arg00 = TREE_OPERAND (arg0, 0);
11621 	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11622 			      arg00, build_int_cst (TREE_TYPE (arg00), 0));
11623 	}
11624 
11625       /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
11626 	 when C is a power of two, i.e. a single bit.  */
11627       if (TREE_CODE (arg0) == BIT_AND_EXPR
11628 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
11629 	  && integer_zerop (arg1)
11630 	  && integer_pow2p (TREE_OPERAND (arg0, 1))
11631 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11632 			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11633 	{
11634 	  tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11635 	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
11636 			     arg000, TREE_OPERAND (arg0, 1));
11637 	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11638 			      tem, build_int_cst (TREE_TYPE (tem), 0));
11639 	}
11640 
11641       if (integer_zerop (arg1)
11642 	  && tree_expr_nonzero_p (arg0))
11643         {
11644 	  tree res = constant_boolean_node (code == NE_EXPR, type);
11645 	  return omit_one_operand_loc (loc, type, res, arg0);
11646 	}
11647 
11648       /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries.  */
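      /* E.g. (illustrative): (X & 8) == (Y & 8) becomes
	 ((X ^ Y) & 8) == 0, i.e. a test that bit 3 of X and Y agrees.  */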
11649       if (TREE_CODE (arg0) == BIT_AND_EXPR
11650 	  && TREE_CODE (arg1) == BIT_AND_EXPR)
11651 	{
11652 	  tree arg00 = TREE_OPERAND (arg0, 0);
11653 	  tree arg01 = TREE_OPERAND (arg0, 1);
11654 	  tree arg10 = TREE_OPERAND (arg1, 0);
11655 	  tree arg11 = TREE_OPERAND (arg1, 1);
11656 	  tree itype = TREE_TYPE (arg0);
11657 
11658 	  if (operand_equal_p (arg01, arg11, 0))
11659 	    {
11660 	      tem = fold_convert_loc (loc, itype, arg10);
11661 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
11662 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
11663 	      return fold_build2_loc (loc, code, type, tem,
11664 				      build_zero_cst (itype));
11665 	    }
11666 	  if (operand_equal_p (arg01, arg10, 0))
11667 	    {
11668 	      tem = fold_convert_loc (loc, itype, arg11);
11669 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
11670 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg01);
11671 	      return fold_build2_loc (loc, code, type, tem,
11672 				      build_zero_cst (itype));
11673 	    }
11674 	  if (operand_equal_p (arg00, arg11, 0))
11675 	    {
11676 	      tem = fold_convert_loc (loc, itype, arg10);
11677 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
11678 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
11679 	      return fold_build2_loc (loc, code, type, tem,
11680 				      build_zero_cst (itype));
11681 	    }
11682 	  if (operand_equal_p (arg00, arg10, 0))
11683 	    {
11684 	      tem = fold_convert_loc (loc, itype, arg11);
11685 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01, tem);
11686 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, tem, arg00);
11687 	      return fold_build2_loc (loc, code, type, tem,
11688 				      build_zero_cst (itype));
11689 	    }
11690 	}
11691 
11692       if (TREE_CODE (arg0) == BIT_XOR_EXPR
11693 	  && TREE_CODE (arg1) == BIT_XOR_EXPR)
11694 	{
11695 	  tree arg00 = TREE_OPERAND (arg0, 0);
11696 	  tree arg01 = TREE_OPERAND (arg0, 1);
11697 	  tree arg10 = TREE_OPERAND (arg1, 0);
11698 	  tree arg11 = TREE_OPERAND (arg1, 1);
11699 	  tree itype = TREE_TYPE (arg0);
11700 
11701 	  /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
11702 	     operand_equal_p guarantees no side-effects so we don't need
11703 	     to use omit_one_operand on Z.  */
11704 	  if (operand_equal_p (arg01, arg11, 0))
11705 	    return fold_build2_loc (loc, code, type, arg00,
11706 				    fold_convert_loc (loc, TREE_TYPE (arg00),
11707 						      arg10));
11708 	  if (operand_equal_p (arg01, arg10, 0))
11709 	    return fold_build2_loc (loc, code, type, arg00,
11710 				    fold_convert_loc (loc, TREE_TYPE (arg00),
11711 						      arg11));
11712 	  if (operand_equal_p (arg00, arg11, 0))
11713 	    return fold_build2_loc (loc, code, type, arg01,
11714 				    fold_convert_loc (loc, TREE_TYPE (arg01),
11715 						      arg10));
11716 	  if (operand_equal_p (arg00, arg10, 0))
11717 	    return fold_build2_loc (loc, code, type, arg01,
11718 				    fold_convert_loc (loc, TREE_TYPE (arg01),
11719 						      arg11));
11720 
11721 	  /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y.  */
11722 	  if (TREE_CODE (arg01) == INTEGER_CST
11723 	      && TREE_CODE (arg11) == INTEGER_CST)
11724 	    {
11725 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
11726 				     fold_convert_loc (loc, itype, arg11));
11727 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
11728 	      return fold_build2_loc (loc, code, type, tem,
11729 				      fold_convert_loc (loc, itype, arg10));
11730 	    }
11731 	}
11732 
11733       /* Attempt to simplify equality/inequality comparisons of complex
11734 	 values.  Only lower the comparison if the result is known or
11735 	 can be simplified to a single scalar comparison.  */
11736       if ((TREE_CODE (arg0) == COMPLEX_EXPR
11737 	   || TREE_CODE (arg0) == COMPLEX_CST)
11738 	  && (TREE_CODE (arg1) == COMPLEX_EXPR
11739 	      || TREE_CODE (arg1) == COMPLEX_CST))
11740 	{
11741 	  tree real0, imag0, real1, imag1;
11742 	  tree rcond, icond;
11743 
11744 	  if (TREE_CODE (arg0) == COMPLEX_EXPR)
11745 	    {
11746 	      real0 = TREE_OPERAND (arg0, 0);
11747 	      imag0 = TREE_OPERAND (arg0, 1);
11748 	    }
11749 	  else
11750 	    {
11751 	      real0 = TREE_REALPART (arg0);
11752 	      imag0 = TREE_IMAGPART (arg0);
11753 	    }
11754 
11755 	  if (TREE_CODE (arg1) == COMPLEX_EXPR)
11756 	    {
11757 	      real1 = TREE_OPERAND (arg1, 0);
11758 	      imag1 = TREE_OPERAND (arg1, 1);
11759 	    }
11760 	  else
11761 	    {
11762 	      real1 = TREE_REALPART (arg1);
11763 	      imag1 = TREE_IMAGPART (arg1);
11764 	    }
11765 
11766 	  rcond = fold_binary_loc (loc, code, type, real0, real1);
11767 	  if (rcond && TREE_CODE (rcond) == INTEGER_CST)
11768 	    {
11769 	      if (integer_zerop (rcond))
11770 		{
11771 		  if (code == EQ_EXPR)
11772 		    return omit_two_operands_loc (loc, type, boolean_false_node,
11773 					      imag0, imag1);
11774 		  return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
11775 		}
11776 	      else
11777 		{
11778 		  if (code == NE_EXPR)
11779 		    return omit_two_operands_loc (loc, type, boolean_true_node,
11780 					      imag0, imag1);
11781 		  return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
11782 		}
11783 	    }
11784 
11785 	  icond = fold_binary_loc (loc, code, type, imag0, imag1);
11786 	  if (icond && TREE_CODE (icond) == INTEGER_CST)
11787 	    {
11788 	      if (integer_zerop (icond))
11789 		{
11790 		  if (code == EQ_EXPR)
11791 		    return omit_two_operands_loc (loc, type, boolean_false_node,
11792 					      real0, real1);
11793 		  return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
11794 		}
11795 	      else
11796 		{
11797 		  if (code == NE_EXPR)
11798 		    return omit_two_operands_loc (loc, type, boolean_true_node,
11799 					      real0, real1);
11800 		  return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
11801 		}
11802 	    }
11803 	}
11804 
11805       return NULL_TREE;
11806 
11807     case LT_EXPR:
11808     case GT_EXPR:
11809     case LE_EXPR:
11810     case GE_EXPR:
11811       tem = fold_comparison (loc, code, type, op0, op1);
11812       if (tem != NULL_TREE)
11813 	return tem;
11814 
11815       /* Transform comparisons of the form X +- C CMP X.  */
11816       if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11817 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11818 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11819 	  && !HONOR_SNANS (arg0))
11820 	{
11821 	  tree arg01 = TREE_OPERAND (arg0, 1);
11822 	  enum tree_code code0 = TREE_CODE (arg0);
11823 	  int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11824 
11825 	  /* (X - c) > X becomes false.  */
11826 	  if (code == GT_EXPR
11827 	      && ((code0 == MINUS_EXPR && is_positive >= 0)
11828 		  || (code0 == PLUS_EXPR && is_positive <= 0)))
11829 	    return constant_boolean_node (0, type);
11830 
11831 	  /* Likewise (X + c) < X becomes false.  */
11832 	  if (code == LT_EXPR
11833 	      && ((code0 == PLUS_EXPR && is_positive >= 0)
11834 		  || (code0 == MINUS_EXPR && is_positive <= 0)))
11835 	    return constant_boolean_node (0, type);
11836 
11837 	  /* Convert (X - c) <= X to true.  */
11838 	  if (!HONOR_NANS (arg1)
11839 	      && code == LE_EXPR
11840 	      && ((code0 == MINUS_EXPR && is_positive >= 0)
11841 		  || (code0 == PLUS_EXPR && is_positive <= 0)))
11842 	    return constant_boolean_node (1, type);
11843 
11844 	  /* Convert (X + c) >= X to true.  */
11845 	  if (!HONOR_NANS (arg1)
11846 	      && code == GE_EXPR
11847 	      && ((code0 == PLUS_EXPR && is_positive >= 0)
11848 		  || (code0 == MINUS_EXPR && is_positive <= 0)))
11849 	    return constant_boolean_node (1, type);
11850 	}
11851 
11852       /* If we are comparing an ABS_EXPR with a constant, we can
11853 	 convert all the cases into explicit comparisons, but they may
11854 	 well not be faster than doing the ABS and one comparison.
11855 	 But ABS (X) <= C is a range comparison, which becomes a subtraction
11856 	 and a comparison, and is probably faster.  */
11857       if (code == LE_EXPR
11858 	  && TREE_CODE (arg1) == INTEGER_CST
11859 	  && TREE_CODE (arg0) == ABS_EXPR
11860 	  && ! TREE_SIDE_EFFECTS (arg0)
11861 	  && (tem = negate_expr (arg1)) != 0
11862 	  && TREE_CODE (tem) == INTEGER_CST
11863 	  && !TREE_OVERFLOW (tem))
11864 	return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
11865 			    build2 (GE_EXPR, type,
11866 				    TREE_OPERAND (arg0, 0), tem),
11867 			    build2 (LE_EXPR, type,
11868 				    TREE_OPERAND (arg0, 0), arg1));
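      /* E.g. (illustrative): abs (x) <= 7 becomes x >= -7 && x <= 7.  */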
11869 
11870       /* Convert ABS_EXPR<x> >= 0 to true.  */
11871       strict_overflow_p = false;
11872       if (code == GE_EXPR
11873 	  && (integer_zerop (arg1)
11874 	      || (! HONOR_NANS (arg0)
11875 		  && real_zerop (arg1)))
11876 	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11877 	{
11878 	  if (strict_overflow_p)
11879 	    fold_overflow_warning (("assuming signed overflow does not occur "
11880 				    "when simplifying comparison of "
11881 				    "absolute value and zero"),
11882 				   WARN_STRICT_OVERFLOW_CONDITIONAL);
11883 	  return omit_one_operand_loc (loc, type,
11884 				       constant_boolean_node (true, type),
11885 				       arg0);
11886 	}
11887 
11888       /* Convert ABS_EXPR<x> < 0 to false.  */
11889       strict_overflow_p = false;
11890       if (code == LT_EXPR
11891 	  && (integer_zerop (arg1) || real_zerop (arg1))
11892 	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11893 	{
11894 	  if (strict_overflow_p)
11895 	    fold_overflow_warning (("assuming signed overflow does not occur "
11896 				    "when simplifying comparison of "
11897 				    "absolute value and zero"),
11898 				   WARN_STRICT_OVERFLOW_CONDITIONAL);
11899 	  return omit_one_operand_loc (loc, type,
11900 				       constant_boolean_node (false, type),
11901 				       arg0);
11902 	}
11903 
11904       /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11905 	 and similarly for >= into !=.  */
11906       if ((code == LT_EXPR || code == GE_EXPR)
11907 	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
11908 	  && TREE_CODE (arg1) == LSHIFT_EXPR
11909 	  && integer_onep (TREE_OPERAND (arg1, 0)))
11910 	return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11911 			   build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11912 				   TREE_OPERAND (arg1, 1)),
11913 			   build_zero_cst (TREE_TYPE (arg0)));
11914 
11915       /* Similarly for X < (cast) (1 << Y).  But the cast can't be
11916 	 narrowing, otherwise Y might be >= # of bits in X's type and thus
11917 	 e.g. (unsigned char) (1 << Y) for Y == 15 might be 0.
11918 	 If the cast is widening, then 1 << Y should have unsigned type,
11919 	 otherwise if Y is the number of bits in the signed shift type minus
11920 	 1, we can't optimize this.  E.g. (unsigned long long) (1 << Y) for
11921 	 Y == 31 might be 0xffffffff80000000.  */
11922       if ((code == LT_EXPR || code == GE_EXPR)
11923 	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
11924 	  && CONVERT_EXPR_P (arg1)
11925 	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11926 	  && (element_precision (TREE_TYPE (arg1))
11927 	      >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
11928 	  && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
11929 	      || (element_precision (TREE_TYPE (arg1))
11930 		  == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
11931 	  && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11932 	{
11933 	  tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11934 			TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
11935 	  return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11936 			     fold_convert_loc (loc, TREE_TYPE (arg0), tem),
11937 			     build_zero_cst (TREE_TYPE (arg0)));
11938 	}
11939 
11940       return NULL_TREE;
11941 
11942     case UNORDERED_EXPR:
11943     case ORDERED_EXPR:
11944     case UNLT_EXPR:
11945     case UNLE_EXPR:
11946     case UNGT_EXPR:
11947     case UNGE_EXPR:
11948     case UNEQ_EXPR:
11949     case LTGT_EXPR:
11950       /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
11951       {
11952 	tree targ0 = strip_float_extensions (arg0);
11953 	tree targ1 = strip_float_extensions (arg1);
11954 	tree newtype = TREE_TYPE (targ0);
11955 
11956 	if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11957 	  newtype = TREE_TYPE (targ1);
11958 
11959 	if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11960 	  return fold_build2_loc (loc, code, type,
11961 			      fold_convert_loc (loc, newtype, targ0),
11962 			      fold_convert_loc (loc, newtype, targ1));
11963       }
11964 
11965       return NULL_TREE;
11966 
11967     case COMPOUND_EXPR:
11968       /* When pedantic, a compound expression can be neither an lvalue
11969 	 nor an integer constant expression.  */
11970       if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11971 	return NULL_TREE;
11972       /* Don't let (0, 0) be a null pointer constant.  */
11973       tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11974 				 : fold_convert_loc (loc, type, arg1);
11975       return pedantic_non_lvalue_loc (loc, tem);
11976 
11977     case ASSERT_EXPR:
11978       /* An ASSERT_EXPR should never be passed to fold_binary.  */
11979       gcc_unreachable ();
11980 
11981     default:
11982       return NULL_TREE;
11983     } /* switch (code) */
11984 }
11985 
11986 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11987    ((A & N) + B) & M -> (A + B) & M
11988    Similarly if (N & M) == 0,
11989    ((A | N) + B) & M -> (A + B) & M
11990    and for - instead of + (or unary - instead of +)
11991    and/or ^ instead of |.
11992    If B is constant and (B & M) == 0, fold into A & M.
11993 
11994    This function is a helper for match.pd patterns.  It returns the
11995    type in which the simplified operation should be performed, or
11996    NULL_TREE if no optimization is possible.
11997 
11998    ARG1 is M above, ARG00 is left operand of +/-, if CODE00 is BIT_*_EXPR,
11999    then ARG00{0,1} are operands of that bitop, otherwise CODE00 is ERROR_MARK.
12000    Similarly for ARG01, CODE01 and ARG01{0,1}, just for the right operand of
12001    +/-.  */
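/* E.g. (illustrative): with M == 0xff and N == 0x100, the expression
   ((A | 0x100) + B) & 0xff folds to (A + B) & 0xff, because carries in
   the addition only propagate upwards, so bit 8 never reaches the
   masked result.  */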
12002 tree
12003 fold_bit_and_mask (tree type, tree arg1, enum tree_code code,
12004 		   tree arg00, enum tree_code code00, tree arg000, tree arg001,
12005 		   tree arg01, enum tree_code code01, tree arg010, tree arg011,
12006 		   tree *pmop)
12007 {
12008   gcc_assert (TREE_CODE (arg1) == INTEGER_CST);
12009   gcc_assert (code == PLUS_EXPR || code == MINUS_EXPR || code == NEGATE_EXPR);
12010   wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
12011   if (~cst1 == 0
12012       || (cst1 & (cst1 + 1)) != 0
12013       || !INTEGRAL_TYPE_P (type)
12014       || (!TYPE_OVERFLOW_WRAPS (type)
12015 	  && TREE_CODE (type) != INTEGER_TYPE)
12016       || (wi::max_value (type) & cst1) != cst1)
12017     return NULL_TREE;
12018 
12019   enum tree_code codes[2] = { code00, code01 };
12020   tree arg0xx[4] = { arg000, arg001, arg010, arg011 };
12021   int which = 0;
12022   wide_int cst0;
12023 
12024   /* Now we know that arg0 is (C + D) or (C - D) or -C and
12025      arg1 (M) is equal to (1LL << cst) - 1.
12026      Store C into PMOP[0] and D into PMOP[1].  */
12027   pmop[0] = arg00;
12028   pmop[1] = arg01;
12029   which = code != NEGATE_EXPR;
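  /* For unary NEGATE_EXPR only pmop[0] is meaningful, so the loop below
     visits just index 0; for PLUS_EXPR and MINUS_EXPR it visits index 1
     first and then index 0.  */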
12030 
12031   for (; which >= 0; which--)
12032     switch (codes[which])
12033       {
12034       case BIT_AND_EXPR:
12035       case BIT_IOR_EXPR:
12036       case BIT_XOR_EXPR:
12037 	gcc_assert (TREE_CODE (arg0xx[2 * which + 1]) == INTEGER_CST);
12038 	cst0 = wi::to_wide (arg0xx[2 * which + 1]) & cst1;
12039 	if (codes[which] == BIT_AND_EXPR)
12040 	  {
12041 	    if (cst0 != cst1)
12042 	      break;
12043 	  }
12044 	else if (cst0 != 0)
12045 	  break;
12046 	/* If C or D is of the form (A & N) where
12047 	   (N & M) == M, or of the form (A | N) or
12048 	   (A ^ N) where (N & M) == 0, replace it with A.  */
12049 	pmop[which] = arg0xx[2 * which];
12050 	break;
12051       case ERROR_MARK:
12052 	if (TREE_CODE (pmop[which]) != INTEGER_CST)
12053 	  break;
12054 	/* If C or D is a N where (N & M) == 0, it can be
12055 	   omitted (replaced with 0).  */
12056 	if ((code == PLUS_EXPR
12057 	     || (code == MINUS_EXPR && which == 0))
12058 	    && (cst1 & wi::to_wide (pmop[which])) == 0)
12059 	  pmop[which] = build_int_cst (type, 0);
12060 	/* Similarly, with C - N where (-N & M) == 0.  */
12061 	if (code == MINUS_EXPR
12062 	    && which == 1
12063 	    && (cst1 & -wi::to_wide (pmop[which])) == 0)
12064 	  pmop[which] = build_int_cst (type, 0);
12065 	break;
12066       default:
12067 	gcc_unreachable ();
12068       }
12069 
12070   /* Only build anything new if we optimized one or both arguments above.  */
12071   if (pmop[0] == arg00 && pmop[1] == arg01)
12072     return NULL_TREE;
12073 
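  /* Stripping the inner bit operations may introduce signed overflow in
     the +/- that was not possible before, so unless overflow already
     wraps in TYPE, ask for the operation to be rewritten in the
     corresponding unsigned type.  */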
12074   if (TYPE_OVERFLOW_WRAPS (type))
12075     return type;
12076   else
12077     return unsigned_type_for (type);
12078 }
12079 
12080 /* Used by contains_label_p and contains_label_1.  */
12081 
12082 struct contains_label_data
12083 {
12084   hash_set<tree> *pset;
12085   bool inside_switch_p;
12086 };
12087 
12088 /* Callback for walk_tree, looking for LABEL_EXPR.  Return *TP if it is
12089    a LABEL_EXPR or CASE_LABEL_EXPR not inside of another SWITCH_EXPR; otherwise
12090    return NULL_TREE.  Do not check the subtrees of GOTO_EXPR.  */
12091 
12092 static tree
12093 contains_label_1 (tree *tp, int *walk_subtrees, void *data)
12094 {
12095   contains_label_data *d = (contains_label_data *) data;
12096   switch (TREE_CODE (*tp))
12097     {
12098     case LABEL_EXPR:
12099       return *tp;
12100 
12101     case CASE_LABEL_EXPR:
12102       if (!d->inside_switch_p)
12103 	return *tp;
12104       return NULL_TREE;
12105 
12106     case SWITCH_EXPR:
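      /* Case labels in this switch's body are reachable only through
	 the switch itself, so they do not make the sub-tree a jump
	 target from outside; labels in the condition expression still
	 do.  */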
12107       if (!d->inside_switch_p)
12108 	{
12109 	  if (walk_tree (&SWITCH_COND (*tp), contains_label_1, data, d->pset))
12110 	    return *tp;
12111 	  d->inside_switch_p = true;
12112 	  if (walk_tree (&SWITCH_BODY (*tp), contains_label_1, data, d->pset))
12113 	    return *tp;
12114 	  d->inside_switch_p = false;
12115 	  *walk_subtrees = 0;
12116 	}
12117       return NULL_TREE;
12118 
12119     case GOTO_EXPR:
12120       *walk_subtrees = 0;
12121       return NULL_TREE;
12122 
12123     default:
12124       return NULL_TREE;
12125     }
12126 }
12127 
12128 /* Return whether the sub-tree ST contains a label which is accessible from
12129    outside the sub-tree.  */
12130 
12131 static bool
12132 contains_label_p (tree st)
12133 {
12134   hash_set<tree> pset;
12135   contains_label_data data = { &pset, false };
12136   return walk_tree (&st, contains_label_1, &data, &pset) != NULL_TREE;
12137 }
12138 
12139 /* Fold a ternary expression of code CODE and type TYPE with operands
12140    OP0, OP1, and OP2.  Return the folded expression if folding is
12141    successful.  Otherwise, return NULL_TREE.  */
12142 
12143 tree
12144 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
12145 		  tree op0, tree op1, tree op2)
12146 {
12147   tree tem;
12148   tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
12149   enum tree_code_class kind = TREE_CODE_CLASS (code);
12150 
12151   gcc_assert (IS_EXPR_CODE_CLASS (kind)
12152 	      && TREE_CODE_LENGTH (code) == 3);
12153 
12154   /* If this is a commutative operation, and OP0 is a constant, move it
12155      to OP1 to reduce the number of tests below.  */
12156   if (commutative_ternary_tree_code (code)
12157       && tree_swap_operands_p (op0, op1))
12158     return fold_build3_loc (loc, code, type, op1, op0, op2);
12159 
12160   tem = generic_simplify (loc, code, type, op0, op1, op2);
12161   if (tem)
12162     return tem;
12163 
12164   /* Strip any conversions that don't change the mode.  This is safe
12165      for every expression, except for a comparison expression because
12166      its signedness is derived from its operands.  So, in the latter
12167      case, only strip conversions that don't change the signedness.
12168 
12169      Note that this is done as an internal manipulation within the
12170      constant folder, in order to find the simplest representation of
12171      the arguments so that their form can be studied.  In any case,
12172      the appropriate type conversions should be put back in the tree
12173      that will get out of the constant folder.  */
12174   if (op0)
12175     {
12176       arg0 = op0;
12177       STRIP_NOPS (arg0);
12178     }
12179 
12180   if (op1)
12181     {
12182       arg1 = op1;
12183       STRIP_NOPS (arg1);
12184     }
12185 
12186   if (op2)
12187     {
12188       arg2 = op2;
12189       STRIP_NOPS (arg2);
12190     }
12191 
12192   switch (code)
12193     {
12194     case COMPONENT_REF:
12195       if (TREE_CODE (arg0) == CONSTRUCTOR
12196 	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12197 	{
12198 	  unsigned HOST_WIDE_INT idx;
12199 	  tree field, value;
12200 	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12201 	    if (field == arg1)
12202 	      return value;
12203 	}
12204       return NULL_TREE;
12205 
12206     case COND_EXPR:
12207     case VEC_COND_EXPR:
12208       /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12209 	 so all simple results must be passed through pedantic_non_lvalue.  */
12210       if (TREE_CODE (arg0) == INTEGER_CST)
12211 	{
12212 	  tree unused_op = integer_zerop (arg0) ? op1 : op2;
12213 	  tem = integer_zerop (arg0) ? op2 : op1;
12214 	  /* Only optimize constant conditions when the selected branch
12215 	     has the same type as the COND_EXPR.  This avoids optimizing
12216              away "c ? x : throw", where the throw has a void type.
12217              Avoid throwing away that operand which contains label.  */
12218           if ((!TREE_SIDE_EFFECTS (unused_op)
12219                || !contains_label_p (unused_op))
12220               && (! VOID_TYPE_P (TREE_TYPE (tem))
12221                   || VOID_TYPE_P (type)))
12222 	    return pedantic_non_lvalue_loc (loc, tem);
12223 	  return NULL_TREE;
12224 	}
12225       else if (TREE_CODE (arg0) == VECTOR_CST)
12226 	{
12227 	  unsigned HOST_WIDE_INT nelts;
12228 	  if ((TREE_CODE (arg1) == VECTOR_CST
12229 	       || TREE_CODE (arg1) == CONSTRUCTOR)
12230 	      && (TREE_CODE (arg2) == VECTOR_CST
12231 		  || TREE_CODE (arg2) == CONSTRUCTOR)
12232 	      && TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
12233 	    {
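	      /* Treat the constant condition as a permutation mask:
		 an all-ones lane selects element i of arg1, a zero
		 lane selects element i of arg2, encoded as nelts + i.  */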
12234 	      vec_perm_builder sel (nelts, nelts, 1);
12235 	      for (unsigned int i = 0; i < nelts; i++)
12236 		{
12237 		  tree val = VECTOR_CST_ELT (arg0, i);
12238 		  if (integer_all_onesp (val))
12239 		    sel.quick_push (i);
12240 		  else if (integer_zerop (val))
12241 		    sel.quick_push (nelts + i);
12242 		  else /* Currently unreachable.  */
12243 		    return NULL_TREE;
12244 		}
12245 	      vec_perm_indices indices (sel, 2, nelts);
12246 	      tree t = fold_vec_perm (type, arg1, arg2, indices);
12247 	      if (t != NULL_TREE)
12248 		return t;
12249 	    }
12250 	}
12251 
12252       /* If we have A op B ? A : C, we may be able to convert this to a
12253 	 simpler expression, depending on the operation and the values
12254 	 of B and C.  Signed zeros prevent all of these transformations,
12255 	 for reasons given above each one.
12256 
12257          Also try swapping the arguments and inverting the conditional.  */
12258       if (COMPARISON_CLASS_P (arg0)
12259 	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op1)
12260 	  && !HONOR_SIGNED_ZEROS (element_mode (op1)))
12261 	{
12262 	  tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
12263 	  if (tem)
12264 	    return tem;
12265 	}
12266 
12267       if (COMPARISON_CLASS_P (arg0)
12268 	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op2)
12269 	  && !HONOR_SIGNED_ZEROS (element_mode (op2)))
12270 	{
12271 	  location_t loc0 = expr_location_or (arg0, loc);
12272 	  tem = fold_invert_truthvalue (loc0, arg0);
12273 	  if (tem && COMPARISON_CLASS_P (tem))
12274 	    {
12275 	      tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
12276 	      if (tem)
12277 		return tem;
12278 	    }
12279 	}
12280 
12281       /* If the second operand is simpler than the third, swap them
12282 	 since that produces better jump optimization results.  */
12283       if (truth_value_p (TREE_CODE (arg0))
12284 	  && tree_swap_operands_p (op1, op2))
12285 	{
12286 	  location_t loc0 = expr_location_or (arg0, loc);
12287 	  /* See if this can be inverted.  If it can't, possibly because
12288 	     it was a floating-point inequality comparison, don't do
12289 	     anything.  */
12290 	  tem = fold_invert_truthvalue (loc0, arg0);
12291 	  if (tem)
12292 	    return fold_build3_loc (loc, code, type, tem, op2, op1);
12293 	}
12294 
12295       /* Convert A ? 1 : 0 to simply A.  */
12296       if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
12297 				 : (integer_onep (op1)
12298 				    && !VECTOR_TYPE_P (type)))
12299 	  && integer_zerop (op2)
12300 	  /* If we try to convert OP0 to our type, the
12301 	     call to fold will try to move the conversion inside
12302 	     a COND, which will recurse.  In that case, the COND_EXPR
12303 	     is probably the best choice, so leave it alone.  */
12304 	  && type == TREE_TYPE (arg0))
12305 	return pedantic_non_lvalue_loc (loc, arg0);
12306 
12307       /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
12308 	 over COND_EXPR in cases such as floating point comparisons.  */
12309       if (integer_zerop (op1)
12310 	  && code == COND_EXPR
12311 	  && integer_onep (op2)
12312 	  && !VECTOR_TYPE_P (type)
12313 	  && truth_value_p (TREE_CODE (arg0)))
12314 	return pedantic_non_lvalue_loc (loc,
12315 				    fold_convert_loc (loc, type,
12316 					      invert_truthvalue_loc (loc,
12317 								     arg0)));
12318 
12319       /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
12320       if (TREE_CODE (arg0) == LT_EXPR
12321 	  && integer_zerop (TREE_OPERAND (arg0, 1))
12322 	  && integer_zerop (op2)
12323 	  && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
12324 	{
12325 	  /* sign_bit_p looks through both zero and sign extensions,
12326 	     but for this optimization only sign extensions are
12327 	     usable.  */
12328 	  tree tem2 = TREE_OPERAND (arg0, 0);
12329 	  while (tem != tem2)
12330 	    {
12331 	      if (TREE_CODE (tem2) != NOP_EXPR
12332 		  || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
12333 		{
12334 		  tem = NULL_TREE;
12335 		  break;
12336 		}
12337 	      tem2 = TREE_OPERAND (tem2, 0);
12338 	    }
12339 	  /* sign_bit_p only checks ARG1 bits within A's precision.
12340 	     If <sign bit of A> has wider type than A, bits outside
12341 	     of A's precision in <sign bit of A> need to be checked.
12342 	     If they are all 0, this optimization needs to be done
12343 	     in unsigned A's type, if they are all 1 in signed A's type,
12344 	     otherwise this can't be done.  */
12345 	  if (tem
12346 	      && TYPE_PRECISION (TREE_TYPE (tem))
12347 		 < TYPE_PRECISION (TREE_TYPE (arg1))
12348 	      && TYPE_PRECISION (TREE_TYPE (tem))
12349 		 < TYPE_PRECISION (type))
12350 	    {
12351 	      int inner_width, outer_width;
12352 	      tree tem_type;
12353 
12354 	      inner_width = TYPE_PRECISION (TREE_TYPE (tem));
12355 	      outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
12356 	      if (outer_width > TYPE_PRECISION (type))
12357 		outer_width = TYPE_PRECISION (type);
12358 
12359 	      wide_int mask = wi::shifted_mask
12360 		(inner_width, outer_width - inner_width, false,
12361 		 TYPE_PRECISION (TREE_TYPE (arg1)));
12362 
12363 	      wide_int common = mask & wi::to_wide (arg1);
12364 	      if (common == mask)
12365 		{
12366 		  tem_type = signed_type_for (TREE_TYPE (tem));
12367 		  tem = fold_convert_loc (loc, tem_type, tem);
12368 		}
12369 	      else if (common == 0)
12370 		{
12371 		  tem_type = unsigned_type_for (TREE_TYPE (tem));
12372 		  tem = fold_convert_loc (loc, tem_type, tem);
12373 		}
12374 	      else
12375 		tem = NULL;
12376 	    }
12377 
12378 	  if (tem)
12379 	    return
12380 	      fold_convert_loc (loc, type,
12381 				fold_build2_loc (loc, BIT_AND_EXPR,
12382 					     TREE_TYPE (tem), tem,
12383 					     fold_convert_loc (loc,
12384 							       TREE_TYPE (tem),
12385 							       arg1)));
12386 	}
12387 
12388       /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
12389 	 already handled above.  */
12390       if (TREE_CODE (arg0) == BIT_AND_EXPR
12391 	  && integer_onep (TREE_OPERAND (arg0, 1))
12392 	  && integer_zerop (op2)
12393 	  && integer_pow2p (arg1))
12394 	{
12395 	  tree tem = TREE_OPERAND (arg0, 0);
12396 	  STRIP_NOPS (tem);
12397 	  if (TREE_CODE (tem) == RSHIFT_EXPR
12398 	      && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
12399               && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
12400 		 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
12401 	    return fold_build2_loc (loc, BIT_AND_EXPR, type,
12402 				    fold_convert_loc (loc, type,
12403 						      TREE_OPERAND (tem, 0)),
12404 				    op1);
12405 	}
12406 
12407       /* A & N ? N : 0 is simply A & N if N is a power of two.  This
12408 	 is probably obsolete because the first operand should be a
12409 	 truth value (that's why we have the two cases above), but let's
12410 	 leave it in until we can confirm this for all front-ends.  */
12411       if (integer_zerop (op2)
12412 	  && TREE_CODE (arg0) == NE_EXPR
12413 	  && integer_zerop (TREE_OPERAND (arg0, 1))
12414 	  && integer_pow2p (arg1)
12415 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12416 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12417 			      arg1, OEP_ONLY_CONST)
12418 	  /* operand_equal_p compares just value, not precision, so e.g.
12419 	     arg1 could be 8-bit -128 and be power of two, but BIT_AND_EXPR
12420 	     second operand 32-bit -128, which is not a power of two (or vice
12421 	     versa).  */
12422 	  && integer_pow2p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)))
12423 	return pedantic_non_lvalue_loc (loc,
12424 					fold_convert_loc (loc, type,
12425 							  TREE_OPERAND (arg0,
12426 									0)));
12427 
12428       /* Disable the transformations below for vectors, since
12429 	 fold_binary_op_with_conditional_arg may undo them immediately,
12430 	 yielding an infinite loop.  */
12431       if (code == VEC_COND_EXPR)
12432 	return NULL_TREE;
12433 
12434       /* Convert A ? B : 0 into A && B if A and B are truth values.  */
12435       if (integer_zerop (op2)
12436 	  && truth_value_p (TREE_CODE (arg0))
12437 	  && truth_value_p (TREE_CODE (arg1))
12438 	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12439 	return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
12440 							   : TRUTH_ANDIF_EXPR,
12441 				type, fold_convert_loc (loc, type, arg0), op1);
12442 
12443       /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
12444       if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
12445 	  && truth_value_p (TREE_CODE (arg0))
12446 	  && truth_value_p (TREE_CODE (arg1))
12447 	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12448 	{
12449 	  location_t loc0 = expr_location_or (arg0, loc);
12450 	  /* Only perform transformation if ARG0 is easily inverted.  */
12451 	  tem = fold_invert_truthvalue (loc0, arg0);
12452 	  if (tem)
12453 	    return fold_build2_loc (loc, code == VEC_COND_EXPR
12454 					 ? BIT_IOR_EXPR
12455 					 : TRUTH_ORIF_EXPR,
12456 				    type, fold_convert_loc (loc, type, tem),
12457 				    op1);
12458 	}
12459 
12460       /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
12461       if (integer_zerop (arg1)
12462 	  && truth_value_p (TREE_CODE (arg0))
12463 	  && truth_value_p (TREE_CODE (op2))
12464 	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12465 	{
12466 	  location_t loc0 = expr_location_or (arg0, loc);
12467 	  /* Only perform transformation if ARG0 is easily inverted.  */
12468 	  tem = fold_invert_truthvalue (loc0, arg0);
12469 	  if (tem)
12470 	    return fold_build2_loc (loc, code == VEC_COND_EXPR
12471 					 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
12472 				    type, fold_convert_loc (loc, type, tem),
12473 				    op2);
12474 	}
12475 
12476       /* Convert A ? 1 : B into A || B if A and B are truth values.  */
12477       if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
12478 	  && truth_value_p (TREE_CODE (arg0))
12479 	  && truth_value_p (TREE_CODE (op2))
12480 	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
12481 	return fold_build2_loc (loc, code == VEC_COND_EXPR
12482 				     ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
12483 				type, fold_convert_loc (loc, type, arg0), op2);
12484 
12485       return NULL_TREE;
12486 
12487     case CALL_EXPR:
12488       /* CALL_EXPRs used to be ternary exprs.  Catch any mistaken uses
12489 	 of fold_ternary on them.  */
12490       gcc_unreachable ();
12491 
12492     case BIT_FIELD_REF:
12493       if (TREE_CODE (arg0) == VECTOR_CST
12494 	  && (type == TREE_TYPE (TREE_TYPE (arg0))
12495 	      || (VECTOR_TYPE_P (type)
12496 		  && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0))))
12497 	  && tree_fits_uhwi_p (op1)
12498 	  && tree_fits_uhwi_p (op2))
12499 	{
12500 	  tree eltype = TREE_TYPE (TREE_TYPE (arg0));
12501 	  unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
12502 	  unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
12503 	  unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
12504 
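	  /* If both the bit position and the bit count are multiples of
	     the element width, the BIT_FIELD_REF extracts whole vector
	     elements and can be folded element-wise.  */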
12505 	  if (n != 0
12506 	      && (idx % width) == 0
12507 	      && (n % width) == 0
12508 	      && known_le ((idx + n) / width,
12509 			   TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))))
12510 	    {
12511 	      idx = idx / width;
12512 	      n = n / width;
12513 
12514 	      if (TREE_CODE (arg0) == VECTOR_CST)
12515 		{
12516 		  if (n == 1)
12517 		    {
12518 		      tem = VECTOR_CST_ELT (arg0, idx);
12519 		      if (VECTOR_TYPE_P (type))
12520 			tem = fold_build1 (VIEW_CONVERT_EXPR, type, tem);
12521 		      return tem;
12522 		    }
12523 
12524 		  tree_vector_builder vals (type, n, 1);
12525 		  for (unsigned i = 0; i < n; ++i)
12526 		    vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
12527 		  return vals.build ();
12528 		}
12529 	    }
12530 	}
12531 
12532       /* On constants we can use native encode/interpret to constant
12533          fold (nearly) all BIT_FIELD_REFs.  */
12534       if (CONSTANT_CLASS_P (arg0)
12535 	  && can_native_interpret_type_p (type)
12536 	  && BITS_PER_UNIT == 8
12537 	  && tree_fits_uhwi_p (op1)
12538 	  && tree_fits_uhwi_p (op2))
12539 	{
12540 	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
12541 	  unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
12542 	  /* Limit us to a reasonable amount of work.  To relax the
12543 	     other limitations we need bit-shifting of the buffer
12544 	     and rounding up the size.  */
12545 	  if (bitpos % BITS_PER_UNIT == 0
12546 	      && bitsize % BITS_PER_UNIT == 0
12547 	      && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
12548 	    {
12549 	      unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
12550 	      unsigned HOST_WIDE_INT len
12551 		= native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
12552 				      bitpos / BITS_PER_UNIT);
12553 	      if (len > 0
12554 		  && len * BITS_PER_UNIT >= bitsize)
12555 		{
12556 		  tree v = native_interpret_expr (type, b,
12557 						  bitsize / BITS_PER_UNIT);
12558 		  if (v)
12559 		    return v;
12560 		}
12561 	    }
12562 	}
12563 
12564       return NULL_TREE;
12565 
12566     case VEC_PERM_EXPR:
12567       /* Perform constant folding of VEC_PERM_EXPR.  */
12568       if (TREE_CODE (arg2) == VECTOR_CST
12569 	  && TREE_CODE (op0) == VECTOR_CST
12570 	  && TREE_CODE (op1) == VECTOR_CST)
12571 	{
12572 	  /* Build a vector of integers from the tree mask.  */
12573 	  vec_perm_builder builder;
12574 	  if (!tree_to_vec_perm_builder (&builder, arg2))
12575 	    return NULL_TREE;
12576 
12577 	  /* Create a vec_perm_indices for the integer vector.  */
12578 	  poly_uint64 nelts = TYPE_VECTOR_SUBPARTS (type);
12579 	  bool single_arg = (op0 == op1);
12580 	  vec_perm_indices sel (builder, single_arg ? 1 : 2, nelts);
12581 	  return fold_vec_perm (type, op0, op1, sel);
12582 	}
12583       return NULL_TREE;
12584 
12585     case BIT_INSERT_EXPR:
12586       /* Perform (partial) constant folding of BIT_INSERT_EXPR.  */
12587       if (TREE_CODE (arg0) == INTEGER_CST
12588 	  && TREE_CODE (arg1) == INTEGER_CST)
12589 	{
12590 	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
12591 	  unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
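	  /* TEM is ARG0 with the BITSIZE bits at BITPOS cleared; TEM2 is
	     ARG1 zero-extended and shifted into place.  ORing them
	     performs the insertion.  */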
12592 	  wide_int tem = (wi::to_wide (arg0)
12593 			  & wi::shifted_mask (bitpos, bitsize, true,
12594 					      TYPE_PRECISION (type)));
12595 	  wide_int tem2
12596 	    = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
12597 				    bitsize), bitpos);
12598 	  return wide_int_to_tree (type, wi::bit_or (tem, tem2));
12599 	}
12600       else if (TREE_CODE (arg0) == VECTOR_CST
12601 	       && CONSTANT_CLASS_P (arg1)
12602 	       && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
12603 				      TREE_TYPE (arg1)))
12604 	{
12605 	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
12606 	  unsigned HOST_WIDE_INT elsize
12607 	    = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
12608 	  if (bitpos % elsize == 0)
12609 	    {
12610 	      unsigned k = bitpos / elsize;
12611 	      unsigned HOST_WIDE_INT nelts;
12612 	      if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
12613 		return arg0;
12614 	      else if (VECTOR_CST_NELTS (arg0).is_constant (&nelts))
12615 		{
12616 		  tree_vector_builder elts (type, nelts, 1);
12617 		  elts.quick_grow (nelts);
12618 		  for (unsigned HOST_WIDE_INT i = 0; i < nelts; ++i)
12619 		    elts[i] = (i == k ? arg1 : VECTOR_CST_ELT (arg0, i));
12620 		  return elts.build ();
12621 		}
12622 	    }
12623 	}
12624       return NULL_TREE;
12625 
12626     default:
12627       return NULL_TREE;
12628     } /* switch (code) */
12629 }
12630 
12631 /* Get the element at ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
12632    of an array (or vector).  *CTOR_IDX, if non-NULL, is updated with the
12633    constructor element index of the value returned.  If the element is
12634    not found, NULL_TREE is returned and *CTOR_IDX is updated to
12635    the index of the element after the ACCESS_INDEX position (which
12636    may be outside of the CTOR array).  */
12637 
12638 tree
12639 get_array_ctor_element_at_index (tree ctor, offset_int access_index,
12640 				 unsigned *ctor_idx)
12641 {
12642   tree index_type = NULL_TREE;
12643   signop index_sgn = UNSIGNED;
12644   offset_int low_bound = 0;
12645 
12646   if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
12647     {
12648       tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
12649       if (domain_type && TYPE_MIN_VALUE (domain_type))
12650 	{
12651 	  /* Static constructors for variably sized objects make no sense.  */
12652 	  gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
12653 	  index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
12654 	  /* ???  When it is obvious that the range is signed, treat it so.  */
12655 	  if (TYPE_UNSIGNED (index_type)
12656 	      && TYPE_MAX_VALUE (domain_type)
12657 	      && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type),
12658 				  TYPE_MIN_VALUE (domain_type)))
12659 	    {
12660 	      index_sgn = SIGNED;
12661 	      low_bound
12662 		= offset_int::from (wi::to_wide (TYPE_MIN_VALUE (domain_type)),
12663 				    SIGNED);
12664 	    }
12665 	  else
12666 	    {
12667 	      index_sgn = TYPE_SIGN (index_type);
12668 	      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
12669 	    }
12670 	}
12671     }
12672 
12673   if (index_type)
12674     access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
12675 			    index_sgn);
12676 
12677   offset_int index = low_bound;
12678   if (index_type)
12679     index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
12680 
12681   offset_int max_index = index;
12682   unsigned cnt;
12683   tree cfield, cval;
12684   bool first_p = true;
12685 
12686   FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
12687     {
12688       /* An array constructor might explicitly set the index, or specify
12689 	 a range, or leave the index NULL, meaning that it is the next
12690 	 index after the previous one.  */
12691       if (cfield)
12692 	{
12693 	  if (TREE_CODE (cfield) == INTEGER_CST)
12694 	    max_index = index
12695 	      = offset_int::from (wi::to_wide (cfield), index_sgn);
12696 	  else
12697 	    {
12698 	      gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
12699 	      index = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 0)),
12700 					index_sgn);
12701 	      max_index
12702 	        = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 1)),
12703 				    index_sgn);
12704 	      gcc_checking_assert (wi::le_p (index, max_index, index_sgn));
12705 	    }
12706 	}
12707       else if (!first_p)
12708 	{
12709 	  index = max_index + 1;
12710 	  if (index_type)
12711 	    index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
12712 	  gcc_checking_assert (wi::gt_p (index, max_index, index_sgn));
12713 	  max_index = index;
12714 	}
12715       else
12716 	first_p = false;
12717 
12718       /* Do we have a match?  */
12719       if (wi::cmp (access_index, index, index_sgn) >= 0)
12720 	{
12721 	  if (wi::cmp (access_index, max_index, index_sgn) <= 0)
12722 	    {
12723 	      if (ctor_idx)
12724 		*ctor_idx = cnt;
12725 	      return cval;
12726 	    }
12727 	}
12728       else if (in_gimple_form)
12729 	/* We're past the element we search for.  Note during parsing
12730 	   the elements might not be sorted.
12731 	   ???  We should use a binary search and a flag on the
12732 	   CONSTRUCTOR as to whether elements are sorted in declaration
12733 	   order.  */
12734 	break;
12735     }
12736   if (ctor_idx)
12737     *ctor_idx = cnt;
12738   return NULL_TREE;
12739 }
12740 
12741 /* Perform constant folding and related simplification of EXPR.
12742    The related simplifications include x*1 => x, x*0 => 0, etc.,
12743    and application of the associative law.
12744    NOP_EXPR conversions may be removed freely (as long as we
12745    are careful not to change the type of the overall expression).
12746    We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
12747    but we can constant-fold them if they have constant operands.  */
12748 
12749 #ifdef ENABLE_FOLD_CHECKING
12750 # define fold(x) fold_1 (x)
12751 static tree fold_1 (tree);
12752 static
12753 #endif
12754 tree
12755 fold (tree expr)
12756 {
12757   const tree t = expr;
12758   enum tree_code code = TREE_CODE (t);
12759   enum tree_code_class kind = TREE_CODE_CLASS (code);
12760   tree tem;
12761   location_t loc = EXPR_LOCATION (expr);
12762 
12763   /* Return right away if a constant.  */
12764   if (kind == tcc_constant)
12765     return t;
12766 
12767   /* CALL_EXPR-like objects with variable numbers of operands are
12768      treated specially.  */
12769   if (kind == tcc_vl_exp)
12770     {
12771       if (code == CALL_EXPR)
12772 	{
12773 	  tem = fold_call_expr (loc, expr, false);
12774 	  return tem ? tem : expr;
12775 	}
12776       return expr;
12777     }
12778 
12779   if (IS_EXPR_CODE_CLASS (kind))
12780     {
12781       tree type = TREE_TYPE (t);
12782       tree op0, op1, op2;
12783 
12784       switch (TREE_CODE_LENGTH (code))
12785 	{
12786 	case 1:
12787 	  op0 = TREE_OPERAND (t, 0);
12788 	  tem = fold_unary_loc (loc, code, type, op0);
12789 	  return tem ? tem : expr;
12790 	case 2:
12791 	  op0 = TREE_OPERAND (t, 0);
12792 	  op1 = TREE_OPERAND (t, 1);
12793 	  tem = fold_binary_loc (loc, code, type, op0, op1);
12794 	  return tem ? tem : expr;
12795 	case 3:
12796 	  op0 = TREE_OPERAND (t, 0);
12797 	  op1 = TREE_OPERAND (t, 1);
12798 	  op2 = TREE_OPERAND (t, 2);
12799 	  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12800 	  return tem ? tem : expr;
12801 	default:
12802 	  break;
12803 	}
12804     }
12805 
12806   switch (code)
12807     {
12808     case ARRAY_REF:
12809       {
12810 	tree op0 = TREE_OPERAND (t, 0);
12811 	tree op1 = TREE_OPERAND (t, 1);
12812 
12813 	if (TREE_CODE (op1) == INTEGER_CST
12814 	    && TREE_CODE (op0) == CONSTRUCTOR
12815 	    && ! type_contains_placeholder_p (TREE_TYPE (op0)))
12816 	  {
12817 	    tree val = get_array_ctor_element_at_index (op0,
12818 							wi::to_offset (op1));
12819 	    if (val)
12820 	      return val;
12821 	  }
12822 
12823 	return t;
12824       }
12825 
12826       /* Return a VECTOR_CST if possible.  */
12827     case CONSTRUCTOR:
12828       {
12829 	tree type = TREE_TYPE (t);
12830 	if (TREE_CODE (type) != VECTOR_TYPE)
12831 	  return t;
12832 
12833 	unsigned i;
12834 	tree val;
12835 	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
12836 	  if (! CONSTANT_CLASS_P (val))
12837 	    return t;
12838 
12839 	return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
12840       }
12841 
12842     case CONST_DECL:
12843       return fold (DECL_INITIAL (t));
12844 
12845     default:
12846       return t;
12847     } /* switch (code) */
12848 }
12849 
12850 #ifdef ENABLE_FOLD_CHECKING
12851 #undef fold
12852 
12853 static void fold_checksum_tree (const_tree, struct md5_ctx *,
12854 				hash_table<nofree_ptr_hash<const tree_node> > *);
12855 static void fold_check_failed (const_tree, const_tree);
12856 void print_fold_checksum (const_tree);
12857 
12858 /* When --enable-checking=fold, compute a digest of EXPR before and
12859    after the actual fold call, to verify that fold did not accidentally
12860    change the original expr.  */
12861 
12862 tree
12863 fold (tree expr)
12864 {
12865   tree ret;
12866   struct md5_ctx ctx;
12867   unsigned char checksum_before[16], checksum_after[16];
12868   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12869 
12870   md5_init_ctx (&ctx);
12871   fold_checksum_tree (expr, &ctx, &ht);
12872   md5_finish_ctx (&ctx, checksum_before);
12873   ht.empty ();
12874 
12875   ret = fold_1 (expr);
12876 
12877   md5_init_ctx (&ctx);
12878   fold_checksum_tree (expr, &ctx, &ht);
12879   md5_finish_ctx (&ctx, checksum_after);
12880 
12881   if (memcmp (checksum_before, checksum_after, 16))
12882     fold_check_failed (expr, ret);
12883 
12884   return ret;
12885 }
12886 
12887 void
12888 print_fold_checksum (const_tree expr)
12889 {
12890   struct md5_ctx ctx;
12891   unsigned char checksum[16], cnt;
12892   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12893 
12894   md5_init_ctx (&ctx);
12895   fold_checksum_tree (expr, &ctx, &ht);
12896   md5_finish_ctx (&ctx, checksum);
12897   for (cnt = 0; cnt < 16; ++cnt)
12898     fprintf (stderr, "%02x", checksum[cnt]);
12899   putc ('\n', stderr);
12900 }
12901 
12902 static void
12903 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
12904 {
12905   internal_error ("fold check: original tree changed by fold");
12906 }
12907 
12908 static void
12909 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
12910 		    hash_table<nofree_ptr_hash <const tree_node> > *ht)
12911 {
12912   const tree_node **slot;
12913   enum tree_code code;
12914   union tree_node *buf;
12915   int i, len;
12916 
12917  recursive_label:
12918   if (expr == NULL)
12919     return;
12920   slot = ht->find_slot (expr, INSERT);
12921   if (*slot != NULL)
12922     return;
12923   *slot = expr;
12924   code = TREE_CODE (expr);
12925   if (TREE_CODE_CLASS (code) == tcc_declaration
12926       && HAS_DECL_ASSEMBLER_NAME_P (expr))
12927     {
12928       /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified.  */
12929       size_t sz = tree_size (expr);
12930       buf = XALLOCAVAR (union tree_node, sz);
12931       memcpy ((char *) buf, expr, sz);
12932       SET_DECL_ASSEMBLER_NAME ((tree) buf, NULL);
12933       buf->decl_with_vis.symtab_node = NULL;
12934       buf->base.nowarning_flag = 0;
12935       expr = (tree) buf;
12936     }
12937   else if (TREE_CODE_CLASS (code) == tcc_type
12938 	   && (TYPE_POINTER_TO (expr)
12939 	       || TYPE_REFERENCE_TO (expr)
12940 	       || TYPE_CACHED_VALUES_P (expr)
12941 	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
12942 	       || TYPE_NEXT_VARIANT (expr)
12943 	       || TYPE_ALIAS_SET_KNOWN_P (expr)))
12944     {
12945       /* Allow these fields to be modified.  */
12946       tree tmp;
12947       size_t sz = tree_size (expr);
12948       buf = XALLOCAVAR (union tree_node, sz);
12949       memcpy ((char *) buf, expr, sz);
12950       expr = tmp = (tree) buf;
12951       TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
12952       TYPE_POINTER_TO (tmp) = NULL;
12953       TYPE_REFERENCE_TO (tmp) = NULL;
12954       TYPE_NEXT_VARIANT (tmp) = NULL;
12955       TYPE_ALIAS_SET (tmp) = -1;
12956       if (TYPE_CACHED_VALUES_P (tmp))
12957 	{
12958 	  TYPE_CACHED_VALUES_P (tmp) = 0;
12959 	  TYPE_CACHED_VALUES (tmp) = NULL;
12960 	}
12961     }
12962   else if (TREE_NO_WARNING (expr) && (DECL_P (expr) || EXPR_P (expr)))
12963     {
12964       /* Allow TREE_NO_WARNING to be set.  Perhaps we shouldn't allow that
12965 	 and change builtins.c etc. instead - see PR89543.  */
12966       size_t sz = tree_size (expr);
12967       buf = XALLOCAVAR (union tree_node, sz);
12968       memcpy ((char *) buf, expr, sz);
12969       buf->base.nowarning_flag = 0;
12970       expr = (tree) buf;
12971     }
12972   md5_process_bytes (expr, tree_size (expr), ctx);
12973   if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
12974     fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12975   if (TREE_CODE_CLASS (code) != tcc_type
12976       && TREE_CODE_CLASS (code) != tcc_declaration
12977       && code != TREE_LIST
12978       && code != SSA_NAME
12979       && CODE_CONTAINS_STRUCT (code, TS_COMMON))
12980     fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12981   switch (TREE_CODE_CLASS (code))
12982     {
12983     case tcc_constant:
12984       switch (code)
12985 	{
12986 	case STRING_CST:
12987 	  md5_process_bytes (TREE_STRING_POINTER (expr),
12988 			     TREE_STRING_LENGTH (expr), ctx);
12989 	  break;
12990 	case COMPLEX_CST:
12991 	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12992 	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12993 	  break;
12994 	case VECTOR_CST:
12995 	  len = vector_cst_encoded_nelts (expr);
12996 	  for (i = 0; i < len; ++i)
12997 	    fold_checksum_tree (VECTOR_CST_ENCODED_ELT (expr, i), ctx, ht);
12998 	  break;
12999 	default:
13000 	  break;
13001 	}
13002       break;
13003     case tcc_exceptional:
13004       switch (code)
13005 	{
13006 	case TREE_LIST:
13007 	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13008 	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
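	  /* Walk the TREE_LIST chain iteratively instead of recursing,
	     so that long chains do not grow the stack.  */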
13009 	  expr = TREE_CHAIN (expr);
13010 	  goto recursive_label;
13011 	  break;
13012 	case TREE_VEC:
13013 	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13014 	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13015 	  break;
13016 	default:
13017 	  break;
13018 	}
13019       break;
13020     case tcc_expression:
13021     case tcc_reference:
13022     case tcc_comparison:
13023     case tcc_unary:
13024     case tcc_binary:
13025     case tcc_statement:
13026     case tcc_vl_exp:
13027       len = TREE_OPERAND_LENGTH (expr);
13028       for (i = 0; i < len; ++i)
13029 	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13030       break;
13031     case tcc_declaration:
13032       fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13033       fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13034       if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13035 	{
13036 	  fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13037 	  fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13038 	  fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13039 	  fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13040 	  fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13041 	}
13042 
13043       if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13044 	{
13045 	  if (TREE_CODE (expr) == FUNCTION_DECL)
13046 	    {
13047 	      fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13048 	      fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
13049 	    }
13050 	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13051 	}
13052       break;
13053     case tcc_type:
13054       if (TREE_CODE (expr) == ENUMERAL_TYPE)
13055         fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13056       fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13057       fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13058       fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13059       fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13060       if (INTEGRAL_TYPE_P (expr)
13061           || SCALAR_FLOAT_TYPE_P (expr))
13062 	{
13063 	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13064 	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13065 	}
13066       fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13067       if (TREE_CODE (expr) == RECORD_TYPE
13068 	  || TREE_CODE (expr) == UNION_TYPE
13069 	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
13070 	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13071       fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13072       break;
13073     default:
13074       break;
13075     }
13076 }
13077 
13078 /* Helper function for outputting the checksum of a tree T.  When
13079    debugging with gdb, you can "define mynext" to be "next" followed
13080    by "call debug_fold_checksum (op0)", then just trace down till the
13081    outputs differ.  */
13082 
13083 DEBUG_FUNCTION void
13084 debug_fold_checksum (const_tree t)
13085 {
13086   int i;
13087   unsigned char checksum[16];
13088   struct md5_ctx ctx;
13089   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13090 
13091   md5_init_ctx (&ctx);
13092   fold_checksum_tree (t, &ctx, &ht);
13093   md5_finish_ctx (&ctx, checksum);
13094   ht.empty ();
13095 
13096   for (i = 0; i < 16; i++)
13097     fprintf (stderr, "%d ", checksum[i]);
13098 
13099   fprintf (stderr, "\n");
13100 }
13101 
13102 #endif
13103 
13104 /* Fold a unary tree expression with code CODE of type TYPE with an
13105    operand OP0.  LOC is the location of the resulting expression.
13106    Return a folded expression if successful.  Otherwise, return a tree
13107    expression with code CODE of type TYPE with an operand OP0.  */
13108 
13109 tree
13110 fold_build1_loc (location_t loc,
13111 		 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13112 {
13113   tree tem;
13114 #ifdef ENABLE_FOLD_CHECKING
13115   unsigned char checksum_before[16], checksum_after[16];
13116   struct md5_ctx ctx;
13117   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13118 
13119   md5_init_ctx (&ctx);
13120   fold_checksum_tree (op0, &ctx, &ht);
13121   md5_finish_ctx (&ctx, checksum_before);
13122   ht.empty ();
13123 #endif
13124 
13125   tem = fold_unary_loc (loc, code, type, op0);
13126   if (!tem)
13127     tem = build1_loc (loc, code, type, op0 PASS_MEM_STAT);
13128 
13129 #ifdef ENABLE_FOLD_CHECKING
13130   md5_init_ctx (&ctx);
13131   fold_checksum_tree (op0, &ctx, &ht);
13132   md5_finish_ctx (&ctx, checksum_after);
13133 
13134   if (memcmp (checksum_before, checksum_after, 16))
13135     fold_check_failed (op0, tem);
13136 #endif
13137   return tem;
13138 }
13139 
13140 /* Fold a binary tree expression with code CODE of type TYPE with
13141    operands OP0 and OP1.  LOC is the location of the resulting
13142    expression.  Return a folded expression if successful.  Otherwise,
13143    return a tree expression with code CODE of type TYPE with operands
13144    OP0 and OP1.  */
13145 
13146 tree
13147 fold_build2_loc (location_t loc,
13148 		      enum tree_code code, tree type, tree op0, tree op1
13149 		      MEM_STAT_DECL)
13150 {
13151   tree tem;
13152 #ifdef ENABLE_FOLD_CHECKING
13153   unsigned char checksum_before_op0[16],
13154                 checksum_before_op1[16],
13155 		checksum_after_op0[16],
13156 		checksum_after_op1[16];
13157   struct md5_ctx ctx;
13158   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13159 
13160   md5_init_ctx (&ctx);
13161   fold_checksum_tree (op0, &ctx, &ht);
13162   md5_finish_ctx (&ctx, checksum_before_op0);
13163   ht.empty ();
13164 
13165   md5_init_ctx (&ctx);
13166   fold_checksum_tree (op1, &ctx, &ht);
13167   md5_finish_ctx (&ctx, checksum_before_op1);
13168   ht.empty ();
13169 #endif
13170 
13171   tem = fold_binary_loc (loc, code, type, op0, op1);
13172   if (!tem)
13173     tem = build2_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
13174 
13175 #ifdef ENABLE_FOLD_CHECKING
13176   md5_init_ctx (&ctx);
13177   fold_checksum_tree (op0, &ctx, &ht);
13178   md5_finish_ctx (&ctx, checksum_after_op0);
13179   ht.empty ();
13180 
13181   if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13182     fold_check_failed (op0, tem);
13183 
13184   md5_init_ctx (&ctx);
13185   fold_checksum_tree (op1, &ctx, &ht);
13186   md5_finish_ctx (&ctx, checksum_after_op1);
13187 
13188   if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13189     fold_check_failed (op1, tem);
13190 #endif
13191   return tem;
13192 }
13193 
13194 /* Fold a ternary tree expression with code CODE of type TYPE with
13195    operands OP0, OP1, and OP2.  Return a folded expression if
13196    successful.  Otherwise, return a tree expression with code CODE of
13197    type TYPE with operands OP0, OP1, and OP2.  */
13198 
13199 tree
13200 fold_build3_loc (location_t loc, enum tree_code code, tree type,
13201 		      tree op0, tree op1, tree op2 MEM_STAT_DECL)
13202 {
13203   tree tem;
13204 #ifdef ENABLE_FOLD_CHECKING
13205   unsigned char checksum_before_op0[16],
13206                 checksum_before_op1[16],
13207                 checksum_before_op2[16],
13208 		checksum_after_op0[16],
13209 		checksum_after_op1[16],
13210 		checksum_after_op2[16];
13211   struct md5_ctx ctx;
13212   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13213 
13214   md5_init_ctx (&ctx);
13215   fold_checksum_tree (op0, &ctx, &ht);
13216   md5_finish_ctx (&ctx, checksum_before_op0);
13217   ht.empty ();
13218 
13219   md5_init_ctx (&ctx);
13220   fold_checksum_tree (op1, &ctx, &ht);
13221   md5_finish_ctx (&ctx, checksum_before_op1);
13222   ht.empty ();
13223 
13224   md5_init_ctx (&ctx);
13225   fold_checksum_tree (op2, &ctx, &ht);
13226   md5_finish_ctx (&ctx, checksum_before_op2);
13227   ht.empty ();
13228 #endif
13229 
13230   gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13231   tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13232   if (!tem)
13233     tem = build3_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
13234 
13235 #ifdef ENABLE_FOLD_CHECKING
13236   md5_init_ctx (&ctx);
13237   fold_checksum_tree (op0, &ctx, &ht);
13238   md5_finish_ctx (&ctx, checksum_after_op0);
13239   ht.empty ();
13240 
13241   if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13242     fold_check_failed (op0, tem);
13243 
13244   md5_init_ctx (&ctx);
13245   fold_checksum_tree (op1, &ctx, &ht);
13246   md5_finish_ctx (&ctx, checksum_after_op1);
13247   ht.empty ();
13248 
13249   if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13250     fold_check_failed (op1, tem);
13251 
13252   md5_init_ctx (&ctx);
13253   fold_checksum_tree (op2, &ctx, &ht);
13254   md5_finish_ctx (&ctx, checksum_after_op2);
13255 
13256   if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13257     fold_check_failed (op2, tem);
13258 #endif
13259   return tem;
13260 }
13261 
13262 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
13263    arguments in ARGARRAY, and a null static chain.
13264    Return a folded expression if successful.  Otherwise, return a CALL_EXPR
13265    of type TYPE from the given operands as constructed by build_call_array.  */
13266 
13267 tree
13268 fold_build_call_array_loc (location_t loc, tree type, tree fn,
13269 			   int nargs, tree *argarray)
13270 {
13271   tree tem;
13272 #ifdef ENABLE_FOLD_CHECKING
13273   unsigned char checksum_before_fn[16],
13274                 checksum_before_arglist[16],
13275 		checksum_after_fn[16],
13276 		checksum_after_arglist[16];
13277   struct md5_ctx ctx;
13278   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13279   int i;
13280 
13281   md5_init_ctx (&ctx);
13282   fold_checksum_tree (fn, &ctx, &ht);
13283   md5_finish_ctx (&ctx, checksum_before_fn);
13284   ht.empty ();
13285 
13286   md5_init_ctx (&ctx);
13287   for (i = 0; i < nargs; i++)
13288     fold_checksum_tree (argarray[i], &ctx, &ht);
13289   md5_finish_ctx (&ctx, checksum_before_arglist);
13290   ht.empty ();
13291 #endif
13292 
13293   tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
13294   if (!tem)
13295     tem = build_call_array_loc (loc, type, fn, nargs, argarray);
13296 
13297 #ifdef ENABLE_FOLD_CHECKING
13298   md5_init_ctx (&ctx);
13299   fold_checksum_tree (fn, &ctx, &ht);
13300   md5_finish_ctx (&ctx, checksum_after_fn);
13301   ht.empty ();
13302 
13303   if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13304     fold_check_failed (fn, tem);
13305 
13306   md5_init_ctx (&ctx);
13307   for (i = 0; i < nargs; i++)
13308     fold_checksum_tree (argarray[i], &ctx, &ht);
13309   md5_finish_ctx (&ctx, checksum_after_arglist);
13310 
13311   if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13312     fold_check_failed (NULL_TREE, tem);
13313 #endif
13314   return tem;
13315 }
13316 
13317 /* Perform constant folding and related simplification of initializer
13318    expression EXPR.  These behave identically to "fold_buildN" but ignore
13319    potential run-time traps and exceptions that fold must preserve.  */
13320 
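/* START_FOLD_INIT saves and clears the flags under which fold must
   preserve run-time traps (signaling NaNs, trapping math, dynamic
   rounding, -ftrapv) and sets folding_initializer; END_FOLD_INIT
   restores the saved values.  */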
13321 #define START_FOLD_INIT \
13322   int saved_signaling_nans = flag_signaling_nans;\
13323   int saved_trapping_math = flag_trapping_math;\
13324   int saved_rounding_math = flag_rounding_math;\
13325   int saved_trapv = flag_trapv;\
13326   int saved_folding_initializer = folding_initializer;\
13327   flag_signaling_nans = 0;\
13328   flag_trapping_math = 0;\
13329   flag_rounding_math = 0;\
13330   flag_trapv = 0;\
13331   folding_initializer = 1;
13332 
13333 #define END_FOLD_INIT \
13334   flag_signaling_nans = saved_signaling_nans;\
13335   flag_trapping_math = saved_trapping_math;\
13336   flag_rounding_math = saved_rounding_math;\
13337   flag_trapv = saved_trapv;\
13338   folding_initializer = saved_folding_initializer;
13339 
13340 tree
13341 fold_build1_initializer_loc (location_t loc, enum tree_code code,
13342 			     tree type, tree op)
13343 {
13344   tree result;
13345   START_FOLD_INIT;
13346 
13347   result = fold_build1_loc (loc, code, type, op);
13348 
13349   END_FOLD_INIT;
13350   return result;
13351 }
13352 
13353 tree
13354 fold_build2_initializer_loc (location_t loc, enum tree_code code,
13355 			     tree type, tree op0, tree op1)
13356 {
13357   tree result;
13358   START_FOLD_INIT;
13359 
13360   result = fold_build2_loc (loc, code, type, op0, op1);
13361 
13362   END_FOLD_INIT;
13363   return result;
13364 }
13365 
13366 tree
13367 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
13368 				       int nargs, tree *argarray)
13369 {
13370   tree result;
13371   START_FOLD_INIT;
13372 
13373   result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
13374 
13375   END_FOLD_INIT;
13376   return result;
13377 }
13378 
13379 #undef START_FOLD_INIT
13380 #undef END_FOLD_INIT
13381 
13382 /* Determine if the first argument is a multiple of the second argument.
13383    Return 0 if it is not, or if we cannot easily determine it to be.
13384 
13385    An example of the sort of thing we care about (at this point; this routine
13386    could surely be made more general, and expanded to do what the *_DIV_EXPR's
13387    fold cases do now) is discovering that
13388 
13389      SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13390 
13391    is a multiple of
13392 
13393      SAVE_EXPR (J * 8)
13394 
13395    when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
13396 
13397    This code also handles discovering that
13398 
13399      SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13400 
13401    is a multiple of 8 so we don't have to worry about dealing with a
13402    possible remainder.
13403 
13404    Note that we *look* inside a SAVE_EXPR only to determine how it was
13405    calculated; it is not safe for fold to do much of anything else with the
13406    internals of a SAVE_EXPR, since it cannot know when it will be evaluated
13407    at run time.  For example, the latter example above *cannot* be implemented
13408    as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
13409    evaluation time of the original SAVE_EXPR is not necessarily the same at
13410    the time the new expression is evaluated.  The only optimization of this
13411    sort that would be valid is changing
13412 
13413      SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
13414 
13415    divided by 8 to
13416 
13417      SAVE_EXPR (I) * SAVE_EXPR (J)
13418 
13419    (where the same SAVE_EXPR (J) is used in the original and the
13420    transformed version).  */
13421 
13422 int
13423 multiple_of_p (tree type, const_tree top, const_tree bottom)
13424 {
13425   gimple *stmt;
13426   tree t1, op1, op2;
13427 
13428   if (operand_equal_p (top, bottom, 0))
13429     return 1;
13430 
13431   if (TREE_CODE (type) != INTEGER_TYPE)
13432     return 0;
13433 
13434   switch (TREE_CODE (top))
13435     {
13436     case BIT_AND_EXPR:
13437       /* Bitwise and provides a power of two multiple.  If the mask is
13438 	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
13439       if (!integer_pow2p (bottom))
13440 	return 0;
13441       return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13442 	      || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
13443 
13444     case MULT_EXPR:
13445       if (TREE_CODE (bottom) == INTEGER_CST)
13446 	{
13447 	  op1 = TREE_OPERAND (top, 0);
13448 	  op2 = TREE_OPERAND (top, 1);
13449 	  if (TREE_CODE (op1) == INTEGER_CST)
13450 	    std::swap (op1, op2);
13451 	  if (TREE_CODE (op2) == INTEGER_CST)
13452 	    {
13453 	      if (multiple_of_p (type, op2, bottom))
13454 		return 1;
13455 	      /* Handle multiple_of_p ((x * 2 + 2) * 4, 8).  */
13456 	      if (multiple_of_p (type, bottom, op2))
13457 		{
13458 		  widest_int w = wi::sdiv_trunc (wi::to_widest (bottom),
13459 						 wi::to_widest (op2));
13460 		  if (wi::fits_to_tree_p (w, TREE_TYPE (bottom)))
13461 		    {
13462 		      op2 = wide_int_to_tree (TREE_TYPE (bottom), w);
13463 		      return multiple_of_p (type, op1, op2);
13464 		    }
13465 		}
13466 	      return multiple_of_p (type, op1, bottom);
13467 	    }
13468 	}
13469       return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13470 	      || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
13471 
13472     case MINUS_EXPR:
13473       /* It is impossible to prove if op0 - op1 is multiple of bottom
13474 	 precisely, so be conservative here checking if both op0 and op1
13475 	 are multiple of bottom.  Note we check the second operand first
13476 	 since it's usually simpler.  */
13477       return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13478 	      && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
13479 
13480     case PLUS_EXPR:
13481       /* The same as MINUS_EXPR, but handle cases like op0 + 0xfffffffd
13482 	 as op0 - 3 if the expression has unsigned type.  For example,
13483 	 (X / 3) + 0xfffffffd is multiple of 3, but 0xfffffffd is not.  */
13484       op1 = TREE_OPERAND (top, 1);
13485       if (TYPE_UNSIGNED (type)
13486 	  && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
13487 	op1 = fold_build1 (NEGATE_EXPR, type, op1);
13488       return (multiple_of_p (type, op1, bottom)
13489 	      && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));
13490 
13491     case LSHIFT_EXPR:
13492       if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
13493 	{
13494 	  op1 = TREE_OPERAND (top, 1);
13495 	  /* const_binop may not detect overflow correctly,
13496 	     so check for it explicitly here.  */
13497 	  if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
13498 			 wi::to_wide (op1))
13499 	      && (t1 = fold_convert (type,
13500 				     const_binop (LSHIFT_EXPR, size_one_node,
13501 						  op1))) != 0
13502 	      && !TREE_OVERFLOW (t1))
13503 	    return multiple_of_p (type, t1, bottom);
13504 	}
13505       return 0;
13506 
13507     case NOP_EXPR:
13508       /* Can't handle conversions from non-integral or wider integral type.  */
13509       if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
13510 	  || (TYPE_PRECISION (type)
13511 	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
13512 	return 0;
13513 
13514       /* fall through */
13515 
13516     case SAVE_EXPR:
13517       return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
13518 
13519     case COND_EXPR:
13520       return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
13521 	      && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
13522 
13523     case INTEGER_CST:
13524       if (TREE_CODE (bottom) != INTEGER_CST
13525 	  || integer_zerop (bottom)
13526 	  || (TYPE_UNSIGNED (type)
13527 	      && (tree_int_cst_sgn (top) < 0
13528 		  || tree_int_cst_sgn (bottom) < 0)))
13529 	return 0;
13530       return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
13531 				SIGNED);
13532 
13533     case SSA_NAME:
13534       if (TREE_CODE (bottom) == INTEGER_CST
13535 	  && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
13536 	  && gimple_code (stmt) == GIMPLE_ASSIGN)
13537 	{
13538 	  enum tree_code code = gimple_assign_rhs_code (stmt);
13539 
13540 	  /* Check for special cases to see if top is defined as multiple
13541 	     of bottom:
13542 
13543 	       top = X & ~(bottom - 1) ; bottom is a power of 2
13544 
13545 	     or
13546 
13547 	       Y = X % bottom
13548 	       top = X - Y.  */
13549 	  if (code == BIT_AND_EXPR
13550 	      && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
13551 	      && TREE_CODE (op2) == INTEGER_CST
13552 	      && integer_pow2p (bottom)
13553 	      && wi::multiple_of_p (wi::to_widest (op2),
13554 				    wi::to_widest (bottom), UNSIGNED))
13555 	    return 1;
13556 
13557 	  op1 = gimple_assign_rhs1 (stmt);
13558 	  if (code == MINUS_EXPR
13559 	      && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
13560 	      && TREE_CODE (op2) == SSA_NAME
13561 	      && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
13562 	      && gimple_code (stmt) == GIMPLE_ASSIGN
13563 	      && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
13564 	      && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
13565 	      && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
13566 	    return 1;
13567 	}
13568 
13569       /* fall through */
13570 
13571     default:
13572       if (POLY_INT_CST_P (top) && poly_int_tree_p (bottom))
13573 	return multiple_p (wi::to_poly_widest (top),
13574 			   wi::to_poly_widest (bottom));
13575 
13576       return 0;
13577     }
13578 }
13579 
13580 #define tree_expr_nonnegative_warnv_p(X, Y) \
13581   _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13582 
13583 #define RECURSE(X) \
13584   ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
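
/* The #define above poisons direct calls to tree_expr_nonnegative_warnv_p
   in this part of the file, so every recursion has to go through RECURSE,
   which increments DEPTH and lets the walk be bounded.  */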
13585 
13586 /* Return true if CODE or TYPE is known to be non-negative.  */
13587 
13588 static bool
13589 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
13590 {
13591   if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
13592       && truth_value_p (code))
13593     /* Truth values evaluate to 0 or 1, which is nonnegative unless we
13594        have a signed:1 type (where the values are -1 and 0).  */
13595     return true;
13596   return false;
13597 }
13598 
13599 /* Return true if (CODE OP0) is known to be non-negative.  If the return
13600    value is based on the assumption that signed overflow is undefined,
13601    set *STRICT_OVERFLOW_P to true; otherwise, don't change
13602    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
13603 
13604 bool
13605 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13606 				bool *strict_overflow_p, int depth)
13607 {
13608   if (TYPE_UNSIGNED (type))
13609     return true;
13610 
13611   switch (code)
13612     {
13613     case ABS_EXPR:
13614       /* We can't return 1 if flag_wrapv is set because
13615 	 ABS_EXPR<INT_MIN> = INT_MIN.  */
13616       if (!ANY_INTEGRAL_TYPE_P (type))
13617 	return true;
13618       if (TYPE_OVERFLOW_UNDEFINED (type))
13619 	{
13620 	  *strict_overflow_p = true;
13621 	  return true;
13622 	}
13623       break;
13624 
13625     case NON_LVALUE_EXPR:
13626     case FLOAT_EXPR:
13627     case FIX_TRUNC_EXPR:
13628       return RECURSE (op0);
13629 
13630     CASE_CONVERT:
13631       {
13632 	tree inner_type = TREE_TYPE (op0);
13633 	tree outer_type = type;
13634 
13635 	if (TREE_CODE (outer_type) == REAL_TYPE)
13636 	  {
13637 	    if (TREE_CODE (inner_type) == REAL_TYPE)
13638 	      return RECURSE (op0);
13639 	    if (INTEGRAL_TYPE_P (inner_type))
13640 	      {
13641 		if (TYPE_UNSIGNED (inner_type))
13642 		  return true;
13643 		return RECURSE (op0);
13644 	      }
13645 	  }
13646 	else if (INTEGRAL_TYPE_P (outer_type))
13647 	  {
13648 	    if (TREE_CODE (inner_type) == REAL_TYPE)
13649 	      return RECURSE (op0);
13650 	    if (INTEGRAL_TYPE_P (inner_type))
13651 	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
13652 		      && TYPE_UNSIGNED (inner_type);
13653 	  }
13654       }
13655       break;
13656 
13657     default:
13658       return tree_simple_nonnegative_warnv_p (code, type);
13659     }
13660 
13661   /* We don't know the sign of the result, so be conservative and return false.  */
13662   return false;
13663 }
13664 
13665 /* Return true if (CODE OP0 OP1) is known to be non-negative.  If the return
13666    value is based on the assumption that signed overflow is undefined,
13667    set *STRICT_OVERFLOW_P to true; otherwise, don't change
13668    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
13669 
13670 bool
13671 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13672 				 tree op1, bool *strict_overflow_p,
13673 				 int depth)
13674 {
13675   if (TYPE_UNSIGNED (type))
13676     return true;
13677 
13678   switch (code)
13679     {
13680     case POINTER_PLUS_EXPR:
13681     case PLUS_EXPR:
13682       if (FLOAT_TYPE_P (type))
13683 	return RECURSE (op0) && RECURSE (op1);
13684 
13685       /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
13686 	 both unsigned and at least 2 bits shorter than the result.  */
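      /* For instance, with 32-bit int, (int) (unsigned short) x
	 + (int) (unsigned short) y needs at most 17 bits, so the sum
	 can never wrap to a negative value.  */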
13687       if (TREE_CODE (type) == INTEGER_TYPE
13688 	  && TREE_CODE (op0) == NOP_EXPR
13689 	  && TREE_CODE (op1) == NOP_EXPR)
13690 	{
13691 	  tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
13692 	  tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
13693 	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
13694 	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
13695 	    {
13696 	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
13697 				       TYPE_PRECISION (inner2)) + 1;
13698 	      return prec < TYPE_PRECISION (type);
13699 	    }
13700 	}
13701       break;
13702 
13703     case MULT_EXPR:
13704       if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
13705 	{
13706 	  /* x * x is always non-negative for floating point x, and for
13707 	     integral x when signed overflow is undefined.  */
13708 	  if (operand_equal_p (op0, op1, 0)
13709 	      || (RECURSE (op0) && RECURSE (op1)))
13710 	    {
13711 	      if (ANY_INTEGRAL_TYPE_P (type)
13712 		  && TYPE_OVERFLOW_UNDEFINED (type))
13713 		*strict_overflow_p = true;
13714 	      return true;
13715 	    }
13716 	}
13717 
13718       /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
13719 	 both unsigned and the sum of their precisions is less than the result's.  */
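      /* For instance, (int) (unsigned char) x * (int) (unsigned char) y
	 needs at most 16 bits, which cannot reach the sign bit of a
	 32-bit int.  */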
13720       if (TREE_CODE (type) == INTEGER_TYPE
13721 	  && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
13722 	  && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
13723 	{
13724 	  tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
13725 	    ? TREE_TYPE (TREE_OPERAND (op0, 0))
13726 	    : TREE_TYPE (op0);
13727 	  tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
13728 	    ? TREE_TYPE (TREE_OPERAND (op1, 0))
13729 	    : TREE_TYPE (op1);
13730 
13731 	  bool unsigned0 = TYPE_UNSIGNED (inner0);
13732 	  bool unsigned1 = TYPE_UNSIGNED (inner1);
13733 
13734 	  if (TREE_CODE (op0) == INTEGER_CST)
13735 	    unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
13736 
13737 	  if (TREE_CODE (op1) == INTEGER_CST)
13738 	    unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
13739 
13740 	  if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
13741 	      && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
13742 	    {
13743 	      unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
13744 		? tree_int_cst_min_precision (op0, UNSIGNED)
13745 		: TYPE_PRECISION (inner0);
13746 
13747 	      unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
13748 		? tree_int_cst_min_precision (op1, UNSIGNED)
13749 		: TYPE_PRECISION (inner1);
13750 
13751 	      return precision0 + precision1 < TYPE_PRECISION (type);
13752 	    }
13753 	}
13754       return false;
13755 
13756     case BIT_AND_EXPR:
13757     case MAX_EXPR:
13758       return RECURSE (op0) || RECURSE (op1);
13759 
13760     case BIT_IOR_EXPR:
13761     case BIT_XOR_EXPR:
13762     case MIN_EXPR:
13763     case RDIV_EXPR:
13764     case TRUNC_DIV_EXPR:
13765     case CEIL_DIV_EXPR:
13766     case FLOOR_DIV_EXPR:
13767     case ROUND_DIV_EXPR:
13768       return RECURSE (op0) && RECURSE (op1);
13769 
13770     case TRUNC_MOD_EXPR:
13771       return RECURSE (op0);
13772 
13773     case FLOOR_MOD_EXPR:
13774       return RECURSE (op1);
13775 
13776     case CEIL_MOD_EXPR:
13777     case ROUND_MOD_EXPR:
13778     default:
13779       return tree_simple_nonnegative_warnv_p (code, type);
13780     }
13781 
13782   /* We don't know the sign of the result, so be conservative and return false.  */
13783   return false;
13784 }
13785 
13786 /* Return true if T is known to be non-negative.  If the return
13787    value is based on the assumption that signed overflow is undefined,
13788    set *STRICT_OVERFLOW_P to true; otherwise, don't change
13789    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
13790 
13791 bool
13792 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13793 {
13794   if (TYPE_UNSIGNED (TREE_TYPE (t)))
13795     return true;
13796 
13797   switch (TREE_CODE (t))
13798     {
13799     case INTEGER_CST:
13800       return tree_int_cst_sgn (t) >= 0;
13801 
13802     case REAL_CST:
13803       return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
13804 
13805     case FIXED_CST:
13806       return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
13807 
13808     case COND_EXPR:
13809       return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13810 
13811     case SSA_NAME:
13812       /* Limit the depth of recursion to avoid quadratic behavior.
13813 	 This is expected to catch almost all occurrences in practice.
13814 	 If this code misses important cases that unbounded recursion
13815 	 would not, passes that need this information could be revised
13816 	 to provide it through dataflow propagation.  */
13817       return (!name_registered_for_update_p (t)
13818 	      && depth < param_max_ssa_name_query_depth
13819 	      && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
13820 						  strict_overflow_p, depth));
13821 
13822     default:
13823       return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13824     }
13825 }
13826 
13827 /* Return true if T is known to be non-negative.  If the return
13828    value is based on the assumption that signed overflow is undefined,
13829    set *STRICT_OVERFLOW_P to true; otherwise, don't change
13830    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
13831 
13832 bool
13833 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
13834 			       bool *strict_overflow_p, int depth)
13835 {
13836   switch (fn)
13837     {
13838     CASE_CFN_ACOS:
13839     CASE_CFN_ACOSH:
13840     CASE_CFN_CABS:
13841     CASE_CFN_COSH:
13842     CASE_CFN_ERFC:
13843     CASE_CFN_EXP:
13844     CASE_CFN_EXP10:
13845     CASE_CFN_EXP2:
13846     CASE_CFN_FABS:
13847     CASE_CFN_FDIM:
13848     CASE_CFN_HYPOT:
13849     CASE_CFN_POW10:
13850     CASE_CFN_FFS:
13851     CASE_CFN_PARITY:
13852     CASE_CFN_POPCOUNT:
13853     CASE_CFN_CLZ:
13854     CASE_CFN_CLRSB:
13855     case CFN_BUILT_IN_BSWAP32:
13856     case CFN_BUILT_IN_BSWAP64:
13857       /* Always true.  */
13858       return true;
13859 
13860     CASE_CFN_SQRT:
13861     CASE_CFN_SQRT_FN:
13862       /* sqrt(-0.0) is -0.0.  */
13863       if (!HONOR_SIGNED_ZEROS (element_mode (type)))
13864 	return true;
13865       return RECURSE (arg0);
13866 
13867     CASE_CFN_ASINH:
13868     CASE_CFN_ATAN:
13869     CASE_CFN_ATANH:
13870     CASE_CFN_CBRT:
13871     CASE_CFN_CEIL:
13872     CASE_CFN_CEIL_FN:
13873     CASE_CFN_ERF:
13874     CASE_CFN_EXPM1:
13875     CASE_CFN_FLOOR:
13876     CASE_CFN_FLOOR_FN:
13877     CASE_CFN_FMOD:
13878     CASE_CFN_FREXP:
13879     CASE_CFN_ICEIL:
13880     CASE_CFN_IFLOOR:
13881     CASE_CFN_IRINT:
13882     CASE_CFN_IROUND:
13883     CASE_CFN_LCEIL:
13884     CASE_CFN_LDEXP:
13885     CASE_CFN_LFLOOR:
13886     CASE_CFN_LLCEIL:
13887     CASE_CFN_LLFLOOR:
13888     CASE_CFN_LLRINT:
13889     CASE_CFN_LLROUND:
13890     CASE_CFN_LRINT:
13891     CASE_CFN_LROUND:
13892     CASE_CFN_MODF:
13893     CASE_CFN_NEARBYINT:
13894     CASE_CFN_NEARBYINT_FN:
13895     CASE_CFN_RINT:
13896     CASE_CFN_RINT_FN:
13897     CASE_CFN_ROUND:
13898     CASE_CFN_ROUND_FN:
13899     CASE_CFN_ROUNDEVEN:
13900     CASE_CFN_ROUNDEVEN_FN:
13901     CASE_CFN_SCALB:
13902     CASE_CFN_SCALBLN:
13903     CASE_CFN_SCALBN:
13904     CASE_CFN_SIGNBIT:
13905     CASE_CFN_SIGNIFICAND:
13906     CASE_CFN_SINH:
13907     CASE_CFN_TANH:
13908     CASE_CFN_TRUNC:
13909     CASE_CFN_TRUNC_FN:
13910       /* True if the 1st argument is nonnegative.  */
13911       return RECURSE (arg0);
13912 
13913     CASE_CFN_FMAX:
13914     CASE_CFN_FMAX_FN:
13915       /* True if the 1st OR 2nd arguments are nonnegative.  */
13916       return RECURSE (arg0) || RECURSE (arg1);
13917 
13918     CASE_CFN_FMIN:
13919     CASE_CFN_FMIN_FN:
13920       /* True if the 1st AND 2nd arguments are nonnegative.  */
13921       return RECURSE (arg0) && RECURSE (arg1);
13922 
13923     CASE_CFN_COPYSIGN:
13924     CASE_CFN_COPYSIGN_FN:
13925       /* True if the 2nd argument is nonnegative.  */
13926       return RECURSE (arg1);
13927 
13928     CASE_CFN_POWI:
13929       /* True if the 1st argument is nonnegative or the second
13930 	 argument is an even integer.  */
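      /* E.g. powi (x, 4) is (x*x)*(x*x) and can never be negative.  */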
13931       if (TREE_CODE (arg1) == INTEGER_CST
13932 	  && (TREE_INT_CST_LOW (arg1) & 1) == 0)
13933 	return true;
13934       return RECURSE (arg0);
13935 
13936     CASE_CFN_POW:
13937       /* True if the 1st argument is nonnegative or the second
13938 	 argument is an even integer valued real.  */
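      /* E.g. pow (x, 2.0) behaves like x*x and is nonnegative for any x,
	 while pow (x, 2.5) is NaN for negative x.  */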
13939       if (TREE_CODE (arg1) == REAL_CST)
13940 	{
13941 	  REAL_VALUE_TYPE c;
13942 	  HOST_WIDE_INT n;
13943 
13944 	  c = TREE_REAL_CST (arg1);
13945 	  n = real_to_integer (&c);
13946 	  if ((n & 1) == 0)
13947 	    {
13948 	      REAL_VALUE_TYPE cint;
13949 	      real_from_integer (&cint, VOIDmode, n, SIGNED);
13950 	      if (real_identical (&c, &cint))
13951 		return true;
13952 	    }
13953 	}
13954       return RECURSE (arg0);
13955 
13956     default:
13957       break;
13958     }
13959   return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
13960 }
13961 
13962 /* Return true if T is known to be non-negative.  If the return
13963    value is based on the assumption that signed overflow is undefined,
13964    set *STRICT_OVERFLOW_P to true; otherwise, don't change
13965    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
13966 
13967 static bool
13968 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13969 {
13970   enum tree_code code = TREE_CODE (t);
13971   if (TYPE_UNSIGNED (TREE_TYPE (t)))
13972     return true;
13973 
13974   switch (code)
13975     {
13976     case TARGET_EXPR:
13977       {
13978 	tree temp = TARGET_EXPR_SLOT (t);
13979 	t = TARGET_EXPR_INITIAL (t);
13980 
13981 	/* If the initializer is non-void, then it's a normal expression
13982 	   that will be assigned to the slot.  */
13983 	if (!VOID_TYPE_P (t))
13984 	  return RECURSE (t);
13985 
13986 	/* Otherwise, the initializer sets the slot in some way.  One common
13987 	   way is an assignment statement at the end of the initializer.  */
13988 	while (1)
13989 	  {
13990 	    if (TREE_CODE (t) == BIND_EXPR)
13991 	      t = expr_last (BIND_EXPR_BODY (t));
13992 	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13993 		     || TREE_CODE (t) == TRY_CATCH_EXPR)
13994 	      t = expr_last (TREE_OPERAND (t, 0));
13995 	    else if (TREE_CODE (t) == STATEMENT_LIST)
13996 	      t = expr_last (t);
13997 	    else
13998 	      break;
13999 	  }
14000 	if (TREE_CODE (t) == MODIFY_EXPR
14001 	    && TREE_OPERAND (t, 0) == temp)
14002 	  return RECURSE (TREE_OPERAND (t, 1));
14003 
14004 	return false;
14005       }
14006 
14007     case CALL_EXPR:
14008       {
14009 	tree arg0 = call_expr_nargs (t) > 0 ?  CALL_EXPR_ARG (t, 0) : NULL_TREE;
14010 	tree arg1 = call_expr_nargs (t) > 1 ?  CALL_EXPR_ARG (t, 1) : NULL_TREE;
14011 
14012 	return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14013 					      get_call_combined_fn (t),
14014 					      arg0,
14015 					      arg1,
14016 					      strict_overflow_p, depth);
14017       }
14018     case COMPOUND_EXPR:
14019     case MODIFY_EXPR:
14020       return RECURSE (TREE_OPERAND (t, 1));
14021 
14022     case BIND_EXPR:
14023       return RECURSE (expr_last (TREE_OPERAND (t, 1)));
14024 
14025     case SAVE_EXPR:
14026       return RECURSE (TREE_OPERAND (t, 0));
14027 
14028     default:
14029       return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
14030     }
14031 }
14032 
14033 #undef RECURSE
14034 #undef tree_expr_nonnegative_warnv_p
14035 
14036 /* Return true if T is known to be non-negative.  If the return
14037    value is based on the assumption that signed overflow is undefined,
14038    set *STRICT_OVERFLOW_P to true; otherwise, don't change
14039    *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */
14040 
14041 bool
14042 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
14043 {
14044   enum tree_code code;
14045   if (t == error_mark_node)
14046     return false;
14047 
14048   code = TREE_CODE (t);
14049   switch (TREE_CODE_CLASS (code))
14050     {
14051     case tcc_binary:
14052     case tcc_comparison:
14053       return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14054 					      TREE_TYPE (t),
14055 					      TREE_OPERAND (t, 0),
14056 					      TREE_OPERAND (t, 1),
14057 					      strict_overflow_p, depth);
14058 
14059     case tcc_unary:
14060       return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14061 					     TREE_TYPE (t),
14062 					     TREE_OPERAND (t, 0),
14063 					     strict_overflow_p, depth);
14064 
14065     case tcc_constant:
14066     case tcc_declaration:
14067     case tcc_reference:
14068       return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
14069 
14070     default:
14071       break;
14072     }
14073 
14074   switch (code)
14075     {
14076     case TRUTH_AND_EXPR:
14077     case TRUTH_OR_EXPR:
14078     case TRUTH_XOR_EXPR:
14079       return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14080 					      TREE_TYPE (t),
14081 					      TREE_OPERAND (t, 0),
14082 					      TREE_OPERAND (t, 1),
14083 					      strict_overflow_p, depth);
14084     case TRUTH_NOT_EXPR:
14085       return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14086 					     TREE_TYPE (t),
14087 					     TREE_OPERAND (t, 0),
14088 					     strict_overflow_p, depth);
14089 
14090     case COND_EXPR:
14091     case CONSTRUCTOR:
14092     case OBJ_TYPE_REF:
14093     case ASSERT_EXPR:
14094     case ADDR_EXPR:
14095     case WITH_SIZE_EXPR:
14096     case SSA_NAME:
14097       return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
14098 
14099     default:
14100       return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
14101     }
14102 }
14103 
14104 /* Return true if `t' is known to be non-negative.  Handle warnings
14105    about undefined signed overflow.  */
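/* A typical use, sketched here purely for illustration:

     if (tree_expr_nonnegative_p (x))
       ... simplify ABS_EXPR <x> to x ...

   match.pd and callers in this file guard sign-sensitive
   transformations this way.  */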
14106 
14107 bool
14108 tree_expr_nonnegative_p (tree t)
14109 {
14110   bool ret, strict_overflow_p;
14111 
14112   strict_overflow_p = false;
14113   ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14114   if (strict_overflow_p)
14115     fold_overflow_warning (("assuming signed overflow does not occur when "
14116 			    "determining that expression is always "
14117 			    "non-negative"),
14118 			   WARN_STRICT_OVERFLOW_MISC);
14119   return ret;
14120 }
14121 
14122 
14123 /* Return true when (CODE OP0) is an address and is known to be nonzero.
14124    For floating point we further ensure that the value is not denormal.
14125    Similar logic is present in nonzero_address in rtlanal.h.
14126 
14127    If the return value is based on the assumption that signed overflow
14128    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14129    change *STRICT_OVERFLOW_P.  */
14130 
14131 bool
14132 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14133 				 bool *strict_overflow_p)
14134 {
14135   switch (code)
14136     {
14137     case ABS_EXPR:
14138       return tree_expr_nonzero_warnv_p (op0,
14139 					strict_overflow_p);
14140 
14141     case NOP_EXPR:
14142       {
14143 	tree inner_type = TREE_TYPE (op0);
14144 	tree outer_type = type;
14145 
14146 	return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14147 		&& tree_expr_nonzero_warnv_p (op0,
14148 					      strict_overflow_p));
14149       }
14150       break;
14151 
14152     case NON_LVALUE_EXPR:
14153       return tree_expr_nonzero_warnv_p (op0,
14154 					strict_overflow_p);
14155 
14156     default:
14157       break;
14158   }
14159 
14160   return false;
14161 }
14162 
14163 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
14164    For floating point we further ensure that the value is not denormal.
14165    Similar logic is present in nonzero_address in rtlanal.h.
14166 
14167    If the return value is based on the assumption that signed overflow
14168    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14169    change *STRICT_OVERFLOW_P.  */
14170 
14171 bool
14172 tree_binary_nonzero_warnv_p (enum tree_code code,
14173 			     tree type,
14174 			     tree op0,
14175 			     tree op1, bool *strict_overflow_p)
14176 {
14177   bool sub_strict_overflow_p;
14178   switch (code)
14179     {
14180     case POINTER_PLUS_EXPR:
14181     case PLUS_EXPR:
14182       if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
14183 	{
14184 	  /* In the presence of negative values it is hard to say
14185 	     anything definite.  */
14186 	  sub_strict_overflow_p = false;
14187 	  if (!tree_expr_nonnegative_warnv_p (op0,
14188 					      &sub_strict_overflow_p)
14189 	      || !tree_expr_nonnegative_warnv_p (op1,
14190 						 &sub_strict_overflow_p))
14191 	    return false;
14192 	  /* One of the operands must be positive and the other non-negative.  */
14193 	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
14194 	     overflows, on a twos-complement machine the sum of two
14195 	     nonnegative numbers can never be zero.  */
14196 	  return (tree_expr_nonzero_warnv_p (op0,
14197 					     strict_overflow_p)
14198 		  || tree_expr_nonzero_warnv_p (op1,
14199 						strict_overflow_p));
14200 	}
14201       break;
14202 
14203     case MULT_EXPR:
14204       if (TYPE_OVERFLOW_UNDEFINED (type))
14205 	{
14206 	  if (tree_expr_nonzero_warnv_p (op0,
14207 					 strict_overflow_p)
14208 	      && tree_expr_nonzero_warnv_p (op1,
14209 					    strict_overflow_p))
14210 	    {
14211 	      *strict_overflow_p = true;
14212 	      return true;
14213 	    }
14214 	}
14215       break;
14216 
14217     case MIN_EXPR:
14218       sub_strict_overflow_p = false;
14219       if (tree_expr_nonzero_warnv_p (op0,
14220 				     &sub_strict_overflow_p)
14221 	  && tree_expr_nonzero_warnv_p (op1,
14222 					&sub_strict_overflow_p))
14223 	{
14224 	  if (sub_strict_overflow_p)
14225 	    *strict_overflow_p = true;
14226 	}
14227       break;
14228 
14229     case MAX_EXPR:
14230       sub_strict_overflow_p = false;
14231       if (tree_expr_nonzero_warnv_p (op0,
14232 				     &sub_strict_overflow_p))
14233 	{
14234 	  if (sub_strict_overflow_p)
14235 	    *strict_overflow_p = true;
14236 
14237 	  /* When both operands are nonzero, MAX must be too.  */
14238 	  if (tree_expr_nonzero_warnv_p (op1,
14239 					 strict_overflow_p))
14240 	    return true;
14241 
14242 	  /* MAX where operand 0 is positive is positive.  */
14243 	  return tree_expr_nonnegative_warnv_p (op0,
14244 					       strict_overflow_p);
14245 	}
14246       /* MAX where operand 1 is positive is positive.  */
14247       else if (tree_expr_nonzero_warnv_p (op1,
14248 					  &sub_strict_overflow_p)
14249 	       && tree_expr_nonnegative_warnv_p (op1,
14250 						 &sub_strict_overflow_p))
14251 	{
14252 	  if (sub_strict_overflow_p)
14253 	    *strict_overflow_p = true;
14254 	  return true;
14255 	}
14256       break;
14257 
14258     case BIT_IOR_EXPR:
14259       return (tree_expr_nonzero_warnv_p (op1,
14260 					 strict_overflow_p)
14261 	      || tree_expr_nonzero_warnv_p (op0,
14262 					    strict_overflow_p));
14263 
14264     default:
14265       break;
14266   }
14267 
14268   return false;
14269 }
14270 
14271 /* Return true when T is an address and is known to be nonzero.
14272    For floating point we further ensure that T is not denormal.
14273    Similar logic is present in nonzero_address in rtlanal.h.
14274 
14275    If the return value is based on the assumption that signed overflow
14276    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14277    change *STRICT_OVERFLOW_P.  */
14278 
14279 bool
14280 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
14281 {
14282   bool sub_strict_overflow_p;
14283   switch (TREE_CODE (t))
14284     {
14285     case INTEGER_CST:
14286       return !integer_zerop (t);
14287 
14288     case ADDR_EXPR:
14289       {
14290 	tree base = TREE_OPERAND (t, 0);
14291 
14292 	if (!DECL_P (base))
14293 	  base = get_base_address (base);
14294 
14295 	if (base && TREE_CODE (base) == TARGET_EXPR)
14296 	  base = TARGET_EXPR_SLOT (base);
14297 
14298 	if (!base)
14299 	  return false;
14300 
14301 	/* For objects in the symbol table, check whether we know they are non-zero.
14302 	   Don't do anything for variables and functions before symtab is built;
14303 	   it is quite possible that they will be declared weak later.  */
14304 	int nonzero_addr = maybe_nonzero_address (base);
14305 	if (nonzero_addr >= 0)
14306 	  return nonzero_addr;
14307 
14308 	/* Constants are never weak.  */
14309 	if (CONSTANT_CLASS_P (base))
14310 	  return true;
14311 
14312 	return false;
14313       }
14314 
14315     case COND_EXPR:
14316       sub_strict_overflow_p = false;
14317       if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14318 				     &sub_strict_overflow_p)
14319 	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
14320 					&sub_strict_overflow_p))
14321 	{
14322 	  if (sub_strict_overflow_p)
14323 	    *strict_overflow_p = true;
14324 	  return true;
14325 	}
14326       break;
14327 
14328     case SSA_NAME:
14329       if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
14330 	break;
14331       return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));
14332 
14333     default:
14334       break;
14335     }
14336   return false;
14337 }
14338 
14339 #define integer_valued_real_p(X) \
14340   _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
14341 
14342 #define RECURSE(X) \
14343   ((integer_valued_real_p) (X, depth + 1))
14344 
14345 /* Return true if the floating point result of (CODE OP0) has an
14346    integer value.  We also allow +Inf, -Inf and NaN to be considered
14347    integer values. Return false for signaling NaN.
14348 
14349    DEPTH is the current nesting depth of the query.  */
14350 
14351 bool
14352 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
14353 {
14354   switch (code)
14355     {
14356     case FLOAT_EXPR:
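      /* An integer converted to floating point, e.g. (double) i, is
	 integer-valued by construction.  */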
14357       return true;
14358 
14359     case ABS_EXPR:
14360       return RECURSE (op0);
14361 
14362     CASE_CONVERT:
14363       {
14364 	tree type = TREE_TYPE (op0);
14365 	if (TREE_CODE (type) == INTEGER_TYPE)
14366 	  return true;
14367 	if (TREE_CODE (type) == REAL_TYPE)
14368 	  return RECURSE (op0);
14369 	break;
14370       }
14371 
14372     default:
14373       break;
14374     }
14375   return false;
14376 }
14377 
14378 /* Return true if the floating point result of (CODE OP0 OP1) has an
14379    integer value.  We also allow +Inf, -Inf and NaN to be considered
14380    integer values. Return false for signaling NaN.
14381 
14382    DEPTH is the current nesting depth of the query.  */
14383 
14384 bool
14385 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
14386 {
14387   switch (code)
14388     {
14389     case PLUS_EXPR:
14390     case MINUS_EXPR:
14391     case MULT_EXPR:
14392     case MIN_EXPR:
14393     case MAX_EXPR:
14394       return RECURSE (op0) && RECURSE (op1);
14395 
14396     default:
14397       break;
14398     }
14399   return false;
14400 }
14401 
14402 /* Return true if the floating point result of calling FN with arguments
14403    ARG0 and ARG1 has an integer value.  We also allow +Inf, -Inf and NaN to be
14404    considered integer values.  Return false for signaling NaN.  If FN
14405    takes fewer than 2 arguments, the remaining ARGn are null.
14406 
14407    DEPTH is the current nesting depth of the query.  */
14408 
14409 bool
14410 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
14411 {
14412   switch (fn)
14413     {
14414     CASE_CFN_CEIL:
14415     CASE_CFN_CEIL_FN:
14416     CASE_CFN_FLOOR:
14417     CASE_CFN_FLOOR_FN:
14418     CASE_CFN_NEARBYINT:
14419     CASE_CFN_NEARBYINT_FN:
14420     CASE_CFN_RINT:
14421     CASE_CFN_RINT_FN:
14422     CASE_CFN_ROUND:
14423     CASE_CFN_ROUND_FN:
14424     CASE_CFN_ROUNDEVEN:
14425     CASE_CFN_ROUNDEVEN_FN:
14426     CASE_CFN_TRUNC:
14427     CASE_CFN_TRUNC_FN:
14428       return true;
14429 
14430     CASE_CFN_FMIN:
14431     CASE_CFN_FMIN_FN:
14432     CASE_CFN_FMAX:
14433     CASE_CFN_FMAX_FN:
14434       return RECURSE (arg0) && RECURSE (arg1);
14435 
14436     default:
14437       break;
14438     }
14439   return false;
14440 }
14441 
14442 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
14443    has an integer value.  We also allow +Inf, -Inf and NaN to be
14444    considered integer values. Return false for signaling NaN.
14445 
14446    DEPTH is the current nesting depth of the query.  */
14447 
14448 bool
14449 integer_valued_real_single_p (tree t, int depth)
14450 {
14451   switch (TREE_CODE (t))
14452     {
14453     case REAL_CST:
14454       return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
14455 
14456     case COND_EXPR:
14457       return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
14458 
14459     case SSA_NAME:
14460       /* Limit the depth of recursion to avoid quadratic behavior.
14461 	 This is expected to catch almost all occurrences in practice.
14462 	 If this code misses important cases that unbounded recursion
14463 	 would not, passes that need this information could be revised
14464 	 to provide it through dataflow propagation.  */
14465       return (!name_registered_for_update_p (t)
14466 	      && depth < param_max_ssa_name_query_depth
14467 	      && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
14468 						    depth));
14469 
14470     default:
14471       break;
14472     }
14473   return false;
14474 }
14475 
14476 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
14477    has an integer value.  We also allow +Inf, -Inf and NaN to be
14478    considered integer values. Return false for signaling NaN.
14479 
14480    DEPTH is the current nesting depth of the query.  */
14481 
14482 static bool
14483 integer_valued_real_invalid_p (tree t, int depth)
14484 {
14485   switch (TREE_CODE (t))
14486     {
14487     case COMPOUND_EXPR:
14488     case MODIFY_EXPR:
14489     case BIND_EXPR:
14490       return RECURSE (TREE_OPERAND (t, 1));
14491 
14492     case SAVE_EXPR:
14493       return RECURSE (TREE_OPERAND (t, 0));
14494 
14495     default:
14496       break;
14497     }
14498   return false;
14499 }
14500 
14501 #undef RECURSE
14502 #undef integer_valued_real_p
14503 
14504 /* Return true if the floating point expression T has an integer value.
14505    We also allow +Inf, -Inf and NaN to be considered integer values.
14506    Return false for signaling NaN.
14507 
14508    DEPTH is the current nesting depth of the query.  */
14509 
14510 bool
14511 integer_valued_real_p (tree t, int depth)
14512 {
14513   if (t == error_mark_node)
14514     return false;
14515 
14516   STRIP_ANY_LOCATION_WRAPPER (t);
14517 
14518   tree_code code = TREE_CODE (t);
14519   switch (TREE_CODE_CLASS (code))
14520     {
14521     case tcc_binary:
14522     case tcc_comparison:
14523       return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
14524 					   TREE_OPERAND (t, 1), depth);
14525 
14526     case tcc_unary:
14527       return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
14528 
14529     case tcc_constant:
14530     case tcc_declaration:
14531     case tcc_reference:
14532       return integer_valued_real_single_p (t, depth);
14533 
14534     default:
14535       break;
14536     }
14537 
14538   switch (code)
14539     {
14540     case COND_EXPR:
14541     case SSA_NAME:
14542       return integer_valued_real_single_p (t, depth);
14543 
14544     case CALL_EXPR:
14545       {
14546 	tree arg0 = (call_expr_nargs (t) > 0
14547 		     ? CALL_EXPR_ARG (t, 0)
14548 		     : NULL_TREE);
14549 	tree arg1 = (call_expr_nargs (t) > 1
14550 		     ? CALL_EXPR_ARG (t, 1)
14551 		     : NULL_TREE);
14552 	return integer_valued_real_call_p (get_call_combined_fn (t),
14553 					   arg0, arg1, depth);
14554       }
14555 
14556     default:
14557       return integer_valued_real_invalid_p (t, depth);
14558     }
14559 }
14560 
14561 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
14562    attempt to fold the expression to a constant without modifying TYPE,
14563    OP0 or OP1.
14564 
14565    If the expression could be simplified to a constant, then return
14566    the constant.  If the expression would not be simplified to a
14567    constant, then return NULL_TREE.  */
14568 
14569 tree
14570 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
14571 {
14572   tree tem = fold_binary (code, type, op0, op1);
14573   return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14574 }
14575 
14576 /* Given the components of a unary expression CODE, TYPE and OP0,
14577    attempt to fold the expression to a constant without modifying
14578    TYPE or OP0.
14579 
14580    If the expression could be simplified to a constant, then return
14581    the constant.  If the expression would not be simplified to a
14582    constant, then return NULL_TREE.  */
14583 
14584 tree
14585 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
14586 {
14587   tree tem = fold_unary (code, type, op0);
14588   return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14589 }
14590 
14591 /* If EXP represents referencing an element in a constant string
14592    (either via pointer arithmetic or array indexing), return the
14593    tree representing the value accessed, otherwise return NULL.  */
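/* For example, both "abc"[1] and *("abc" + 1) fold to the character
   constant 'b' in the element type of the string.  */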
14594 
14595 tree
14596 fold_read_from_constant_string (tree exp)
14597 {
14598   if ((TREE_CODE (exp) == INDIRECT_REF
14599        || TREE_CODE (exp) == ARRAY_REF)
14600       && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
14601     {
14602       tree exp1 = TREE_OPERAND (exp, 0);
14603       tree index;
14604       tree string;
14605       location_t loc = EXPR_LOCATION (exp);
14606 
14607       if (TREE_CODE (exp) == INDIRECT_REF)
14608 	string = string_constant (exp1, &index, NULL, NULL);
14609       else
14610 	{
14611 	  tree low_bound = array_ref_low_bound (exp);
14612 	  index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
14613 
14614 	  /* Optimize the special-case of a zero lower bound.
14615 
14616 	     We convert the low_bound to sizetype to avoid some problems
14617 	     with constant folding.  (E.g. suppose the lower bound is 1,
14618 	     and its mode is QI.  Without the conversion, (ARRAY
14619 	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
14620 	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
14621 	  if (! integer_zerop (low_bound))
14622 	    index = size_diffop_loc (loc, index,
14623 				 fold_convert_loc (loc, sizetype, low_bound));
14624 
14625 	  string = exp1;
14626 	}
14627 
14628       scalar_int_mode char_mode;
14629       if (string
14630 	  && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
14631 	  && TREE_CODE (string) == STRING_CST
14632 	  && TREE_CODE (index) == INTEGER_CST
14633 	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
14634 	  && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))),
14635 			  &char_mode)
14636 	  && GET_MODE_SIZE (char_mode) == 1)
14637 	return build_int_cst_type (TREE_TYPE (exp),
14638 				   (TREE_STRING_POINTER (string)
14639 				    [TREE_INT_CST_LOW (index)]));
14640     }
14641   return NULL;
14642 }
14643 
14644 /* Folds a read from vector element at IDX of vector ARG.  */
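/* For instance, element 1 of the VECTOR_CST {1, 2, 3, 4} folds to 2;
   elements omitted from a CONSTRUCTOR read as zero.  */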
14645 
14646 tree
14647 fold_read_from_vector (tree arg, poly_uint64 idx)
14648 {
14649   unsigned HOST_WIDE_INT i;
14650   if (known_lt (idx, TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)))
14651       && known_ge (idx, 0u)
14652       && idx.is_constant (&i))
14653     {
14654       if (TREE_CODE (arg) == VECTOR_CST)
14655 	return VECTOR_CST_ELT (arg, i);
14656       else if (TREE_CODE (arg) == CONSTRUCTOR)
14657 	{
14658 	  if (i >= CONSTRUCTOR_NELTS (arg))
14659 	    return build_zero_cst (TREE_TYPE (TREE_TYPE (arg)));
14660 	  return CONSTRUCTOR_ELT (arg, i)->value;
14661 	}
14662     }
14663   return NULL_TREE;
14664 }
14665 
14666 /* Return the tree for neg (ARG0) when ARG0 is known to be either
14667    an integer constant, real, or fixed-point constant.
14668 
14669    TYPE is the type of the result.  */
14670 
14671 static tree
14672 fold_negate_const (tree arg0, tree type)
14673 {
14674   tree t = NULL_TREE;
14675 
14676   switch (TREE_CODE (arg0))
14677     {
14678     case REAL_CST:
14679       t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
14680       break;
14681 
14682     case FIXED_CST:
14683       {
14684         FIXED_VALUE_TYPE f;
14685         bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
14686 					    &(TREE_FIXED_CST (arg0)), NULL,
14687 					    TYPE_SATURATING (type));
14688 	t = build_fixed (type, f);
14689 	/* Propagate overflow flags.  */
14690 	if (overflow_p | TREE_OVERFLOW (arg0))
14691 	  TREE_OVERFLOW (t) = 1;
14692 	break;
14693       }
14694 
14695     default:
14696       if (poly_int_tree_p (arg0))
14697 	{
14698 	  wi::overflow_type overflow;
14699 	  poly_wide_int res = wi::neg (wi::to_poly_wide (arg0), &overflow);
14700 	  t = force_fit_type (type, res, 1,
14701 			      (overflow && ! TYPE_UNSIGNED (type))
14702 			      || TREE_OVERFLOW (arg0));
14703 	  break;
14704 	}
14705 
14706       gcc_unreachable ();
14707     }
14708 
14709   return t;
14710 }
14711 
14712 /* Return the tree for abs (ARG0) when ARG0 is known to be either
14713    an integer constant or real constant.
14714 
14715    TYPE is the type of the result.  */
14716 
14717 tree
14718 fold_abs_const (tree arg0, tree type)
14719 {
14720   tree t = NULL_TREE;
14721 
14722   switch (TREE_CODE (arg0))
14723     {
14724     case INTEGER_CST:
14725       {
14726         /* If the value is unsigned or non-negative, then the absolute value
14727 	   is the same as the ordinary value.  */
14728 	wide_int val = wi::to_wide (arg0);
14729 	wi::overflow_type overflow = wi::OVF_NONE;
14730 	if (!wi::neg_p (val, TYPE_SIGN (TREE_TYPE (arg0))))
14731 	  ;
14732 
14733 	/* If the value is negative, then the absolute value is
14734 	   its negation.  */
14735 	else
14736 	  val = wi::neg (val, &overflow);
14737 
14738 	/* Force to the destination type, set TREE_OVERFLOW for signed
14739 	   TYPE only.  */
14740 	t = force_fit_type (type, val, 1, overflow | TREE_OVERFLOW (arg0));
14741       }
14742     break;
14743 
14744     case REAL_CST:
14745       if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
14746 	t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
14747       else
14748 	t = arg0;
14749       break;
14750 
14751     default:
14752       gcc_unreachable ();
14753     }
14754 
14755   return t;
14756 }
14757 
14758 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
14759    constant.  TYPE is the type of the result.  */
14760 
14761 static tree
14762 fold_not_const (const_tree arg0, tree type)
14763 {
14764   gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
14765 
14766   return force_fit_type (type, ~wi::to_wide (arg0), 0, TREE_OVERFLOW (arg0));
14767 }
14768 
14769 /* Given CODE, a relational operator, the target type, TYPE and two
14770    constant operands OP0 and OP1, return the result of the
14771    relational operation.  If the result is not a compile time
14772    constant, then return NULL_TREE.  */
14773 
14774 static tree
14775 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
14776 {
14777   int result, invert;
14778 
14779   /* From here on, the only cases we handle are when the result is
14780      known to be a constant.  */
14781 
14782   if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
14783     {
14784       const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
14785       const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
14786 
14787       /* Handle the cases where either operand is a NaN.  */
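      /* Per IEEE semantics, EQ and the ordered comparisons are false on
	 a NaN, while NE and the unordered comparisons are true; LT, LE,
	 GT, GE and LTGT may raise an exception on a NaN and so cannot
	 be folded when trapping math is in effect.  */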
14788       if (real_isnan (c0) || real_isnan (c1))
14789 	{
14790 	  switch (code)
14791 	    {
14792 	    case EQ_EXPR:
14793 	    case ORDERED_EXPR:
14794 	      result = 0;
14795 	      break;
14796 
14797 	    case NE_EXPR:
14798 	    case UNORDERED_EXPR:
14799 	    case UNLT_EXPR:
14800 	    case UNLE_EXPR:
14801 	    case UNGT_EXPR:
14802 	    case UNGE_EXPR:
14803 	    case UNEQ_EXPR:
14804               result = 1;
14805 	      break;
14806 
14807 	    case LT_EXPR:
14808 	    case LE_EXPR:
14809 	    case GT_EXPR:
14810 	    case GE_EXPR:
14811 	    case LTGT_EXPR:
14812 	      if (flag_trapping_math)
14813 		return NULL_TREE;
14814 	      result = 0;
14815 	      break;
14816 
14817 	    default:
14818 	      gcc_unreachable ();
14819 	    }
14820 
14821 	  return constant_boolean_node (result, type);
14822 	}
14823 
14824       return constant_boolean_node (real_compare (code, c0, c1), type);
14825     }
14826 
14827   if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
14828     {
14829       const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
14830       const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
14831       return constant_boolean_node (fixed_compare (code, c0, c1), type);
14832     }
14833 
14834   /* Handle equality/inequality of complex constants.  */
14835   if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
14836     {
14837       tree rcond = fold_relational_const (code, type,
14838 					  TREE_REALPART (op0),
14839 					  TREE_REALPART (op1));
14840       tree icond = fold_relational_const (code, type,
14841 					  TREE_IMAGPART (op0),
14842 					  TREE_IMAGPART (op1));
14843       if (code == EQ_EXPR)
14844 	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
14845       else if (code == NE_EXPR)
14846 	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
14847       else
14848 	return NULL_TREE;
14849     }
14850 
14851   if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
14852     {
14853       if (!VECTOR_TYPE_P (type))
14854 	{
14855 	  /* We have a vector comparison with a scalar boolean result.  */
14856 	  gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
14857 		      && known_eq (VECTOR_CST_NELTS (op0),
14858 				   VECTOR_CST_NELTS (op1)));
14859 	  unsigned HOST_WIDE_INT nunits;
14860 	  if (!VECTOR_CST_NELTS (op0).is_constant (&nunits))
14861 	    return NULL_TREE;
14862 	  for (unsigned i = 0; i < nunits; i++)
14863 	    {
14864 	      tree elem0 = VECTOR_CST_ELT (op0, i);
14865 	      tree elem1 = VECTOR_CST_ELT (op1, i);
14866 	      tree tmp = fold_relational_const (EQ_EXPR, type, elem0, elem1);
14867 	      if (tmp == NULL_TREE)
14868 		return NULL_TREE;
14869 	      if (integer_zerop (tmp))
14870 		return constant_boolean_node (code == NE_EXPR, type);
14871 	    }
14872 	  return constant_boolean_node (code == EQ_EXPR, type);
14873 	}
14874       tree_vector_builder elts;
14875       if (!elts.new_binary_operation (type, op0, op1, false))
14876 	return NULL_TREE;
14877       unsigned int count = elts.encoded_nelts ();
14878       for (unsigned i = 0; i < count; i++)
14879 	{
14880 	  tree elem_type = TREE_TYPE (type);
14881 	  tree elem0 = VECTOR_CST_ELT (op0, i);
14882 	  tree elem1 = VECTOR_CST_ELT (op1, i);
14883 
14884 	  tree tem = fold_relational_const (code, elem_type,
14885 					    elem0, elem1);
14886 
14887 	  if (tem == NULL_TREE)
14888 	    return NULL_TREE;
14889 
14890 	  elts.quick_push (build_int_cst (elem_type,
14891 					  integer_zerop (tem) ? 0 : -1));
14892 	}
14893 
14894       return elts.build ();
14895     }
14896 
14897   /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14898 
14899      To compute GT, swap the arguments and do LT.
14900      To compute GE, do LT and invert the result.
14901      To compute LE, swap the arguments, do LT and invert the result.
14902      To compute NE, do EQ and invert the result.
14903 
14904      Therefore, the code below must handle only EQ and LT.  */
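  /* For example, GE is handled as LT with the result inverted:
     op0 >= op1 is equivalent to !(op0 < op1).  */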
14905 
14906   if (code == LE_EXPR || code == GT_EXPR)
14907     {
14908       std::swap (op0, op1);
14909       code = swap_tree_comparison (code);
14910     }
14911 
14912   /* Note that it is safe to invert for real values here because we
14913      have already handled the one case where it matters.  */
14914 
14915   invert = 0;
14916   if (code == NE_EXPR || code == GE_EXPR)
14917     {
14918       invert = 1;
14919       code = invert_tree_comparison (code, false);
14920     }
14921 
14922   /* Compute a result for LT or EQ if args permit;
14923      otherwise return NULL_TREE.  */
14924   if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
14925     {
14926       if (code == EQ_EXPR)
14927 	result = tree_int_cst_equal (op0, op1);
14928       else
14929 	result = tree_int_cst_lt (op0, op1);
14930     }
14931   else
14932     return NULL_TREE;
14933 
14934   if (invert)
14935     result ^= 1;
14936   return constant_boolean_node (result, type);
14937 }
14938 
14939 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
14940    indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
14941    itself.  */
14942 
14943 tree
14944 fold_build_cleanup_point_expr (tree type, tree expr)
14945 {
14946   /* If the expression does not have side effects then we don't have to wrap
14947      it with a cleanup point expression.  */
14948   if (!TREE_SIDE_EFFECTS (expr))
14949     return expr;
14950 
14951   /* If the expression is a RETURN_EXPR, check whether its operand, or the
14952      right-hand side of the MODIFY_EXPR it contains, is free of side effects.
14953      If either has no side effects, we don't need to wrap the expression in a
14954      cleanup point expression.  Note we don't check the left-hand side of the
14955      MODIFY_EXPR because it should always be the return decl.  */
14956   if (TREE_CODE (expr) == RETURN_EXPR)
14957     {
14958       tree op = TREE_OPERAND (expr, 0);
14959       if (!op || !TREE_SIDE_EFFECTS (op))
14960         return expr;
14961       op = TREE_OPERAND (op, 1);
14962       if (!TREE_SIDE_EFFECTS (op))
14963         return expr;
14964     }
14965 
14966   return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
14967 }
14968 
14969 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14970    of an indirection through OP0, or NULL_TREE if no simplification is
14971    possible.  */
14972 
14973 tree
14974 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
14975 {
14976   tree sub = op0;
14977   tree subtype;
14978   poly_uint64 const_op01;
14979 
14980   STRIP_NOPS (sub);
14981   subtype = TREE_TYPE (sub);
14982   if (!POINTER_TYPE_P (subtype)
14983       || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
14984     return NULL_TREE;
14985 
14986   if (TREE_CODE (sub) == ADDR_EXPR)
14987     {
14988       tree op = TREE_OPERAND (sub, 0);
14989       tree optype = TREE_TYPE (op);
14990 
14991       /* *&CONST_DECL -> to the value of the const decl.  */
14992       if (TREE_CODE (op) == CONST_DECL)
14993 	return DECL_INITIAL (op);
14994       /* *&p => p;  make sure to handle *&"str"[cst] here.  */
14995       if (type == optype)
14996 	{
14997 	  tree fop = fold_read_from_constant_string (op);
14998 	  if (fop)
14999 	    return fop;
15000 	  else
15001 	    return op;
15002 	}
15003       /* *(foo *)&fooarray => fooarray[0] */
15004       else if (TREE_CODE (optype) == ARRAY_TYPE
15005 	       && type == TREE_TYPE (optype)
15006 	       && (!in_gimple_form
15007 		   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15008 	{
15009 	  tree type_domain = TYPE_DOMAIN (optype);
15010 	  tree min_val = size_zero_node;
15011 	  if (type_domain && TYPE_MIN_VALUE (type_domain))
15012 	    min_val = TYPE_MIN_VALUE (type_domain);
15013 	  if (in_gimple_form
15014 	      && TREE_CODE (min_val) != INTEGER_CST)
15015 	    return NULL_TREE;
15016 	  return build4_loc (loc, ARRAY_REF, type, op, min_val,
15017 			     NULL_TREE, NULL_TREE);
15018 	}
15019       /* *(foo *)&complexfoo => __real__ complexfoo */
15020       else if (TREE_CODE (optype) == COMPLEX_TYPE
15021 	       && type == TREE_TYPE (optype))
15022 	return fold_build1_loc (loc, REALPART_EXPR, type, op);
15023       /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15024       else if (VECTOR_TYPE_P (optype)
15025 	       && type == TREE_TYPE (optype))
15026 	{
15027 	  tree part_width = TYPE_SIZE (type);
15028 	  tree index = bitsize_int (0);
15029 	  return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
15030 				  index);
15031 	}
15032     }
15033 
15034   if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15035       && poly_int_tree_p (TREE_OPERAND (sub, 1), &const_op01))
15036     {
15037       tree op00 = TREE_OPERAND (sub, 0);
15038       tree op01 = TREE_OPERAND (sub, 1);
15039 
15040       STRIP_NOPS (op00);
15041       if (TREE_CODE (op00) == ADDR_EXPR)
15042 	{
15043 	  tree op00type;
15044 	  op00 = TREE_OPERAND (op00, 0);
15045 	  op00type = TREE_TYPE (op00);
15046 
15047 	  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15048 	  if (VECTOR_TYPE_P (op00type)
15049 	      && type == TREE_TYPE (op00type)
15050 	      /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
15051 		 but we want to treat offsets with MSB set as negative.
15052 		 For the code below negative offsets are invalid and
15053 		 TYPE_SIZE of the element is something unsigned, so
15054 		 check whether op01 fits into poly_int64, which implies
15055 		 it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
15056 		 then just use poly_uint64 because we want to treat the
15057 		 value as unsigned.  */
15058 	      && tree_fits_poly_int64_p (op01))
15059 	    {
15060 	      tree part_width = TYPE_SIZE (type);
15061 	      poly_uint64 max_offset
15062 		= (tree_to_uhwi (part_width) / BITS_PER_UNIT
15063 		   * TYPE_VECTOR_SUBPARTS (op00type));
15064 	      if (known_lt (const_op01, max_offset))
15065 		{
15066 		  tree index = bitsize_int (const_op01 * BITS_PER_UNIT);
15067 		  return fold_build3_loc (loc,
15068 					  BIT_FIELD_REF, type, op00,
15069 					  part_width, index);
15070 		}
15071 	    }
15072 	  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15073 	  else if (TREE_CODE (op00type) == COMPLEX_TYPE
15074 		   && type == TREE_TYPE (op00type))
15075 	    {
15076 	      if (known_eq (wi::to_poly_offset (TYPE_SIZE_UNIT (type)),
15077 			    const_op01))
15078 		return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
15079 	    }
15080 	  /* ((foo *)&fooarray)[1] => fooarray[1] */
15081 	  else if (TREE_CODE (op00type) == ARRAY_TYPE
15082 		   && type == TREE_TYPE (op00type))
15083 	    {
15084 	      tree type_domain = TYPE_DOMAIN (op00type);
15085 	      tree min_val = size_zero_node;
15086 	      if (type_domain && TYPE_MIN_VALUE (type_domain))
15087 		min_val = TYPE_MIN_VALUE (type_domain);
15088 	      poly_uint64 type_size, index;
15089 	      if (poly_int_tree_p (min_val)
15090 		  && poly_int_tree_p (TYPE_SIZE_UNIT (type), &type_size)
15091 		  && multiple_p (const_op01, type_size, &index))
15092 		{
15093 		  poly_offset_int off = index + wi::to_poly_offset (min_val);
15094 		  op01 = wide_int_to_tree (sizetype, off);
15095 		  return build4_loc (loc, ARRAY_REF, type, op00, op01,
15096 				     NULL_TREE, NULL_TREE);
15097 		}
15098 	    }
15099 	}
15100     }
15101 
15102   /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15103   if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15104       && type == TREE_TYPE (TREE_TYPE (subtype))
15105       && (!in_gimple_form
15106 	  || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15107     {
15108       tree type_domain;
15109       tree min_val = size_zero_node;
15110       sub = build_fold_indirect_ref_loc (loc, sub);
15111       type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15112       if (type_domain && TYPE_MIN_VALUE (type_domain))
15113 	min_val = TYPE_MIN_VALUE (type_domain);
15114       if (in_gimple_form
15115 	  && TREE_CODE (min_val) != INTEGER_CST)
15116 	return NULL_TREE;
15117       return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
15118 			 NULL_TREE);
15119     }
15120 
15121   return NULL_TREE;
15122 }
15123 
15124 /* Builds an expression for an indirection through T, simplifying some
15125    cases.  */
15126 
15127 tree
15128 build_fold_indirect_ref_loc (location_t loc, tree t)
15129 {
15130   tree type = TREE_TYPE (TREE_TYPE (t));
15131   tree sub = fold_indirect_ref_1 (loc, type, t);
15132 
15133   if (sub)
15134     return sub;
15135 
15136   return build1_loc (loc, INDIRECT_REF, type, t);
15137 }
15138 
15139 /* Given an INDIRECT_REF T, return either T or a simplified version.  */
15140 
15141 tree
15142 fold_indirect_ref_loc (location_t loc, tree t)
15143 {
15144   tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
15145 
15146   if (sub)
15147     return sub;
15148   else
15149     return t;
15150 }
15151 
15152 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15153    whose result is ignored.  The type of the returned tree need not be
15154    the same as the original expression.  */
15155 
15156 tree
15157 fold_ignored_result (tree t)
15158 {
15159   if (!TREE_SIDE_EFFECTS (t))
15160     return integer_zero_node;
15161 
15162   for (;;)
15163     switch (TREE_CODE_CLASS (TREE_CODE (t)))
15164       {
15165       case tcc_unary:
15166 	t = TREE_OPERAND (t, 0);
15167 	break;
15168 
15169       case tcc_binary:
15170       case tcc_comparison:
15171 	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15172 	  t = TREE_OPERAND (t, 0);
15173 	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15174 	  t = TREE_OPERAND (t, 1);
15175 	else
15176 	  return t;
15177 	break;
15178 
15179       case tcc_expression:
15180 	switch (TREE_CODE (t))
15181 	  {
15182 	  case COMPOUND_EXPR:
15183 	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15184 	      return t;
15185 	    t = TREE_OPERAND (t, 0);
15186 	    break;
15187 
15188 	  case COND_EXPR:
15189 	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15190 		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15191 	      return t;
15192 	    t = TREE_OPERAND (t, 0);
15193 	    break;
15194 
15195 	  default:
15196 	    return t;
15197 	  }
15198 	break;
15199 
15200       default:
15201 	return t;
15202       }
15203 }
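
/* Illustrative example (not part of GCC): for an expression whose
   value is discarded, such as

     f () + 42

   the PLUS_EXPR's second operand has no side effects, so the loop
   above descends into operand 0 and returns the bare CALL_EXPR for f,
   which is all that needs to be evaluated.  */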

/* Return the value of VALUE, rounded up to a multiple of DIVISOR.  */

tree
round_up_loc (location_t loc, tree value, unsigned int divisor)
{
  tree div = NULL_TREE;

  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given an
     INTEGER_CST, because for a constant this check is more expensive
     than just doing the rounding itself.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (pow2_or_zerop (divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
	{
	  wide_int val = wi::to_wide (value);
	  bool overflow_p;

	  if ((val & (divisor - 1)) == 0)
	    return value;

	  overflow_p = TREE_OVERFLOW (value);
	  val += divisor - 1;
	  val &= (int) -divisor;
	  if (val == 0)
	    overflow_p = true;

	  return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
	}
      else
	{
	  tree t;

	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
	  value = size_binop_loc (loc, PLUS_EXPR, value, t);
	  t = build_int_cst (TREE_TYPE (value), - (int) divisor);
	  value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
	}
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
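
/* Worked example for the power-of-two path above: rounding 13 up to a
   multiple of 8 computes

     (13 + 7) & -8  =  20 & ~7  =  16

   and for a non-constant VALUE the same expression tree,
   (VALUE + 7) & -8, is built with size_binop_loc.  */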

/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given an
     INTEGER_CST, because for a constant this check is more expensive
     than just doing the rounding itself.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (pow2_or_zerop (divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
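
/* Worked example for the power-of-two path above: rounding 13 down to
   a multiple of 8 is a single mask,

     13 & -8  =  13 & ~7  =  8

   with no addition needed, which is why this case is simpler than the
   corresponding one in round_up_loc.  */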

/* Return a pointer to the base of the object addressed by EXP and
   extract the information about the offset of the access, storing it
   in PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
				  poly_int64_pod *pbitpos, tree *poffset)
{
  tree core;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  poly_int64 bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &reversep,
				  &volatilep);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      core = TREE_OPERAND (exp, 0);
      STRIP_NOPS (core);
      *pbitpos = 0;
      *poffset = TREE_OPERAND (exp, 1);
      if (poly_int_tree_p (*poffset))
	{
	  poly_offset_int tem
	    = wi::sext (wi::to_poly_offset (*poffset),
			TYPE_PRECISION (TREE_TYPE (*poffset)));
	  tem <<= LOG2_BITS_PER_UNIT;
	  if (tem.to_shwi (pbitpos))
	    *poffset = NULL_TREE;
	}
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
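
/* Illustrative decomposition (hypothetical trees, not part of GCC):
   for EXP == &s.a[i] with 4-byte elements at byte offset 8 within S,
   get_inner_reference yields

     core     = &s
     *pbitpos = 64         (8 bytes, in bits)
     *poffset = i * 4      (the variable part, in bytes)

   while for EXP == p p+ 16 (POINTER_PLUS_EXPR with constant offset),
   CORE is P, *PBITPOS is 128 and *POFFSET is NULL_TREE.  */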

/* Returns true if the addresses of E1 and E2 differ by a constant,
   false otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, poly_int64_pod *diff)
{
  tree core1, core2;
  poly_int64 bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  poly_int64 bytepos1, bytepos2;
  if (!multiple_p (bitpos1, BITS_PER_UNIT, &bytepos1)
      || !multiple_p (bitpos2, BITS_PER_UNIT, &bytepos2)
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
	return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += bytepos1 - bytepos2;
  return true;
}
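
/* Illustrative example (not part of GCC): for E1 == &a[3] and
   E2 == &a[1] with 4-byte elements, both addresses share the core &a,
   the bit positions are 96 and 32, and

     *diff = 96 / BITS_PER_UNIT - 32 / BITS_PER_UNIT = 12 - 4 = 8

   i.e. the byte difference E1 - E2.  */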

/* Return OFF converted to a pointer offset type suitable as offset for
   POINTER_PLUS_EXPR.  Use location LOC for this conversion.  */

tree
convert_to_ptrofftype_loc (location_t loc, tree off)
{
  return fold_convert_loc (loc, sizetype, off);
}

/* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */

tree
fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
{
  return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
			  ptr, convert_to_ptrofftype_loc (loc, off));
}

/* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */

tree
fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
{
  return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
			  ptr, size_int (off));
}
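
/* Illustrative usage (hypothetical, not part of GCC): stepping a char
   pointer P four bytes forward,

     tree q = fold_build_pointer_plus_hwi_loc (loc, p, 4);

   builds POINTER_PLUS_EXPR <p, 4> with the offset made a sizetype
   constant via size_int, matching the offset type POINTER_PLUS_EXPR
   expects.  */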

/* Return a pointer P to a NUL-terminated string representing the sequence
   of constant characters referred to by SRC (or a subsequence of such
   characters within it if SRC is a reference to a string plus some
   constant offset).  If STRLEN is non-null, store the number of bytes
   in the string constant including the terminating NUL char.  *STRLEN is
   typically strlen(P) + 1 in the absence of embedded NUL characters.  */

const char *
c_getstr (tree src, unsigned HOST_WIDE_INT *strlen /* = NULL */)
{
  tree offset_node;
  tree mem_size;

  if (strlen)
    *strlen = 0;

  src = string_constant (src, &offset_node, &mem_size, NULL);
  if (src == 0)
    return NULL;

  unsigned HOST_WIDE_INT offset = 0;
  if (offset_node != NULL_TREE)
    {
      if (!tree_fits_uhwi_p (offset_node))
	return NULL;
      else
	offset = tree_to_uhwi (offset_node);
    }

  if (!tree_fits_uhwi_p (mem_size))
    return NULL;

  /* STRING_LENGTH is the size of the string literal, including any
     embedded NULs.  STRING_SIZE is the size of the array the string
     literal is stored in.  */
  unsigned HOST_WIDE_INT string_length = TREE_STRING_LENGTH (src);
  unsigned HOST_WIDE_INT string_size = tree_to_uhwi (mem_size);

  /* Ideally this would turn into a gcc_checking_assert over time.  */
  if (string_length > string_size)
    string_length = string_size;

  const char *string = TREE_STRING_POINTER (src);

  if (string_length == 0
      || offset >= string_size)
    return NULL;

  if (strlen)
    {
      /* Compute and store the length of the substring at OFFSET.
	 All offsets past the initial length refer to null strings.  */
      if (offset < string_length)
	*strlen = string_length - offset;
      else
	*strlen = 1;
    }
  else
    {
      tree eltype = TREE_TYPE (TREE_TYPE (src));
      /* Support only properly NUL-terminated single byte strings.  */
      if (tree_to_uhwi (TYPE_SIZE_UNIT (eltype)) != 1)
	return NULL;
      if (string[string_length - 1] != '\0')
	return NULL;
    }

  return offset < string_length ? string + offset : "";
}
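
/* Illustrative example (not part of GCC): for SRC representing
   &"hello"[2], a reference into the literal "hello",

     const char *p = c_getstr (src, &len);

   yields P == "llo" with LEN == 4 (the remaining characters plus the
   terminating NUL, since TREE_STRING_LENGTH counts the NUL); an
   offset past the initial length yields "" with LEN == 1.  */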

/* Given a tree T, compute which bits in T may be nonzero.  */

wide_int
tree_nonzero_bits (const_tree t)
{
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return wi::to_wide (t);
    case SSA_NAME:
      return get_nonzero_bits (t);
    case NON_LVALUE_EXPR:
    case SAVE_EXPR:
      return tree_nonzero_bits (TREE_OPERAND (t, 0));
    case BIT_AND_EXPR:
      return wi::bit_and (tree_nonzero_bits (TREE_OPERAND (t, 0)),
			  tree_nonzero_bits (TREE_OPERAND (t, 1)));
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 0)),
			 tree_nonzero_bits (TREE_OPERAND (t, 1)));
    case COND_EXPR:
      return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 1)),
			 tree_nonzero_bits (TREE_OPERAND (t, 2)));
    CASE_CONVERT:
      return wide_int::from (tree_nonzero_bits (TREE_OPERAND (t, 0)),
			     TYPE_PRECISION (TREE_TYPE (t)),
			     TYPE_SIGN (TREE_TYPE (TREE_OPERAND (t, 0))));
    case PLUS_EXPR:
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
	{
	  wide_int nzbits1 = tree_nonzero_bits (TREE_OPERAND (t, 0));
	  wide_int nzbits2 = tree_nonzero_bits (TREE_OPERAND (t, 1));
	  if (wi::bit_and (nzbits1, nzbits2) == 0)
	    return wi::bit_or (nzbits1, nzbits2);
	}
      break;
    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree type = TREE_TYPE (t);
	  wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
	  wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
				       TYPE_PRECISION (type));
	  return wi::neg_p (arg1)
		 ? wi::rshift (nzbits, -arg1, TYPE_SIGN (type))
		 : wi::lshift (nzbits, arg1);
	}
      break;
    case RSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree type = TREE_TYPE (t);
	  wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
	  wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
				       TYPE_PRECISION (type));
	  return wi::neg_p (arg1)
		 ? wi::lshift (nzbits, -arg1)
		 : wi::rshift (nzbits, arg1, TYPE_SIGN (type));
	}
      break;
    default:
      break;
    }

  return wi::shwi (-1, TYPE_PRECISION (TREE_TYPE (t)));
}
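
/* Illustrative example (not part of GCC): for T == (x & 0xF) << 2 the
   recursion above computes

     tree_nonzero_bits (x & 0xF)  =  0xF
     tree_nonzero_bits (T)        =  0x3C

   so a caller can conclude, e.g., that T's two low bits are zero.  */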

#if CHECKING_P

namespace selftest {

/* Helper functions for writing tests of folding trees.  */

/* Verify that the binary op (LHS CODE RHS) folds to CONSTANT.  */

static void
assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
			     tree constant)
{
  ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
}

/* Verify that the binary op (LHS CODE RHS) folds to a NON_LVALUE_EXPR
   wrapping WRAPPED_EXPR.  */

static void
assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
				 tree wrapped_expr)
{
  tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
  ASSERT_NE (wrapped_expr, result);
  ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
  ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
}

/* Verify that various arithmetic binary operations are folded
   correctly.  */

static void
test_arithmetic_folding ()
{
  tree type = integer_type_node;
  tree x = create_tmp_var_raw (type, "x");
  tree zero = build_zero_cst (type);
  tree one = build_int_cst (type, 1);

  /* Addition.  */
  /* 1 <-- (0 + 1) */
  assert_binop_folds_to_const (zero, PLUS_EXPR, one, one);
  assert_binop_folds_to_const (one, PLUS_EXPR, zero, one);

  /* (nonlvalue)x <-- (x + 0) */
  assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero, x);

  /* Subtraction.  */
  /* 0 <-- (x - x) */
  assert_binop_folds_to_const (x, MINUS_EXPR, x, zero);
  /* (nonlvalue)x <-- (x - 0) */
  assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero, x);

  /* Multiplication.  */
  /* 0 <-- (x * 0) */
  assert_binop_folds_to_const (x, MULT_EXPR, zero, zero);

  /* (nonlvalue)x <-- (x * 1) */
  assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one, x);
}

/* Verify that various binary operations on vectors are folded
   correctly.  */

static void
test_vector_folding ()
{
  tree inner_type = integer_type_node;
  tree type = build_vector_type (inner_type, 4);
  tree zero = build_zero_cst (type);
  tree one = build_one_cst (type);
  tree index = build_index_vector (type, 0, 1);

  /* Verify equality tests that return a scalar boolean result.  */
  tree res_type = boolean_type_node;
  ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
  ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, index, one)));
  ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
					       index, one)));
  ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type,
					       index, index)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
					      index, index)));
}

/* Verify folding of VEC_DUPLICATE_EXPRs.  */

static void
test_vec_duplicate_folding ()
{
  scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (ssizetype);
  machine_mode vec_mode = targetm.vectorize.preferred_simd_mode (int_mode);
  /* This will be 1 if VEC_MODE isn't a vector mode.  */
  poly_uint64 nunits = GET_MODE_NUNITS (vec_mode);

  tree type = build_vector_type (ssizetype, nunits);
  tree dup5_expr = fold_unary (VEC_DUPLICATE_EXPR, type, ssize_int (5));
  tree dup5_cst = build_vector_from_val (type, ssize_int (5));
  ASSERT_TRUE (operand_equal_p (dup5_expr, dup5_cst, 0));
}

/* Run all of the selftests within this file.  */

void
fold_const_c_tests ()
{
  test_arithmetic_folding ();
  test_vector_folding ();
  test_vec_duplicate_folding ();
}

} // namespace selftest

#endif /* CHECKING_P */