/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2021 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "memmodel.h"
#include "tm_p.h"
#include "tree-ssa-operands.h"
#include "optabs-query.h"
#include "cgraph.h"
#include "diagnostic-core.h"
#include "flags.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "expr.h"
#include "intl.h"
#include "langhooks.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "builtins.h"
#include "generic-match.h"
#include "gimple-fold.h"
#include "tree-into-ssa.h"
#include "md5.h"
#include "case-cfn-macros.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "selftest.h"
#include "attribs.h"
#include "tree-vector-builder.h"
#include "vec-perm-indices.h"
#include "asan.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit-based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
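
/* For illustration: the encoding gives each primitive relation its own
   bit -- bit 0 for "less than", bit 1 for "equal", bit 2 for "greater
   than" and bit 3 for "unordered" -- so every compound code is simply
   the bitwise OR of the primitive ones it contains:

     COMPCODE_LE   == COMPCODE_LT | COMPCODE_EQ                   (1|2 == 3)
     COMPCODE_LTGT == COMPCODE_LT | COMPCODE_GT                   (1|4 == 5)
     COMPCODE_NE   == COMPCODE_UNORD | COMPCODE_LT | COMPCODE_GT  (8|1|4 == 13)

   This is why combining two comparisons of the same operands with AND
   or OR reduces to ANDing or ORing their compcodes.  */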

static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static bool twoval_comparison_p (tree, tree *, tree *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static bool simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_view_convert_expr (tree, tree);
static tree fold_negate_expr (location_t, tree);


/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
	   || TREE_CODE (x) == TARGET_EXPR
	   || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}

/* If ARG2 divides ARG1 with zero remainder, carries out the exact
   division and returns the quotient.  Otherwise returns
   NULL_TREE.  */

tree
div_if_zero_remainder (const_tree arg1, const_tree arg2)
{
  widest_int quo;

  if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
			 SIGNED, &quo))
    return wide_int_to_tree (TREE_TYPE (arg1), quo);

  return NULL_TREE;
}
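
/* A minimal usage sketch (the constants are built here purely for
   illustration):

     tree twelve = build_int_cst (integer_type_node, 12);
     tree four = build_int_cst (integer_type_node, 4);
     div_if_zero_remainder (twelve, four);   // INTEGER_CST 3
     div_if_zero_remainder (four, twelve);   // NULL_TREE, since 4 % 12 != 0

   i.e. the helper only folds exact divisions and signals everything
   else with NULL_TREE.  */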

/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
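
/* A sketch of how a caller typically pairs the defer/undefer entry
   points above (the surrounding variables are hypothetical):

     fold_defer_overflow_warnings ();
     tree folded = fold (expr);
     bool want = TREE_CODE (folded) == INTEGER_CST;
     fold_undefer_overflow_warnings (want, stmt, 0);

   Any fold_overflow_warning raised while folding is buffered and only
   reaches the user if WANT is true; passing 0 for CODE means "use the
   deferred warning's own level".  */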

/* Return true if the built-in mathematical function specified by FN
   is odd, i.e. -f(x) == f(-x).  */

bool
negate_mathfn_p (combined_fn fn)
{
  switch (fn)
    {
    CASE_CFN_ASIN:
    CASE_CFN_ASINH:
    CASE_CFN_ATAN:
    CASE_CFN_ATANH:
    CASE_CFN_CASIN:
    CASE_CFN_CASINH:
    CASE_CFN_CATAN:
    CASE_CFN_CATANH:
    CASE_CFN_CBRT:
    CASE_CFN_CPROJ:
    CASE_CFN_CSIN:
    CASE_CFN_CSINH:
    CASE_CFN_CTAN:
    CASE_CFN_CTANH:
    CASE_CFN_ERF:
    CASE_CFN_LLROUND:
    CASE_CFN_LROUND:
    CASE_CFN_ROUND:
    CASE_CFN_ROUNDEVEN:
    CASE_CFN_ROUNDEVEN_FN:
    CASE_CFN_SIN:
    CASE_CFN_SINH:
    CASE_CFN_TAN:
    CASE_CFN_TANH:
    CASE_CFN_TRUNC:
      return true;

    CASE_CFN_LLRINT:
    CASE_CFN_LRINT:
    CASE_CFN_NEARBYINT:
    CASE_CFN_RINT:
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (wi::to_wide (t));
}
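
/* For example, in a signed 32-bit type only INT_MIN (0x80000000, the
   value with just the sign bit set) has no representable negation, so
   may_negate_without_overflow_p returns false exactly for it and true
   for every other signed constant.  Unsigned constants are rejected
   outright, since for them -CST wraps rather than truly negating.  */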

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_UNSIGNED (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	/* Steps don't prevent negation.  */
	unsigned int count = vector_cst_encoded_nelts (t);
	for (unsigned int i = 0; i < count; ++i)
	  if (!negate_expr_p (VECTOR_CST_ENCODED_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  || HONOR_SIGNED_ZEROS (element_mode (type))
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && ! TYPE_OVERFLOW_WRAPS (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	     && !HONOR_SIGNED_ZEROS (element_mode (type))
	     && (! ANY_INTEGRAL_TYPE_P (type)
		 || TYPE_OVERFLOW_WRAPS (type));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* INT_MIN/n * n doesn't overflow, but negating one of its operands
	 does if n is a (negative) power of two.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		 && (wi::popcount
		     (wi::abs (wi::to_wide (TREE_OPERAND (t, 0))))) != 1)
		|| (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
		    && (wi::popcount
			(wi::abs (wi::to_wide (TREE_OPERAND (t, 1))))) != 1)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
         B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
	  && negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if (! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
	  || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	  || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	      && ! integer_onep (TREE_OPERAND (t, 1))))
	return negate_expr_p (TREE_OPERAND (t, 1));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == element_precision (type) - 1)
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
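
/* For instance, with signed wrapping disabled, negate_expr_p is true
   for the INTEGER_CST 5 (no overflow), false for INT_MIN, true for
   A - B in a wrapping or floating type without signed zeros (it can
   become B - A), and false for A + B in a non-wrapping integral type,
   where neither rewrite is known to be overflow-safe.  */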

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr_1 (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && !TYPE_OVERFLOW_TRAPS (type)
	      && TYPE_OVERFLOW_WRAPS (type))
	  || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
	return tem;
      break;

    case POLY_INT_CST:
    case REAL_CST:
    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
	tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
	if (rpart && ipart)
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	tree_vector_builder elts;
	elts.new_unary_operation (type, t, true);
	unsigned int count = elts.encoded_nelts ();
	for (unsigned int i = 0; i < count; ++i)
	  {
	    tree elt = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elt == NULL_TREE)
	      return NULL_TREE;
	    elts.quick_push (elt);
	  }

	return elts.build ();
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
	return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* In general we can't negate A in A / B, because if A is INT_MIN and
	 B is not 1 we change the sign of the result.  */
      if (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
	  && negate_expr_p (TREE_OPERAND (t, 0)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				negate_expr (TREE_OPERAND (t, 0)),
				TREE_OPERAND (t, 1));
      /* In general we can't negate B in A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  */
      if ((! ANY_INTEGRAL_TYPE_P (TREE_TYPE (t))
	   || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
	   || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	       && ! integer_onep (TREE_OPERAND (t, 1))))
	  && negate_expr_p (TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, TREE_CODE (t), type,
				TREE_OPERAND (t, 0),
				negate_expr (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (get_call_combined_fn (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::to_wide (op1) == element_precision (type) - 1)
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* A wrapper for fold_negate_expr_1.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);
  tree tem = fold_negate_expr_1 (loc, t);
  if (tem == NULL_TREE)
    return NULL_TREE;
  return fold_convert_loc (loc, type, tem);
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}

/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.  If a variable part is of pointer
   type, it is negated after converting to TYPE.  This prevents us from
   generating an illegal MINUS pointer expression.  LOC is the location of
   the converted variable part.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, tree type, enum tree_code code,
	    tree *minus_varp, tree *conp, tree *minus_conp,
	    tree *litp, tree *minus_litp, int negate_p)
{
  tree var = 0;
  *minus_varp = 0;
  *conp = 0;
  *minus_conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == POINTER_PLUS_EXPR)
		   || (code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR
		       && (TREE_CODE (in) == PLUS_EXPR
			   || TREE_CODE (in) == POINTER_PLUS_EXPR)))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p && *conp)
	*minus_conp = *conp, *conp = 0;
      if (neg_var_p && var)
	*minus_varp = var, var = 0;
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -1 - X is folded to ~X, undo that here.  Do _not_ do this
         when IN is constant.  */
      *litp = build_minus_one_cst (type);
      *minus_varp = TREE_OPERAND (in, 0);
    }
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      if (*conp)
	*minus_conp = *conp, *conp = 0;
      else if (*minus_conp)
	*conp = *minus_conp, *minus_conp = 0;
      if (var)
	*minus_varp = var, var = 0;
      else if (*minus_varp)
	var = *minus_varp, *minus_varp = 0;
    }

  if (*litp
      && TREE_OVERFLOW_P (*litp))
    *litp = drop_tree_overflow (*litp);
  if (*minus_litp
      && TREE_OVERFLOW_P (*minus_litp))
    *minus_litp = drop_tree_overflow (*minus_litp);

  return var;
}
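
/* To make the decomposition concrete: splitting IN = a - 5 with
   CODE == PLUS_EXPR and NEGATE_P == 0 classifies the INTEGER_CST 5 as
   the subtracted literal and leaves a as the variable part, i.e. the
   call returns a with *MINUS_LITP == 5 and every other output null.
   With NEGATE_P == 1 the same input instead returns a null variable
   part with *MINUS_VARP == a and *LITP == 5, since the whole
   expression is being negated.  */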

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    {
      gcc_assert (t2 == 0 || code != MINUS_EXPR);
      return t2;
    }
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == PLUS_EXPR || TREE_CODE (t2) == PLUS_EXPR
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}

/* Combine two wide ints ARG1 and ARG2 under operation CODE to produce
   a new constant in RES.  Return FALSE if we don't know how to
   evaluate CODE at compile-time.  */

bool
wide_int_binop (wide_int &res,
		enum tree_code code, const wide_int &arg1, const wide_int &arg2,
		signop sign, wi::overflow_type *overflow)
{
  wide_int tmp;
  *overflow = wi::OVF_NONE;
  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
	return false;
      res = wi::lshift (arg1, arg2);
      break;

    case RSHIFT_EXPR:
      if (wi::neg_p (arg2))
	return false;
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      res = wi::rshift (arg1, arg2, sign);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
	{
	  tmp = -arg2;
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	  else
	    code = RROTATE_EXPR;
	}
      else
        tmp = arg2;

      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, tmp);
      else
	res = wi::lrotate (arg1, tmp);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_trunc (arg1, arg2, sign, overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_floor (arg1, arg2, sign, overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_ceil (arg1, arg2, sign, overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::div_round (arg1, arg2, sign, overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_trunc (arg1, arg2, sign, overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_floor (arg1, arg2, sign, overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_ceil (arg1, arg2, sign, overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
	return false;
      res = wi::mod_round (arg1, arg2, sign, overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return false;
    }
  return true;
}
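
/* Note how the rotate cases above normalize a negative count by
   flipping the direction, so e.g. rotating right by -3 is carried out
   as rotating left by 3.  A minimal caller sketch (the trees are
   hypothetical INTEGER_CSTs):

     wide_int res;
     wi::overflow_type ovf;
     if (wide_int_binop (res, PLUS_EXPR, wi::to_wide (arg1),
			 wi::to_wide (arg2), SIGNED, &ovf))
       ... use RES, checking OVF for signed wrap-around ...

   A FALSE return means "not foldable" (unknown code, negative shift
   count, division by zero), not an arithmetic error.  */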

/* Combine two poly int's ARG1 and ARG2 under operation CODE to
   produce a new constant in RES.  Return FALSE if we don't know how
   to evaluate CODE at compile-time.  */

static bool
poly_int_binop (poly_wide_int &res, enum tree_code code,
		const_tree arg1, const_tree arg2,
		signop sign, wi::overflow_type *overflow)
{
  gcc_assert (NUM_POLY_INT_COEFFS != 1);
  gcc_assert (poly_int_tree_p (arg1) && poly_int_tree_p (arg2));
  switch (code)
    {
    case PLUS_EXPR:
      res = wi::add (wi::to_poly_wide (arg1),
		     wi::to_poly_wide (arg2), sign, overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (wi::to_poly_wide (arg1),
		     wi::to_poly_wide (arg2), sign, overflow);
      break;

    case MULT_EXPR:
      if (TREE_CODE (arg2) == INTEGER_CST)
	res = wi::mul (wi::to_poly_wide (arg1),
		       wi::to_wide (arg2), sign, overflow);
      else if (TREE_CODE (arg1) == INTEGER_CST)
	res = wi::mul (wi::to_poly_wide (arg2),
		       wi::to_wide (arg1), sign, overflow);
      else
	/* This function returns bool, so returning NULL_TREE here relied
	   on the implicit pointer-to-bool conversion.  */
	return false;
      break;

    case LSHIFT_EXPR:
      if (TREE_CODE (arg2) == INTEGER_CST)
	res = wi::to_poly_wide (arg1) << wi::to_wide (arg2);
      else
	return false;
      break;

    case BIT_IOR_EXPR:
      if (TREE_CODE (arg2) != INTEGER_CST
	  || !can_ior_p (wi::to_poly_wide (arg1), wi::to_wide (arg2),
			 &res))
	return false;
      break;

    default:
      return false;
    }
  return true;
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE to
   produce a new constant.  Return NULL_TREE if we don't know how to
   evaluate CODE at compile-time.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2,
		 int overflowable)
{
  poly_wide_int poly_res;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  wi::overflow_type overflow = wi::OVF_NONE;

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      wide_int warg1 = wi::to_wide (arg1), res;
      wide_int warg2 = wi::to_wide (arg2, TYPE_PRECISION (type));
      if (!wide_int_binop (res, code, warg1, warg2, sign, &overflow))
	return NULL_TREE;
      poly_res = res;
    }
  else if (!poly_int_tree_p (arg1)
	   || !poly_int_tree_p (arg2)
	   || !poly_int_binop (poly_res, code, arg1, arg2, sign, &overflow))
    return NULL_TREE;
  return force_fit_type (type, poly_res, overflowable,
			 (((sign == SIGNED || overflowable == -1)
			   && overflow)
			  | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2)));
}
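
/* For example, folding 6 + 7 in int (a sketch; the constants are built
   here only for illustration):

     tree a = build_int_cst (integer_type_node, 6);
     tree b = build_int_cst (integer_type_node, 7);
     tree sum = int_const_binop (PLUS_EXPR, a, b, 0);   // INTEGER_CST 13

   With OVERFLOWABLE == 0, a signed overflow such as INT_MAX + 1 still
   yields a constant, but the flag passed to force_fit_type marks it
   with TREE_OVERFLOW so callers can tell the fold was not
   value-preserving.  */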

/* Return true if binary operation OP distributes over addition in operand
   OPNO, with the other operand being held constant.  OPNO counts from 1.  */

static bool
distributes_over_addition_p (tree_code op, int opno)
{
  switch (op)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      return true;

    case LSHIFT_EXPR:
      return opno == 1;

    default:
      return false;
    }
}
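
/* Concretely: (a + b) * c == a * c + b * c, so MULT_EXPR distributes
   in either operand, whereas shifts only distribute in the shifted
   value: (a + b) << c == (a << c) + (b << c) holds (modulo overflow),
   but x << (a + b) equals (x << a) << b rather than any sum, hence
   LSHIFT_EXPR only returns true for OPNO == 1.  This is what lets
   const_binop below operate directly on the stepped encoding of a
   VECTOR_CST.  */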

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
    {
      if (code == POINTER_PLUS_EXPR)
	return int_const_binop (PLUS_EXPR,
				arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a signaling NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISSIGNALING_NAN (d1)
	      || REAL_VALUE_ISSIGNALING_NAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && real_equal (&d2, &dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
      {
	/* Make the resulting NaN value a qNaN when flag_signaling_nans
	   is off.  */
	d1.signalling = 0;
	t = build_real (type, d1);
	return t;
      }
      else if (REAL_VALUE_ISNAN (d2))
      {
	/* Make the resulting NaN value a qNaN when flag_signaling_nans
	   is off.  */
	d2.signalling = 0;
	t = build_real (type, d2);
	return t;
      }

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math is set.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  if (TREE_CODE (arg2) != FIXED_CST)
	    return NULL_TREE;
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  {
	    if (TREE_CODE (arg2) != INTEGER_CST)
	      return NULL_TREE;
	    wi::tree_to_wide_ref w2 = wi::to_wide (arg2);
	    f2.data.high = w2.elt (1);
	    f2.data.low = w2.ulow ();
	    f2.mode = SImode;
	  }
	  break;

        default:
	  return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fall through.  */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	  {
	    /* Keep this algorithm in sync with
	       tree-complex.c:expand_complex_div_straight().

	       Expand complex division to scalars, straightforward algorithm.
	       a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
	       t = br*br + bi*bi
	    */
	    tree magsquared
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r2, r2),
			     const_binop (MULT_EXPR, i2, i2));
	    tree t1
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r1, r2),
			     const_binop (MULT_EXPR, i1, i2));
	    tree t2
	      = const_binop (MINUS_EXPR,
			     const_binop (MULT_EXPR, i1, r2),
			     const_binop (MULT_EXPR, r1, i2));

	    real = const_binop (code, t1, magsquared);
	    imag = const_binop (code, t2, magsquared);
	  }
	  else
	  {
	    /* Keep this algorithm in sync with
	       tree-complex.c:expand_complex_div_wide().

	       Expand complex division to scalars, modified algorithm to
	       minimize overflow with wide input ranges.  */
	    tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					fold_abs_const (r2, TREE_TYPE (type)),
					fold_abs_const (i2, TREE_TYPE (type)));

	    if (integer_nonzerop (compare))
	      {
		/* In the TRUE branch, we compute
		   ratio = br/bi;
		   div = (br * ratio) + bi;
		   tr = (ar * ratio) + ai;
		   ti = (ai * ratio) - ar;
		   tr = tr / div;
		   ti = ti / div;  */
		tree ratio = const_binop (code, r2, i2);
		tree div = const_binop (PLUS_EXPR, i2,
					const_binop (MULT_EXPR, r2, ratio));
		real = const_binop (MULT_EXPR, r1, ratio);
		real = const_binop (PLUS_EXPR, real, i1);
		real = const_binop (code, real, div);

		imag = const_binop (MULT_EXPR, i1, ratio);
		imag = const_binop (MINUS_EXPR, imag, r1);
		imag = const_binop (code, imag, div);
	      }
	    else
	      {
		/* In the FALSE branch, we compute
		   ratio = bi/br;
		   div = (bi * ratio) + br;
		   tr = (ai * ratio) + ar;
		   ti = ai - (ar * ratio);
		   tr = tr / div;
		   ti = ti / div;  */
		tree ratio = const_binop (code, i2, r2);
		tree div = const_binop (PLUS_EXPR, r2,
					const_binop (MULT_EXPR, i2, ratio));

		real = const_binop (MULT_EXPR, i1, ratio);
		real = const_binop (PLUS_EXPR, real, r1);
		real = const_binop (code, real, div);

		imag = const_binop (MULT_EXPR, r1, ratio);
		imag = const_binop (MINUS_EXPR, i1, imag);
		imag = const_binop (code, imag, div);
	      }
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST
      && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)),
		   TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2))))
    {
      tree type = TREE_TYPE (arg1);
      bool step_ok_p;
      if (VECTOR_CST_STEPPED_P (arg1)
	  && VECTOR_CST_STEPPED_P (arg2))
	/* We can operate directly on the encoding if:

	      a3 - a2 == a2 - a1 && b3 - b2 == b2 - b1
	    implies
	      (a3 op b3) - (a2 op b2) == (a2 op b2) - (a1 op b1)

	   Addition and subtraction are the supported operators
	   for which this is true.  */
	step_ok_p = (code == PLUS_EXPR || code == MINUS_EXPR);
      else if (VECTOR_CST_STEPPED_P (arg1))
	/* We can operate directly on stepped encodings if:

	     a3 - a2 == a2 - a1
	   implies:
	     (a3 op c) - (a2 op c) == (a2 op c) - (a1 op c)

	   which is true if (x -> x op c) distributes over addition.  */
	step_ok_p = distributes_over_addition_p (code, 1);
      else
	/* Similarly in reverse.  */
	step_ok_p = distributes_over_addition_p (code, 2);
      tree_vector_builder elts;
      if (!elts.new_binary_operation (type, arg1, arg2, step_ok_p))
	return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  tree elt = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elt == NULL_TREE)
	    return NULL_TREE;
	  elts.quick_push (elt);
	}

      return elts.build ();
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      bool step_ok_p = distributes_over_addition_p (code, 1);
      tree_vector_builder elts;
      if (!elts.new_unary_operation (type, arg1, step_ok_p))
	return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned int i = 0; i < count; ++i)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);

	  tree elt = const_binop (code, elem1, arg2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elt == NULL_TREE)
	    return NULL_TREE;
	  elts.quick_push (elt);
	}

      return elts.build ();
    }
  return NULL_TREE;
}

/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ???  Until we make the const_binop worker take the type of the
     result as argument, put those cases that need it here.  */
  switch (code)
    {
    case VEC_SERIES_EXPR:
      if (CONSTANT_CLASS_P (arg1)
	  && CONSTANT_CLASS_P (arg2))
	return build_vec_series (type, arg1, arg2);
      return NULL_TREE;

    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
	   && TREE_CODE (arg2) == REAL_CST)
	  || (TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (arg2) == INTEGER_CST))
	return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case POINTER_DIFF_EXPR:
      if (poly_int_tree_p (arg1) && poly_int_tree_p (arg2))
	{
	  poly_offset_int res = (wi::to_poly_offset (arg1)
				 - wi::to_poly_offset (arg2));
	  return force_fit_type (type, res, 1,
				 TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
	}
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_PACK_FLOAT_EXPR:
      {
	unsigned HOST_WIDE_INT out_nelts, in_nelts, i;
1628 
1629 	if (TREE_CODE (arg1) != VECTOR_CST
1630 	    || TREE_CODE (arg2) != VECTOR_CST)
1631 	  return NULL_TREE;
1632 
1633 	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
1634 	  return NULL_TREE;
1635 
1636 	out_nelts = in_nelts * 2;
1637 	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
1638 		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1639 
1640 	tree_vector_builder elts (type, out_nelts, 1);
1641 	for (i = 0; i < out_nelts; i++)
1642 	  {
1643 	    tree elt = (i < in_nelts
1644 			? VECTOR_CST_ELT (arg1, i)
1645 			: VECTOR_CST_ELT (arg2, i - in_nelts));
1646 	    elt = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
1647 				      ? NOP_EXPR
1648 				      : code == VEC_PACK_FLOAT_EXPR
1649 				      ? FLOAT_EXPR : FIX_TRUNC_EXPR,
1650 				      TREE_TYPE (type), elt);
1651 	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1652 	      return NULL_TREE;
1653 	    elts.quick_push (elt);
1654 	  }
1655 
1656 	return elts.build ();
1657       }
1658 
1659     case VEC_WIDEN_MULT_LO_EXPR:
1660     case VEC_WIDEN_MULT_HI_EXPR:
1661     case VEC_WIDEN_MULT_EVEN_EXPR:
1662     case VEC_WIDEN_MULT_ODD_EXPR:
1663       {
1664 	unsigned HOST_WIDE_INT out_nelts, in_nelts, out, ofs, scale;
1665 
1666 	if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
1667 	  return NULL_TREE;
1668 
1669 	if (!VECTOR_CST_NELTS (arg1).is_constant (&in_nelts))
1670 	  return NULL_TREE;
1671 	out_nelts = in_nelts / 2;
1672 	gcc_assert (known_eq (in_nelts, VECTOR_CST_NELTS (arg2))
1673 		    && known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1674 
1675 	if (code == VEC_WIDEN_MULT_LO_EXPR)
1676 	  scale = 0, ofs = BYTES_BIG_ENDIAN ? out_nelts : 0;
1677 	else if (code == VEC_WIDEN_MULT_HI_EXPR)
1678 	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : out_nelts;
1679 	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
1680 	  scale = 1, ofs = 0;
1681 	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1682 	  scale = 1, ofs = 1;
1683 
1684 	tree_vector_builder elts (type, out_nelts, 1);
1685 	for (out = 0; out < out_nelts; out++)
1686 	  {
1687 	    unsigned int in = (out << scale) + ofs;
1688 	    tree t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1689 					  VECTOR_CST_ELT (arg1, in));
1690 	    tree t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
1691 					  VECTOR_CST_ELT (arg2, in));
1692 
1693 	    if (t1 == NULL_TREE || t2 == NULL_TREE)
1694 	      return NULL_TREE;
1695 	    tree elt = const_binop (MULT_EXPR, t1, t2);
1696 	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1697 	      return NULL_TREE;
1698 	    elts.quick_push (elt);
1699 	  }
1700 
1701 	return elts.build ();
1702       }
1703 
1704     default:;
1705     }
1706 
1707   if (TREE_CODE_CLASS (code) != tcc_binary)
1708     return NULL_TREE;
1709 
1710   /* Make sure type and arg0 have the same saturating flag.  */
1711   gcc_checking_assert (TYPE_SATURATING (type)
1712 		       == TYPE_SATURATING (TREE_TYPE (arg1)));
1713 
1714   return const_binop (code, arg1, arg2);
1715 }
1716 
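/* Editorial example (sketch, not in the original source): through this
   overload a constant comparison is folded by fold_relational_const,

     tree one = build_int_cst (integer_type_node, 1);
     tree two = build_int_cst (integer_type_node, 2);
     tree res = const_binop (LT_EXPR, boolean_type_node, one, two);

   leaving RES as boolean_true_node, while ordinary binary codes fall
   through to the two-operand worker called at the end.  */
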
1717 /* Compute CODE ARG0 with resulting type TYPE, where ARG0 is constant.
1718    Return NULL_TREE if computing the result is not possible.  */
1719 
1720 tree
1721 const_unop (enum tree_code code, tree type, tree arg0)
1722 {
1723   /* Don't perform the operation, other than NEGATE and ABS, if
1724      flag_signaling_nans is on and the operand is a signaling NaN.  */
1725   if (TREE_CODE (arg0) == REAL_CST
1726       && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
1727       && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
1728       && code != NEGATE_EXPR
1729       && code != ABS_EXPR
1730       && code != ABSU_EXPR)
1731     return NULL_TREE;
1732 
1733   switch (code)
1734     {
1735     CASE_CONVERT:
1736     case FLOAT_EXPR:
1737     case FIX_TRUNC_EXPR:
1738     case FIXED_CONVERT_EXPR:
1739       return fold_convert_const (code, type, arg0);
1740 
1741     case ADDR_SPACE_CONVERT_EXPR:
1742       /* If the source address is 0, and the source address space
1743 	 cannot have a valid object at 0, fold to dest type null.  */
1744       if (integer_zerop (arg0)
1745 	  && !(targetm.addr_space.zero_address_valid
1746 	       (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
1747 	return fold_convert_const (code, type, arg0);
1748       break;
1749 
1750     case VIEW_CONVERT_EXPR:
1751       return fold_view_convert_expr (type, arg0);
1752 
1753     case NEGATE_EXPR:
1754       {
1755 	/* Can't call fold_negate_const directly here as that doesn't
1756 	   handle all cases and we might not be able to negate some
1757 	   constants.  */
1758 	tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
1759 	if (tem && CONSTANT_CLASS_P (tem))
1760 	  return tem;
1761 	break;
1762       }
1763 
1764     case ABS_EXPR:
1765     case ABSU_EXPR:
1766       if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
1767 	return fold_abs_const (arg0, type);
1768       break;
1769 
1770     case CONJ_EXPR:
1771       if (TREE_CODE (arg0) == COMPLEX_CST)
1772 	{
1773 	  tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
1774 					  TREE_TYPE (type));
1775 	  return build_complex (type, TREE_REALPART (arg0), ipart);
1776 	}
1777       break;
1778 
1779     case BIT_NOT_EXPR:
1780       if (TREE_CODE (arg0) == INTEGER_CST)
1781 	return fold_not_const (arg0, type);
1782       else if (POLY_INT_CST_P (arg0))
1783 	return wide_int_to_tree (type, -poly_int_cst_value (arg0));
1784       /* Perform BIT_NOT_EXPR on each element individually.  */
1785       else if (TREE_CODE (arg0) == VECTOR_CST)
1786 	{
1787 	  tree elem;
1788 
1789 	  /* This can cope with stepped encodings because ~x == -1 - x.  */
1790 	  tree_vector_builder elements;
1791 	  elements.new_unary_operation (type, arg0, true);
1792 	  unsigned int i, count = elements.encoded_nelts ();
1793 	  for (i = 0; i < count; ++i)
1794 	    {
1795 	      elem = VECTOR_CST_ELT (arg0, i);
1796 	      elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
1797 	      if (elem == NULL_TREE)
1798 		break;
1799 	      elements.quick_push (elem);
1800 	    }
1801 	  if (i == count)
1802 	    return elements.build ();
1803 	}
1804       break;
1805 
1806     case TRUTH_NOT_EXPR:
1807       if (TREE_CODE (arg0) == INTEGER_CST)
1808 	return constant_boolean_node (integer_zerop (arg0), type);
1809       break;
1810 
1811     case REALPART_EXPR:
1812       if (TREE_CODE (arg0) == COMPLEX_CST)
1813 	return fold_convert (type, TREE_REALPART (arg0));
1814       break;
1815 
1816     case IMAGPART_EXPR:
1817       if (TREE_CODE (arg0) == COMPLEX_CST)
1818 	return fold_convert (type, TREE_IMAGPART (arg0));
1819       break;
1820 
1821     case VEC_UNPACK_LO_EXPR:
1822     case VEC_UNPACK_HI_EXPR:
1823     case VEC_UNPACK_FLOAT_LO_EXPR:
1824     case VEC_UNPACK_FLOAT_HI_EXPR:
1825     case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
1826     case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
1827       {
1828 	unsigned HOST_WIDE_INT out_nelts, in_nelts, i;
1829 	enum tree_code subcode;
1830 
1831 	if (TREE_CODE (arg0) != VECTOR_CST)
1832 	  return NULL_TREE;
1833 
1834 	if (!VECTOR_CST_NELTS (arg0).is_constant (&in_nelts))
1835 	  return NULL_TREE;
1836 	out_nelts = in_nelts / 2;
1837 	gcc_assert (known_eq (out_nelts, TYPE_VECTOR_SUBPARTS (type)));
1838 
1839 	unsigned int offset = 0;
1840 	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
1841 				   || code == VEC_UNPACK_FLOAT_LO_EXPR
1842 				   || code == VEC_UNPACK_FIX_TRUNC_LO_EXPR))
1843 	  offset = out_nelts;
1844 
1845 	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
1846 	  subcode = NOP_EXPR;
1847 	else if (code == VEC_UNPACK_FLOAT_LO_EXPR
1848 		 || code == VEC_UNPACK_FLOAT_HI_EXPR)
1849 	  subcode = FLOAT_EXPR;
1850 	else
1851 	  subcode = FIX_TRUNC_EXPR;
1852 
1853 	tree_vector_builder elts (type, out_nelts, 1);
1854 	for (i = 0; i < out_nelts; i++)
1855 	  {
1856 	    tree elt = fold_convert_const (subcode, TREE_TYPE (type),
1857 					   VECTOR_CST_ELT (arg0, i + offset));
1858 	    if (elt == NULL_TREE || !CONSTANT_CLASS_P (elt))
1859 	      return NULL_TREE;
1860 	    elts.quick_push (elt);
1861 	  }
1862 
1863 	return elts.build ();
1864       }
1865 
1866     case VEC_DUPLICATE_EXPR:
1867       if (CONSTANT_CLASS_P (arg0))
1868 	return build_vector_from_val (type, arg0);
1869       return NULL_TREE;
1870 
1871     default:
1872       break;
1873     }
1874 
1875   return NULL_TREE;
1876 }
1877 
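/* Editorial example (sketch, not in the original source): const_unop
   folds a unary operation whose operand is already constant,

     tree m5 = build_int_cst (integer_type_node, -5);
     tree a5 = const_unop (ABS_EXPR, integer_type_node, m5);

   yielding the INTEGER_CST 5 in A5, and NULL_TREE whenever no constant
   result can be computed.  */
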
1878 /* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
1879    indicates which particular sizetype to create.  */
1880 
1881 tree
1882 size_int_kind (poly_int64 number, enum size_type_kind kind)
1883 {
1884   return build_int_cst (sizetype_tab[(int) kind], number);
1885 }
1886 
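/* Editorial note (not in the original source): the convenience macros
   expand to this function; size_int (16) is
   size_int_kind (16, stk_sizetype), and ssize_int, bitsize_int and
   sbitsize_int select the other entries of sizetype_tab.  */
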
1887 /* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
1888    is a tree code.  The type of the result is taken from the operands.
1889    Both must be equivalent integer types, as per int_binop_types_match_p.
1890    If the operands are constant, so is the result.  */
1891 
1892 tree
1893 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1894 {
1895   tree type = TREE_TYPE (arg0);
1896 
1897   if (arg0 == error_mark_node || arg1 == error_mark_node)
1898     return error_mark_node;
1899 
1900   gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1901                                        TREE_TYPE (arg1)));
1902 
1903   /* Handle the special case of two poly_int constants faster.  */
1904   if (poly_int_tree_p (arg0) && poly_int_tree_p (arg1))
1905     {
1906       /* And some specific cases even faster than that.  */
1907       if (code == PLUS_EXPR)
1908 	{
1909 	  if (integer_zerop (arg0)
1910 	      && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
1911 	    return arg1;
1912 	  if (integer_zerop (arg1)
1913 	      && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
1914 	    return arg0;
1915 	}
1916       else if (code == MINUS_EXPR)
1917 	{
1918 	  if (integer_zerop (arg1)
1919 	      && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg1)))
1920 	    return arg0;
1921 	}
1922       else if (code == MULT_EXPR)
1923 	{
1924 	  if (integer_onep (arg0)
1925 	      && !TREE_OVERFLOW (tree_strip_any_location_wrapper (arg0)))
1926 	    return arg1;
1927 	}
1928 
1929       /* Handle general case of two integer constants.  For sizetype
1930          constant calculations we always want to know about overflow,
1931 	 even in the unsigned case.  */
1932       tree res = int_const_binop (code, arg0, arg1, -1);
1933       if (res != NULL_TREE)
1934 	return res;
1935     }
1936 
1937   return fold_build2_loc (loc, code, type, arg0, arg1);
1938 }
1939 
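/* Editorial example (sketch, not in the original source): for two
   sizetype constants the fast path above folds immediately,

     tree sz = size_binop (PLUS_EXPR, size_int (4), size_int (12));

   producing the INTEGER_CST 16; the -1 passed to int_const_binop
   requests overflow tracking even though sizetype is unsigned.  */
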
1940 /* Given two values, either both of sizetype or both of bitsizetype,
1941    compute the difference between the two values.  Return the value
1942    in signed type corresponding to the type of the operands.  */
1943 
1944 tree
1945 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1946 {
1947   tree type = TREE_TYPE (arg0);
1948   tree ctype;
1949 
1950   gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1951 				       TREE_TYPE (arg1)));
1952 
1953   /* If the type is already signed, just do the simple thing.  */
1954   if (!TYPE_UNSIGNED (type))
1955     return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1956 
1957   if (type == sizetype)
1958     ctype = ssizetype;
1959   else if (type == bitsizetype)
1960     ctype = sbitsizetype;
1961   else
1962     ctype = signed_type_for (type);
1963 
1964   /* If either operand is not a constant, do the conversions to the signed
1965      type and subtract.  The hardware will do the right thing with any
1966      overflow in the subtraction.  */
1967   if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1968     return size_binop_loc (loc, MINUS_EXPR,
1969 			   fold_convert_loc (loc, ctype, arg0),
1970 			   fold_convert_loc (loc, ctype, arg1));
1971 
1972   /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1973      Otherwise, subtract the other way, convert to CTYPE (we know that can't
1974      overflow) and negate (which can't either).  Special-case a result
1975      of zero while we're here.  */
1976   if (tree_int_cst_equal (arg0, arg1))
1977     return build_int_cst (ctype, 0);
1978   else if (tree_int_cst_lt (arg1, arg0))
1979     return fold_convert_loc (loc, ctype,
1980 			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1981   else
1982     return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1983 			   fold_convert_loc (loc, ctype,
1984 					     size_binop_loc (loc,
1985 							     MINUS_EXPR,
1986 							     arg1, arg0)));
1987 }
1988 
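/* Editorial example (sketch, not in the original source): because
   sizetype is unsigned, the difference is formed in the signed
   counterpart so that it can go negative,

     tree d = size_diffop (size_int (4), size_int (12));

   which takes the final branch above and yields the ssizetype constant
   -8, computed as 0 - (12 - 4).  */
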
1989 /* A subroutine of fold_convert_const handling conversions of an
1990    INTEGER_CST to another integer type.  */
1991 
1992 static tree
1993 fold_convert_const_int_from_int (tree type, const_tree arg1)
1994 {
1995   /* Given an integer constant, make new constant with new type,
1996      appropriately sign-extended or truncated.  Use widest_int
1997      so that any extension is done according to ARG1's type.  */
1998   return force_fit_type (type, wi::to_widest (arg1),
1999 			 !POINTER_TYPE_P (TREE_TYPE (arg1)),
2000 			 TREE_OVERFLOW (arg1));
2001 }
2002 
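/* Editorial note (not in the original source): e.g. narrowing the
   INTEGER_CST 300 to a signed 8-bit type truncates it to 44
   (300 mod 256), and force_fit_type marks the result with
   TREE_OVERFLOW because 300 does not fit.  */
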
2003 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2004    to an integer type.  */
2005 
2006 static tree
2007 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2008 {
2009   bool overflow = false;
2010   tree t;
2011 
2012   /* The following code implements the floating point to integer
2013      conversion rules required by the Java Language Specification,
2014      that IEEE NaNs are mapped to zero and values that overflow
2015      the target precision saturate, i.e. values greater than
2016      INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2017      are mapped to INT_MIN.  These semantics are allowed by the
2018      C and C++ standards that simply state that the behavior of
2019      FP-to-integer conversion is unspecified upon overflow.  */
2020 
2021   wide_int val;
2022   REAL_VALUE_TYPE r;
2023   REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2024 
2025   switch (code)
2026     {
2027     case FIX_TRUNC_EXPR:
2028       real_trunc (&r, VOIDmode, &x);
2029       break;
2030 
2031     default:
2032       gcc_unreachable ();
2033     }
2034 
2035   /* If R is NaN, return zero and show we have an overflow.  */
2036   if (REAL_VALUE_ISNAN (r))
2037     {
2038       overflow = true;
2039       val = wi::zero (TYPE_PRECISION (type));
2040     }
2041 
2042   /* See if R is less than the lower bound or greater than the
2043      upper bound.  */
2044 
2045   if (! overflow)
2046     {
2047       tree lt = TYPE_MIN_VALUE (type);
2048       REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2049       if (real_less (&r, &l))
2050 	{
2051 	  overflow = true;
2052 	  val = wi::to_wide (lt);
2053 	}
2054     }
2055 
2056   if (! overflow)
2057     {
2058       tree ut = TYPE_MAX_VALUE (type);
2059       if (ut)
2060 	{
2061 	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2062 	  if (real_less (&u, &r))
2063 	    {
2064 	      overflow = true;
2065 	      val = wi::to_wide (ut);
2066 	    }
2067 	}
2068     }
2069 
2070   if (! overflow)
2071     val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
2072 
2073   t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
2074   return t;
2075 }
2076 
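/* Editorial note (not in the original source): under the saturating
   semantics above, folding (int) 3.7 gives 3, (int) 1e30 gives INT_MAX
   with TREE_OVERFLOW set, and (int) NaN gives 0 with TREE_OVERFLOW
   set.  */
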
2077 /* A subroutine of fold_convert_const handling conversions of a
2078    FIXED_CST to an integer type.  */
2079 
2080 static tree
2081 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2082 {
2083   tree t;
2084   double_int temp, temp_trunc;
2085   scalar_mode mode;
2086 
2087   /* Right shift FIXED_CST to temp by fbit.  */
2088   temp = TREE_FIXED_CST (arg1).data;
2089   mode = TREE_FIXED_CST (arg1).mode;
2090   if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
2091     {
2092       temp = temp.rshift (GET_MODE_FBIT (mode),
2093 			  HOST_BITS_PER_DOUBLE_INT,
2094 			  SIGNED_FIXED_POINT_MODE_P (mode));
2095 
2096       /* Left shift temp to temp_trunc by fbit.  */
2097       temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
2098 				HOST_BITS_PER_DOUBLE_INT,
2099 				SIGNED_FIXED_POINT_MODE_P (mode));
2100     }
2101   else
2102     {
2103       temp = double_int_zero;
2104       temp_trunc = double_int_zero;
2105     }
2106 
2107   /* If FIXED_CST is negative, we need to round the value toward 0:
2108      if any fractional bits were dropped by the truncation, add 1 to temp.  */
2109   if (SIGNED_FIXED_POINT_MODE_P (mode)
2110       && temp_trunc.is_negative ()
2111       && TREE_FIXED_CST (arg1).data != temp_trunc)
2112     temp += double_int_one;
2113 
2114   /* Given a fixed-point constant, make new constant with new type,
2115      appropriately sign-extended or truncated.  */
2116   t = force_fit_type (type, temp, -1,
2117 		      (temp.is_negative ()
2118 		       && (TYPE_UNSIGNED (type)
2119 			   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2120 		      | TREE_OVERFLOW (arg1));
2121 
2122   return t;
2123 }
2124 
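/* Editorial note (not in the original source): for a signed
   fixed-point value -2.5, the arithmetic right shift truncates toward
   -infinity, giving -3; the dropped fractional bits make TEMP_TRUNC
   differ from the original value, so 1 is added back and the result
   rounds toward zero to -2.  */
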
2125 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2126    to another floating point type.  */
2127 
2128 static tree
2129 fold_convert_const_real_from_real (tree type, const_tree arg1)
2130 {
2131   REAL_VALUE_TYPE value;
2132   tree t;
2133 
2134   /* Don't perform the operation if flag_signaling_nans is on
2135      and the operand is a signaling NaN.  */
2136   if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
2137       && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
2138     return NULL_TREE;
2139 
2140   real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2141   t = build_real (type, value);
2142 
2143   /* If converting an infinity or NAN to a representation that doesn't
2144      have one, set the overflow bit so that we can produce some kind of
2145      error message at the appropriate point if necessary.  It's not the
2146      most user-friendly message, but it's better than nothing.  */
2147   if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2148       && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2149     TREE_OVERFLOW (t) = 1;
2150   else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2151 	   && !MODE_HAS_NANS (TYPE_MODE (type)))
2152     TREE_OVERFLOW (t) = 1;
2153   /* Regular overflow, conversion produced an infinity in a mode that
2154      can't represent them.  */
2155   else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2156 	   && REAL_VALUE_ISINF (value)
2157 	   && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2158     TREE_OVERFLOW (t) = 1;
2159   else
2160     TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2161   return t;
2162 }
2163 
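/* Editorial note (not in the original source): folding (float) 1e300
   from a double REAL_CST produces +Inf in SFmode, and since SFmode has
   infinities none of the cases above fire; TREE_OVERFLOW is only set
   when the target mode cannot represent an Inf or NaN that the
   conversion produced or was given.  */
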
2164 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2165    to a floating point type.  */
2166 
2167 static tree
2168 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2169 {
2170   REAL_VALUE_TYPE value;
2171   tree t;
2172 
2173   real_convert_from_fixed (&value, SCALAR_FLOAT_TYPE_MODE (type),
2174 			   &TREE_FIXED_CST (arg1));
2175   t = build_real (type, value);
2176 
2177   TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2178   return t;
2179 }
2180 
2181 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2182    to another fixed-point type.  */
2183 
2184 static tree
2185 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2186 {
2187   FIXED_VALUE_TYPE value;
2188   tree t;
2189   bool overflow_p;
2190 
2191   overflow_p = fixed_convert (&value, SCALAR_TYPE_MODE (type),
2192 			      &TREE_FIXED_CST (arg1), TYPE_SATURATING (type));
2193   t = build_fixed (type, value);
2194 
2195   /* Propagate overflow flags.  */
2196   if (overflow_p | TREE_OVERFLOW (arg1))
2197     TREE_OVERFLOW (t) = 1;
2198   return t;
2199 }
2200 
2201 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2202    to a fixed-point type.  */
2203 
2204 static tree
2205 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2206 {
2207   FIXED_VALUE_TYPE value;
2208   tree t;
2209   bool overflow_p;
2210   double_int di;
2211 
2212   gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2213 
2214   di.low = TREE_INT_CST_ELT (arg1, 0);
2215   if (TREE_INT_CST_NUNITS (arg1) == 1)
2216     di.high = (HOST_WIDE_INT) di.low < 0 ? HOST_WIDE_INT_M1 : 0;
2217   else
2218     di.high = TREE_INT_CST_ELT (arg1, 1);
2219 
2220   overflow_p = fixed_convert_from_int (&value, SCALAR_TYPE_MODE (type), di,
2221 				       TYPE_UNSIGNED (TREE_TYPE (arg1)),
2222 				       TYPE_SATURATING (type));
2223   t = build_fixed (type, value);
2224 
2225   /* Propagate overflow flags.  */
2226   if (overflow_p | TREE_OVERFLOW (arg1))
2227     TREE_OVERFLOW (t) = 1;
2228   return t;
2229 }
2230 
2231 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2232    to a fixed-point type.  */
2233 
2234 static tree
2235 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2236 {
2237   FIXED_VALUE_TYPE value;
2238   tree t;
2239   bool overflow_p;
2240 
2241   overflow_p = fixed_convert_from_real (&value, SCALAR_TYPE_MODE (type),
2242 					&TREE_REAL_CST (arg1),
2243 					TYPE_SATURATING (type));
2244   t = build_fixed (type, value);
2245 
2246   /* Propagate overflow flags.  */
2247   if (overflow_p | TREE_OVERFLOW (arg1))
2248     TREE_OVERFLOW (t) = 1;
2249   return t;
2250 }
2251 
2252 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2253    type TYPE.  If no simplification can be done return NULL_TREE.  */
2254 
2255 static tree
2256 fold_convert_const (enum tree_code code, tree type, tree arg1)
2257 {
2258   tree arg_type = TREE_TYPE (arg1);
2259   if (arg_type == type)
2260     return arg1;
2261 
2262   /* We can't widen types, since the runtime value could overflow the
2263      original type before being extended to the new type.  */
2264   if (POLY_INT_CST_P (arg1)
2265       && (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2266       && TYPE_PRECISION (type) <= TYPE_PRECISION (arg_type))
2267     return build_poly_int_cst (type,
2268 			       poly_wide_int::from (poly_int_cst_value (arg1),
2269 						    TYPE_PRECISION (type),
2270 						    TYPE_SIGN (arg_type)));
2271 
2272   if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2273       || TREE_CODE (type) == OFFSET_TYPE)
2274     {
2275       if (TREE_CODE (arg1) == INTEGER_CST)
2276 	return fold_convert_const_int_from_int (type, arg1);
2277       else if (TREE_CODE (arg1) == REAL_CST)
2278 	return fold_convert_const_int_from_real (code, type, arg1);
2279       else if (TREE_CODE (arg1) == FIXED_CST)
2280 	return fold_convert_const_int_from_fixed (type, arg1);
2281     }
2282   else if (TREE_CODE (type) == REAL_TYPE)
2283     {
2284       if (TREE_CODE (arg1) == INTEGER_CST)
2285 	return build_real_from_int_cst (type, arg1);
2286       else if (TREE_CODE (arg1) == REAL_CST)
2287 	return fold_convert_const_real_from_real (type, arg1);
2288       else if (TREE_CODE (arg1) == FIXED_CST)
2289 	return fold_convert_const_real_from_fixed (type, arg1);
2290     }
2291   else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2292     {
2293       if (TREE_CODE (arg1) == FIXED_CST)
2294 	return fold_convert_const_fixed_from_fixed (type, arg1);
2295       else if (TREE_CODE (arg1) == INTEGER_CST)
2296 	return fold_convert_const_fixed_from_int (type, arg1);
2297       else if (TREE_CODE (arg1) == REAL_CST)
2298 	return fold_convert_const_fixed_from_real (type, arg1);
2299     }
2300   else if (TREE_CODE (type) == VECTOR_TYPE)
2301     {
2302       if (TREE_CODE (arg1) == VECTOR_CST
2303 	  && known_eq (TYPE_VECTOR_SUBPARTS (type), VECTOR_CST_NELTS (arg1)))
2304 	{
2305 	  tree elttype = TREE_TYPE (type);
2306 	  tree arg1_elttype = TREE_TYPE (TREE_TYPE (arg1));
2307 	  /* We can't handle steps directly when extending, since the
2308 	     values need to wrap at the original precision first.  */
2309 	  bool step_ok_p
2310 	    = (INTEGRAL_TYPE_P (elttype)
2311 	       && INTEGRAL_TYPE_P (arg1_elttype)
2312 	       && TYPE_PRECISION (elttype) <= TYPE_PRECISION (arg1_elttype));
2313 	  tree_vector_builder v;
2314 	  if (!v.new_unary_operation (type, arg1, step_ok_p))
2315 	    return NULL_TREE;
2316 	  unsigned int len = v.encoded_nelts ();
2317 	  for (unsigned int i = 0; i < len; ++i)
2318 	    {
2319 	      tree elt = VECTOR_CST_ELT (arg1, i);
2320 	      tree cvt = fold_convert_const (code, elttype, elt);
2321 	      if (cvt == NULL_TREE)
2322 		return NULL_TREE;
2323 	      v.quick_push (cvt);
2324 	    }
2325 	  return v.build ();
2326 	}
2327     }
2328   return NULL_TREE;
2329 }
2330 
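/* Editorial example (sketch, not in the original source): this is the
   dispatch point const_unop uses for CASE_CONVERT and friends,

     tree c = build_real (double_type_node, dconst2);
     tree i = fold_convert_const (FIX_TRUNC_EXPR, integer_type_node, c);

   leaving I as the INTEGER_CST 2; NULL_TREE is returned whenever ARG1
   is not a foldable constant.  */
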
2331 /* Construct a vector of zero elements of vector type TYPE.  */
2332 
2333 static tree
2334 build_zero_vector (tree type)
2335 {
2336   tree t;
2337 
2338   t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2339   return build_vector_from_val (type, t);
2340 }
2341 
2342 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR.  */
2343 
2344 bool
2345 fold_convertible_p (const_tree type, const_tree arg)
2346 {
2347   tree orig = TREE_TYPE (arg);
2348 
2349   if (type == orig)
2350     return true;
2351 
2352   if (TREE_CODE (arg) == ERROR_MARK
2353       || TREE_CODE (type) == ERROR_MARK
2354       || TREE_CODE (orig) == ERROR_MARK)
2355     return false;
2356 
2357   if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2358     return true;
2359 
2360   switch (TREE_CODE (type))
2361     {
2362     case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2363     case POINTER_TYPE: case REFERENCE_TYPE:
2364     case OFFSET_TYPE:
2365       return (INTEGRAL_TYPE_P (orig)
2366 	      || (POINTER_TYPE_P (orig)
2367 		  && TYPE_PRECISION (type) <= TYPE_PRECISION (orig))
2368 	      || TREE_CODE (orig) == OFFSET_TYPE);
2369 
2370     case REAL_TYPE:
2371     case FIXED_POINT_TYPE:
2372     case VOID_TYPE:
2373       return TREE_CODE (type) == TREE_CODE (orig);
2374 
2375     case VECTOR_TYPE:
2376       return (VECTOR_TYPE_P (orig)
2377 	      && known_eq (TYPE_VECTOR_SUBPARTS (type),
2378 			   TYPE_VECTOR_SUBPARTS (orig))
2379 	      && fold_convertible_p (TREE_TYPE (type), TREE_TYPE (orig)));
2380 
2381     default:
2382       return false;
2383     }
2384 }
2385 
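/* Editorial note (not in the original source): an ENUMERAL_TYPE
   argument is NOP-convertible to integer_type_node because enumeral
   types are INTEGRAL_TYPE_P, whereas a pointer converting to a wider
   integer type is rejected by the precision check so that the required
   extension is not silently dropped.  */
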
2386 /* Convert expression ARG to type TYPE.  Used by the middle-end for
2387    simple conversions in preference to calling the front-end's convert.  */
2388 
2389 tree
2390 fold_convert_loc (location_t loc, tree type, tree arg)
2391 {
2392   tree orig = TREE_TYPE (arg);
2393   tree tem;
2394 
2395   if (type == orig)
2396     return arg;
2397 
2398   if (TREE_CODE (arg) == ERROR_MARK
2399       || TREE_CODE (type) == ERROR_MARK
2400       || TREE_CODE (orig) == ERROR_MARK)
2401     return error_mark_node;
2402 
2403   switch (TREE_CODE (type))
2404     {
2405     case POINTER_TYPE:
2406     case REFERENCE_TYPE:
2407       /* Handle conversions between pointers to different address spaces.  */
2408       if (POINTER_TYPE_P (orig)
2409 	  && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2410 	      != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2411 	return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2412       /* fall through */
2413 
2414     case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2415     case OFFSET_TYPE:
2416       if (TREE_CODE (arg) == INTEGER_CST)
2417 	{
2418 	  tem = fold_convert_const (NOP_EXPR, type, arg);
2419 	  if (tem != NULL_TREE)
2420 	    return tem;
2421 	}
2422       if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2423 	  || TREE_CODE (orig) == OFFSET_TYPE)
2424 	return fold_build1_loc (loc, NOP_EXPR, type, arg);
2425       if (TREE_CODE (orig) == COMPLEX_TYPE)
2426 	return fold_convert_loc (loc, type,
2427 				 fold_build1_loc (loc, REALPART_EXPR,
2428 						  TREE_TYPE (orig), arg));
2429       gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2430 		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2431       return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2432 
2433     case REAL_TYPE:
2434       if (TREE_CODE (arg) == INTEGER_CST)
2435 	{
2436 	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
2437 	  if (tem != NULL_TREE)
2438 	    return tem;
2439 	}
2440       else if (TREE_CODE (arg) == REAL_CST)
2441 	{
2442 	  tem = fold_convert_const (NOP_EXPR, type, arg);
2443 	  if (tem != NULL_TREE)
2444 	    return tem;
2445 	}
2446       else if (TREE_CODE (arg) == FIXED_CST)
2447 	{
2448 	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2449 	  if (tem != NULL_TREE)
2450 	    return tem;
2451 	}
2452 
2453       switch (TREE_CODE (orig))
2454 	{
2455 	case INTEGER_TYPE:
2456 	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2457 	case POINTER_TYPE: case REFERENCE_TYPE:
2458 	  return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2459 
2460 	case REAL_TYPE:
2461 	  return fold_build1_loc (loc, NOP_EXPR, type, arg);
2462 
2463 	case FIXED_POINT_TYPE:
2464 	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2465 
2466 	case COMPLEX_TYPE:
2467 	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2468 	  return fold_convert_loc (loc, type, tem);
2469 
2470 	default:
2471 	  gcc_unreachable ();
2472 	}
2473 
2474     case FIXED_POINT_TYPE:
2475       if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2476 	  || TREE_CODE (arg) == REAL_CST)
2477 	{
2478 	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2479 	  if (tem != NULL_TREE)
2480 	    goto fold_convert_exit;
2481 	}
2482 
2483       switch (TREE_CODE (orig))
2484 	{
2485 	case FIXED_POINT_TYPE:
2486 	case INTEGER_TYPE:
2487 	case ENUMERAL_TYPE:
2488 	case BOOLEAN_TYPE:
2489 	case REAL_TYPE:
2490 	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2491 
2492 	case COMPLEX_TYPE:
2493 	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2494 	  return fold_convert_loc (loc, type, tem);
2495 
2496 	default:
2497 	  gcc_unreachable ();
2498 	}
2499 
2500     case COMPLEX_TYPE:
2501       switch (TREE_CODE (orig))
2502 	{
2503 	case INTEGER_TYPE:
2504 	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2505 	case POINTER_TYPE: case REFERENCE_TYPE:
2506 	case REAL_TYPE:
2507 	case FIXED_POINT_TYPE:
2508 	  return fold_build2_loc (loc, COMPLEX_EXPR, type,
2509 			      fold_convert_loc (loc, TREE_TYPE (type), arg),
2510 			      fold_convert_loc (loc, TREE_TYPE (type),
2511 					    integer_zero_node));
2512 	case COMPLEX_TYPE:
2513 	  {
2514 	    tree rpart, ipart;
2515 
2516 	    if (TREE_CODE (arg) == COMPLEX_EXPR)
2517 	      {
2518 		rpart = fold_convert_loc (loc, TREE_TYPE (type),
2519 				      TREE_OPERAND (arg, 0));
2520 		ipart = fold_convert_loc (loc, TREE_TYPE (type),
2521 				      TREE_OPERAND (arg, 1));
2522 		return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2523 	      }
2524 
2525 	    arg = save_expr (arg);
2526 	    rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2527 	    ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2528 	    rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2529 	    ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2530 	    return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2531 	  }
2532 
2533 	default:
2534 	  gcc_unreachable ();
2535 	}
2536 
2537     case VECTOR_TYPE:
2538       if (integer_zerop (arg))
2539 	return build_zero_vector (type);
2540       gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2541       gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2542 		  || TREE_CODE (orig) == VECTOR_TYPE);
2543       return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2544 
2545     case VOID_TYPE:
2546       tem = fold_ignored_result (arg);
2547       return fold_build1_loc (loc, NOP_EXPR, type, tem);
2548 
2549     default:
2550       if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2551 	return fold_build1_loc (loc, NOP_EXPR, type, arg);
2552       gcc_unreachable ();
2553     }
2554  fold_convert_exit:
2555   protected_set_expr_location_unshare (tem, loc);
2556   return tem;
2557 }
2558 
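/* Editorial example (sketch, not in the original source): callers
   normally go through the fold_convert macro, which supplies
   UNKNOWN_LOCATION,

     tree d = fold_convert (double_type_node,
			    build_int_cst (integer_type_node, 3));

   folding to the REAL_CST 3.0 via the FLOAT_EXPR path above, while
   non-constant operands get the corresponding conversion tree built
   instead.  */
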
2559 /* Return false if expr can be assumed not to be an lvalue, true
2560    otherwise.  */
2561 
2562 static bool
2563 maybe_lvalue_p (const_tree x)
2564 {
2565   /* We only need to wrap lvalue tree codes.  */
2566   switch (TREE_CODE (x))
2567   {
2568   case VAR_DECL:
2569   case PARM_DECL:
2570   case RESULT_DECL:
2571   case LABEL_DECL:
2572   case FUNCTION_DECL:
2573   case SSA_NAME:
2574 
2575   case COMPONENT_REF:
2576   case MEM_REF:
2577   case INDIRECT_REF:
2578   case ARRAY_REF:
2579   case ARRAY_RANGE_REF:
2580   case BIT_FIELD_REF:
2581   case OBJ_TYPE_REF:
2582 
2583   case REALPART_EXPR:
2584   case IMAGPART_EXPR:
2585   case PREINCREMENT_EXPR:
2586   case PREDECREMENT_EXPR:
2587   case SAVE_EXPR:
2588   case TRY_CATCH_EXPR:
2589   case WITH_CLEANUP_EXPR:
2590   case COMPOUND_EXPR:
2591   case MODIFY_EXPR:
2592   case TARGET_EXPR:
2593   case COND_EXPR:
2594   case BIND_EXPR:
2595   case VIEW_CONVERT_EXPR:
2596     break;
2597 
2598   default:
2599     /* Assume the worst for front-end tree codes.  */
2600     if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2601       break;
2602     return false;
2603   }
2604 
2605   return true;
2606 }
2607 
2608 /* Return an expr equal to X but certainly not valid as an lvalue.  */
2609 
2610 tree
2611 non_lvalue_loc (location_t loc, tree x)
2612 {
2613   /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2614      us.  */
2615   if (in_gimple_form)
2616     return x;
2617 
2618   if (! maybe_lvalue_p (x))
2619     return x;
2620   return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2621 }
2622 
2623 /* When pedantic, return an expr equal to X but certainly not valid as a
2624    pedantic lvalue.  Otherwise, return X.  */
2625 
2626 static tree
2627 pedantic_non_lvalue_loc (location_t loc, tree x)
2628 {
2629   return protected_set_expr_location_unshare (x, loc);
2630 }
2631 
2632 /* Given a tree comparison code, return the code that is the logical inverse.
2633    It is generally not safe to do this for floating-point comparisons, except
2634    for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2635    ERROR_MARK in this case.  */
2636 
2637 enum tree_code
2638 invert_tree_comparison (enum tree_code code, bool honor_nans)
2639 {
2640   if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2641       && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2642     return ERROR_MARK;
2643 
2644   switch (code)
2645     {
2646     case EQ_EXPR:
2647       return NE_EXPR;
2648     case NE_EXPR:
2649       return EQ_EXPR;
2650     case GT_EXPR:
2651       return honor_nans ? UNLE_EXPR : LE_EXPR;
2652     case GE_EXPR:
2653       return honor_nans ? UNLT_EXPR : LT_EXPR;
2654     case LT_EXPR:
2655       return honor_nans ? UNGE_EXPR : GE_EXPR;
2656     case LE_EXPR:
2657       return honor_nans ? UNGT_EXPR : GT_EXPR;
2658     case LTGT_EXPR:
2659       return UNEQ_EXPR;
2660     case UNEQ_EXPR:
2661       return LTGT_EXPR;
2662     case UNGT_EXPR:
2663       return LE_EXPR;
2664     case UNGE_EXPR:
2665       return LT_EXPR;
2666     case UNLT_EXPR:
2667       return GE_EXPR;
2668     case UNLE_EXPR:
2669       return GT_EXPR;
2670     case ORDERED_EXPR:
2671       return UNORDERED_EXPR;
2672     case UNORDERED_EXPR:
2673       return ORDERED_EXPR;
2674     default:
2675       gcc_unreachable ();
2676     }
2677 }
2678 
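/* Editorial note (not in the original source): with NaNs honored, the
   negation of a < b is "a >= b or unordered", so
   invert_tree_comparison (LT_EXPR, true) is UNGE_EXPR while
   invert_tree_comparison (LT_EXPR, false) is plain GE_EXPR; under
   -ftrapping-math the UN* forms would lose the invalid-operand
   exception LT_EXPR raises on unordered operands, hence the ERROR_MARK
   bail-out above.  */
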
2679 /* Similar, but return the comparison that results if the operands are
2680    swapped.  This is safe for floating-point.  */
2681 
2682 enum tree_code
2683 swap_tree_comparison (enum tree_code code)
2684 {
2685   switch (code)
2686     {
2687     case EQ_EXPR:
2688     case NE_EXPR:
2689     case ORDERED_EXPR:
2690     case UNORDERED_EXPR:
2691     case LTGT_EXPR:
2692     case UNEQ_EXPR:
2693       return code;
2694     case GT_EXPR:
2695       return LT_EXPR;
2696     case GE_EXPR:
2697       return LE_EXPR;
2698     case LT_EXPR:
2699       return GT_EXPR;
2700     case LE_EXPR:
2701       return GE_EXPR;
2702     case UNGT_EXPR:
2703       return UNLT_EXPR;
2704     case UNGE_EXPR:
2705       return UNLE_EXPR;
2706     case UNLT_EXPR:
2707       return UNGT_EXPR;
2708     case UNLE_EXPR:
2709       return UNGE_EXPR;
2710     default:
2711       gcc_unreachable ();
2712     }
2713 }
2714 
2715 
2716 /* Convert a comparison tree code from an enum tree_code representation
2717    into a compcode bit-based encoding.  This function is the inverse of
2718    compcode_to_comparison.  */
2719 
2720 static enum comparison_code
2721 comparison_to_compcode (enum tree_code code)
2722 {
2723   switch (code)
2724     {
2725     case LT_EXPR:
2726       return COMPCODE_LT;
2727     case EQ_EXPR:
2728       return COMPCODE_EQ;
2729     case LE_EXPR:
2730       return COMPCODE_LE;
2731     case GT_EXPR:
2732       return COMPCODE_GT;
2733     case NE_EXPR:
2734       return COMPCODE_NE;
2735     case GE_EXPR:
2736       return COMPCODE_GE;
2737     case ORDERED_EXPR:
2738       return COMPCODE_ORD;
2739     case UNORDERED_EXPR:
2740       return COMPCODE_UNORD;
2741     case UNLT_EXPR:
2742       return COMPCODE_UNLT;
2743     case UNEQ_EXPR:
2744       return COMPCODE_UNEQ;
2745     case UNLE_EXPR:
2746       return COMPCODE_UNLE;
2747     case UNGT_EXPR:
2748       return COMPCODE_UNGT;
2749     case LTGT_EXPR:
2750       return COMPCODE_LTGT;
2751     case UNGE_EXPR:
2752       return COMPCODE_UNGE;
2753     default:
2754       gcc_unreachable ();
2755     }
2756 }
2757 
2758 /* Convert a compcode bit-based encoding of a comparison operator back
2759    to GCC's enum tree_code representation.  This function is the
2760    inverse of comparison_to_compcode.  */
2761 
2762 static enum tree_code
2763 compcode_to_comparison (enum comparison_code code)
2764 {
2765   switch (code)
2766     {
2767     case COMPCODE_LT:
2768       return LT_EXPR;
2769     case COMPCODE_EQ:
2770       return EQ_EXPR;
2771     case COMPCODE_LE:
2772       return LE_EXPR;
2773     case COMPCODE_GT:
2774       return GT_EXPR;
2775     case COMPCODE_NE:
2776       return NE_EXPR;
2777     case COMPCODE_GE:
2778       return GE_EXPR;
2779     case COMPCODE_ORD:
2780       return ORDERED_EXPR;
2781     case COMPCODE_UNORD:
2782       return UNORDERED_EXPR;
2783     case COMPCODE_UNLT:
2784       return UNLT_EXPR;
2785     case COMPCODE_UNEQ:
2786       return UNEQ_EXPR;
2787     case COMPCODE_UNLE:
2788       return UNLE_EXPR;
2789     case COMPCODE_UNGT:
2790       return UNGT_EXPR;
2791     case COMPCODE_LTGT:
2792       return LTGT_EXPR;
2793     case COMPCODE_UNGE:
2794       return UNGE_EXPR;
2795     default:
2796       gcc_unreachable ();
2797     }
2798 }
2799 
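/* Editorial note (not in the original source): the encoding is chosen
   so that bitwise AND/OR of compcodes mirrors logical AND/OR of the
   predicates; e.g. COMPCODE_LT | COMPCODE_EQ is 1 | 2 = 3, which is
   COMPCODE_LE.  combine_comparisons below relies on exactly this
   property.  */
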
2800 /* Return true if COND1 tests the opposite condition of COND2.  */
2801 
2802 bool
2803 inverse_conditions_p (const_tree cond1, const_tree cond2)
2804 {
2805   return (COMPARISON_CLASS_P (cond1)
2806 	  && COMPARISON_CLASS_P (cond2)
2807 	  && (invert_tree_comparison
2808 	      (TREE_CODE (cond1),
2809 	       HONOR_NANS (TREE_OPERAND (cond1, 0))) == TREE_CODE (cond2))
2810 	  && operand_equal_p (TREE_OPERAND (cond1, 0),
2811 			      TREE_OPERAND (cond2, 0), 0)
2812 	  && operand_equal_p (TREE_OPERAND (cond1, 1),
2813 			      TREE_OPERAND (cond2, 1), 0));
2814 }
2815 
2816 /* Return a tree for the comparison which is the combination of
2817    doing the AND or OR (depending on CODE) of the two operations LCODE
2818    and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
2819    the possibility of trapping if the mode has NaNs, and return NULL_TREE
2820    if this makes the transformation invalid.  */
2821 
2822 tree
2823 combine_comparisons (location_t loc,
2824 		     enum tree_code code, enum tree_code lcode,
2825 		     enum tree_code rcode, tree truth_type,
2826 		     tree ll_arg, tree lr_arg)
2827 {
2828   bool honor_nans = HONOR_NANS (ll_arg);
2829   enum comparison_code lcompcode = comparison_to_compcode (lcode);
2830   enum comparison_code rcompcode = comparison_to_compcode (rcode);
2831   int compcode;
2832 
2833   switch (code)
2834     {
2835     case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2836       compcode = lcompcode & rcompcode;
2837       break;
2838 
2839     case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2840       compcode = lcompcode | rcompcode;
2841       break;
2842 
2843     default:
2844       return NULL_TREE;
2845     }
2846 
2847   if (!honor_nans)
2848     {
2849       /* Eliminate unordered comparisons, as well as LTGT and ORD
2850 	 which are not used unless the mode has NaNs.  */
2851       compcode &= ~COMPCODE_UNORD;
2852       if (compcode == COMPCODE_LTGT)
2853 	compcode = COMPCODE_NE;
2854       else if (compcode == COMPCODE_ORD)
2855 	compcode = COMPCODE_TRUE;
2856     }
2857    else if (flag_trapping_math)
2858      {
2859 	/* Check that the original operation and the optimized ones will trap
2860 	   under the same condition.  */
2861 	bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2862 		     && (lcompcode != COMPCODE_EQ)
2863 		     && (lcompcode != COMPCODE_ORD);
2864 	bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2865 		     && (rcompcode != COMPCODE_EQ)
2866 		     && (rcompcode != COMPCODE_ORD);
2867 	bool trap = (compcode & COMPCODE_UNORD) == 0
2868 		    && (compcode != COMPCODE_EQ)
2869 		    && (compcode != COMPCODE_ORD);
2870 
2871         /* In a short-circuited boolean expression the LHS might be
2872 	   such that the RHS, if evaluated, will never trap.  For
2873 	   example, in ORD (x, y) && (x < y), we evaluate the RHS only
2874 	   if neither x nor y is NaN.  (This is a mixed blessing: for
2875 	   example, the expression above will never trap, hence
2876 	   optimizing it to x < y would be invalid).  */
2877         if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2878             || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2879           rtrap = false;
2880 
2881         /* If the comparison was short-circuited, and only the RHS
2882 	   trapped, we may now generate a spurious trap.  */
2883 	if (rtrap && !ltrap
2884 	    && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2885 	  return NULL_TREE;
2886 
2887 	/* If we changed the conditions that cause a trap, we lose.  */
2888 	if ((ltrap || rtrap) != trap)
2889 	  return NULL_TREE;
2890       }
2891 
2892   if (compcode == COMPCODE_TRUE)
2893     return constant_boolean_node (true, truth_type);
2894   else if (compcode == COMPCODE_FALSE)
2895     return constant_boolean_node (false, truth_type);
2896   else
2897     {
2898       enum tree_code tcode;
2899 
2900       tcode = compcode_to_comparison ((enum comparison_code) compcode);
2901       return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2902     }
2903 }
2904 
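/* Editorial example (sketch, not in the original source): for integer
   operands, folding (a < b) || (a == b) reaches this function with
   lcompcode COMPCODE_LT and rcompcode COMPCODE_EQ; the TRUTH_ORIF_EXPR
   case ORs them into COMPCODE_LE, and the tree built is the single
   comparison a <= b.  */
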
2905 /* Return nonzero if two operands (typically of the same tree node)
2906    are necessarily equal. FLAGS modifies behavior as follows:
2907 
2908    If OEP_ONLY_CONST is set, only return nonzero for constants.
2909    This function tests whether the operands are indistinguishable;
2910    it does not test whether they are equal using C's == operation.
2911    The distinction is important for IEEE floating point, because
2912    (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2913    (2) two NaNs may be indistinguishable, but NaN!=NaN.
2914 
2915    If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2916    even though it may hold multiple values during a function.
2917    This is because a GCC tree node guarantees that nothing else is
2918    executed between the evaluation of its "operands" (which may often
2919    be evaluated in arbitrary order).  Hence if the operands themselves
2920    don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2921    same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
2922    unset means assuming isochronic (or instantaneous) tree equivalence.
2923    Unless comparing arbitrary expression trees, such as from different
2924    statements, this flag can usually be left unset.
2925 
2926    If OEP_PURE_SAME is set, then pure functions with identical arguments
2927    are considered the same.  It is used when the caller has other ways
2928    to ensure that global memory is unchanged in between.
2929 
2930    If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2931    not values of expressions.
2932 
2933    If OEP_LEXICOGRAPHIC is set, then also handle expressions with side-effects
2934    such as MODIFY_EXPR, RETURN_EXPR, as well as STATEMENT_LISTs.
2935 
2936    If OEP_BITWISE is set, then require the values to be bitwise identical
2937    rather than simply numerically equal.  Do not take advantage of things
2938    like math-related flags or undefined behavior; only return true for
2939    values that are provably bitwise identical in all circumstances.
2940 
2941    Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2942    any operand with side effects.  This is unnecessarily conservative in the
2943    case we know that arg0 and arg1 are in disjoint code paths (such as in
2944    the ?: operator).  In addition, OEP_MATCH_SIDE_EFFECTS is used when comparing
2945    addresses with TREE_CONSTANT flag set so we know that &var == &var
2946    even if var is volatile.  */
2947 
2948 bool
2949 operand_compare::operand_equal_p (const_tree arg0, const_tree arg1,
2950 				  unsigned int flags)
2951 {
2952   bool r;
2953   if (verify_hash_value (arg0, arg1, flags, &r))
2954     return r;
2955 
2956   STRIP_ANY_LOCATION_WRAPPER (arg0);
2957   STRIP_ANY_LOCATION_WRAPPER (arg1);
2958 
2959   /* If either is ERROR_MARK, they aren't equal.  */
2960   if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2961       || TREE_TYPE (arg0) == error_mark_node
2962       || TREE_TYPE (arg1) == error_mark_node)
2963     return false;
2964 
2965   /* Similarly, if either does not have a type (like a template id),
2966      they aren't equal.  */
2967   if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2968     return false;
2969 
2970   /* Bitwise identity makes no sense if the values have different layouts.  */
2971   if ((flags & OEP_BITWISE)
2972       && !tree_nop_conversion_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
2973     return false;
2974 
2975   /* We cannot consider pointers to different address space equal.  */
2976   if (POINTER_TYPE_P (TREE_TYPE (arg0))
2977       && POINTER_TYPE_P (TREE_TYPE (arg1))
2978       && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2979 	  != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2980     return false;
2981 
2982   /* Check equality of integer constants before bailing out due to
2983      precision differences.  */
2984   if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2985     {
2986       /* Address of INTEGER_CST is not defined; check that we did not forget
2987 	 to drop the OEP_ADDRESS_OF flags.  */
2988       gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2989       return tree_int_cst_equal (arg0, arg1);
2990     }
2991 
2992   if (!(flags & OEP_ADDRESS_OF))
2993     {
2994       /* If both types don't have the same signedness, then we can't consider
2995 	 them equal.  We must check this before the STRIP_NOPS calls
2996 	 because they may change the signedness of the arguments.  As pointers
2997 	 strictly don't have a signedness, require either two pointers or
2998 	 two non-pointers as well.  */
2999       if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3000 	  || POINTER_TYPE_P (TREE_TYPE (arg0))
3001 			     != POINTER_TYPE_P (TREE_TYPE (arg1)))
3002 	return false;
3003 
3004       /* If both types don't have the same precision, then it is not safe
3005 	 to strip NOPs.  */
3006       if (element_precision (TREE_TYPE (arg0))
3007 	  != element_precision (TREE_TYPE (arg1)))
3008 	return false;
3009 
3010       STRIP_NOPS (arg0);
3011       STRIP_NOPS (arg1);
3012     }
3013 #if 0
3014   /* FIXME: Fortran FE currently produce ADDR_EXPR of NOP_EXPR. Enable the
3015      sanity check once the issue is solved.  */
3016   else
3017     /* Addresses of conversions and SSA_NAMEs (and many other things)
3018        are not defined.  Check that we did not forget to drop the
3019        OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags.  */
3020     gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
3021 			 && TREE_CODE (arg0) != SSA_NAME);
3022 #endif
3023 
3024   /* In case both args are comparisons but with different comparison
3025      code, try to swap the comparison operands of one arg to produce
3026      a match and compare that variant.  */
3027   if (TREE_CODE (arg0) != TREE_CODE (arg1)
3028       && COMPARISON_CLASS_P (arg0)
3029       && COMPARISON_CLASS_P (arg1))
3030     {
3031       enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3032 
3033       if (TREE_CODE (arg0) == swap_code)
3034 	return operand_equal_p (TREE_OPERAND (arg0, 0),
3035 			        TREE_OPERAND (arg1, 1), flags)
3036 	       && operand_equal_p (TREE_OPERAND (arg0, 1),
3037 				   TREE_OPERAND (arg1, 0), flags);
3038     }
3039 
3040   if (TREE_CODE (arg0) != TREE_CODE (arg1))
3041     {
3042       /* NOP_EXPR and CONVERT_EXPR are considered equal.  */
3043       if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
3044 	;
3045       else if (flags & OEP_ADDRESS_OF)
3046 	{
3047 	  /* If we are interested in comparing addresses ignore
3048 	     MEM_REF wrappings of the base that can appear just for
3049 	     TBAA reasons.  */
3050 	  if (TREE_CODE (arg0) == MEM_REF
3051 	      && DECL_P (arg1)
3052 	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
3053 	      && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
3054 	      && integer_zerop (TREE_OPERAND (arg0, 1)))
3055 	    return true;
3056 	  else if (TREE_CODE (arg1) == MEM_REF
3057 		   && DECL_P (arg0)
3058 		   && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
3059 		   && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
3060 		   && integer_zerop (TREE_OPERAND (arg1, 1)))
3061 	    return true;
3062 	  return false;
3063 	}
3064       else
3065 	return false;
3066     }
3067 
3068   /* When not checking addresses, this is needed for conversions and for
3069      COMPONENT_REF.  Might as well play it safe and always test this.  */
3070   if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3071       || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3072       || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
3073 	  && !(flags & OEP_ADDRESS_OF)))
3074     return false;
3075 
3076   /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3077      We don't care about side effects in that case because the SAVE_EXPR
3078      takes care of that for us. In all other cases, two expressions are
3079      equal if they have no side effects.  If we have two identical
3080      expressions with side effects that should be treated the same due
3081      to the only side effects being identical SAVE_EXPR's, that will
3082      be detected in the recursive calls below.
3083      If we are taking an invariant address of two identical objects
3084      they are necessarily equal as well.  */
3085   if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3086       && (TREE_CODE (arg0) == SAVE_EXPR
3087 	  || (flags & OEP_MATCH_SIDE_EFFECTS)
3088 	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3089     return true;
3090 
3091   /* Next handle constant cases, those for which we can return 1 even
3092      if ONLY_CONST is set.  */
3093   if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3094     switch (TREE_CODE (arg0))
3095       {
3096       case INTEGER_CST:
3097 	return tree_int_cst_equal (arg0, arg1);
3098 
3099       case FIXED_CST:
3100 	return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3101 				       TREE_FIXED_CST (arg1));
3102 
3103       case REAL_CST:
3104 	if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
3105 	  return true;
3106 
3107 	if (!(flags & OEP_BITWISE) && !HONOR_SIGNED_ZEROS (arg0))
3108 	  {
3109 	    /* If we do not distinguish between signed and unsigned zero,
3110 	       consider them equal.  */
3111 	    if (real_zerop (arg0) && real_zerop (arg1))
3112 	      return true;
3113 	  }
3114 	return false;
3115 
3116       case VECTOR_CST:
3117 	{
3118 	  if (VECTOR_CST_LOG2_NPATTERNS (arg0)
3119 	      != VECTOR_CST_LOG2_NPATTERNS (arg1))
3120 	    return false;
3121 
3122 	  if (VECTOR_CST_NELTS_PER_PATTERN (arg0)
3123 	      != VECTOR_CST_NELTS_PER_PATTERN (arg1))
3124 	    return false;
3125 
3126 	  unsigned int count = vector_cst_encoded_nelts (arg0);
3127 	  for (unsigned int i = 0; i < count; ++i)
3128 	    if (!operand_equal_p (VECTOR_CST_ENCODED_ELT (arg0, i),
3129 				  VECTOR_CST_ENCODED_ELT (arg1, i), flags))
3130 	      return false;
3131 	  return true;
3132 	}
3133 
3134       case COMPLEX_CST:
3135 	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3136 				 flags)
3137 		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3138 				    flags));
3139 
3140       case STRING_CST:
3141 	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3142 		&& ! memcmp (TREE_STRING_POINTER (arg0),
3143 			      TREE_STRING_POINTER (arg1),
3144 			      TREE_STRING_LENGTH (arg0)));
3145 
3146       case ADDR_EXPR:
3147 	gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3148 	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3149 				flags | OEP_ADDRESS_OF
3150 				| OEP_MATCH_SIDE_EFFECTS);
3151       case CONSTRUCTOR:
3152 	/* In GIMPLE empty constructors are allowed in initializers of
3153 	   aggregates.  */
3154 	return !CONSTRUCTOR_NELTS (arg0) && !CONSTRUCTOR_NELTS (arg1);
3155       default:
3156 	break;
3157       }
3158 
3159   /* Don't handle more cases for OEP_BITWISE, since we can't guarantee that
3160      two instances of undefined behavior will give identical results.  */
3161   if (flags & (OEP_ONLY_CONST | OEP_BITWISE))
3162     return false;
3163 
3164 /* Define macros to test an operand from arg0 and arg1 for equality and a
3165    variant that allows null and views null as being different from any
3166    non-null value.  In the latter case, if either is null, then both
3167    must be; otherwise, do the normal comparison.  */
3168 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N),	\
3169 				    TREE_OPERAND (arg1, N), flags)
3170 
3171 #define OP_SAME_WITH_NULL(N)				\
3172   ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N))	\
3173    ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3174 
3175   switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3176     {
3177     case tcc_unary:
3178       /* Two conversions are equal only if signedness and modes match.  */
3179       switch (TREE_CODE (arg0))
3180         {
3181 	CASE_CONVERT:
3182         case FIX_TRUNC_EXPR:
3183 	  if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3184 	      != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3185 	    return false;
3186 	  break;
3187 	default:
3188 	  break;
3189 	}
3190 
3191       return OP_SAME (0);
3192 
3193 
3194     case tcc_comparison:
3195     case tcc_binary:
3196       if (OP_SAME (0) && OP_SAME (1))
3197 	return true;
3198 
3199       /* For commutative ops, allow the other order.  */
3200       return (commutative_tree_code (TREE_CODE (arg0))
3201 	      && operand_equal_p (TREE_OPERAND (arg0, 0),
3202 				  TREE_OPERAND (arg1, 1), flags)
3203 	      && operand_equal_p (TREE_OPERAND (arg0, 1),
3204 				  TREE_OPERAND (arg1, 0), flags));
3205 
3206     case tcc_reference:
3207       /* If either of the pointer (or reference) expressions we are
3208 	 dereferencing contain a side effect, these cannot be equal,
3209 	 but their addresses can be.  */
3210       if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3211 	  && (TREE_SIDE_EFFECTS (arg0)
3212 	      || TREE_SIDE_EFFECTS (arg1)))
3213 	return false;
3214 
3215       switch (TREE_CODE (arg0))
3216 	{
3217 	case INDIRECT_REF:
3218 	  if (!(flags & OEP_ADDRESS_OF))
3219 	    {
3220 	      if (TYPE_ALIGN (TREE_TYPE (arg0))
3221 		  != TYPE_ALIGN (TREE_TYPE (arg1)))
3222 		return false;
3223 	      /* Verify that the access types are compatible.  */
3224 	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg0))
3225 		  != TYPE_MAIN_VARIANT (TREE_TYPE (arg1)))
3226 		return false;
3227 	    }
3228 	  flags &= ~OEP_ADDRESS_OF;
3229 	  return OP_SAME (0);
3230 
3231 	case IMAGPART_EXPR:
3232 	  /* Require the same offset.  */
3233 	  if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3234 				TYPE_SIZE (TREE_TYPE (arg1)),
3235 				flags & ~OEP_ADDRESS_OF))
3236 	    return false;
3237 
3238 	/* Fallthru.  */
3239 	case REALPART_EXPR:
3240 	case VIEW_CONVERT_EXPR:
3241 	  return OP_SAME (0);
3242 
3243 	case TARGET_MEM_REF:
3244 	case MEM_REF:
3245 	  if (!(flags & OEP_ADDRESS_OF))
3246 	    {
3247 	      /* Require equal access sizes.  */
3248 	      if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3249 		  && (!TYPE_SIZE (TREE_TYPE (arg0))
3250 		      || !TYPE_SIZE (TREE_TYPE (arg1))
3251 		      || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3252 					   TYPE_SIZE (TREE_TYPE (arg1)),
3253 					   flags)))
3254 		return false;
3255 	      /* Verify that access happens in similar types.  */
3256 	      if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3257 		return false;
3258 	      /* Verify that accesses are TBAA compatible.  */
3259 	      if (!alias_ptr_types_compatible_p
3260 		    (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3261 		     TREE_TYPE (TREE_OPERAND (arg1, 1)))
3262 		  || (MR_DEPENDENCE_CLIQUE (arg0)
3263 		      != MR_DEPENDENCE_CLIQUE (arg1))
3264 		  || (MR_DEPENDENCE_BASE (arg0)
3265 		      != MR_DEPENDENCE_BASE (arg1)))
3266 		return false;
3267 	     /* Verify that alignment is compatible.  */
3268 	     if (TYPE_ALIGN (TREE_TYPE (arg0))
3269 		 != TYPE_ALIGN (TREE_TYPE (arg1)))
3270 		return false;
3271 	    }
3272 	  flags &= ~OEP_ADDRESS_OF;
3273 	  return (OP_SAME (0) && OP_SAME (1)
3274 		  /* TARGET_MEM_REFs require equal extra operands.  */
3275 		  && (TREE_CODE (arg0) != TARGET_MEM_REF
3276 		      || (OP_SAME_WITH_NULL (2)
3277 			  && OP_SAME_WITH_NULL (3)
3278 			  && OP_SAME_WITH_NULL (4))));
3279 
3280 	case ARRAY_REF:
3281 	case ARRAY_RANGE_REF:
3282 	  if (!OP_SAME (0))
3283 	    return false;
3284 	  flags &= ~OEP_ADDRESS_OF;
3285 	  /* First compare the array index by value if it is constant, as the
3286 	     indexes may have different types but the same value here.  */
3287 	  return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3288 				       TREE_OPERAND (arg1, 1))
3289 		   || OP_SAME (1))
3290 		  && OP_SAME_WITH_NULL (2)
3291 		  && OP_SAME_WITH_NULL (3)
3292 		  /* Compare low bound and element size as with OEP_ADDRESS_OF
3293 		     we have to account for the offset of the ref.  */
3294 		  && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3295 		      == TREE_TYPE (TREE_OPERAND (arg1, 0))
3296 		      || (operand_equal_p (array_ref_low_bound
3297 					     (CONST_CAST_TREE (arg0)),
3298 					   array_ref_low_bound
3299 					     (CONST_CAST_TREE (arg1)), flags)
3300 			  && operand_equal_p (array_ref_element_size
3301 					        (CONST_CAST_TREE (arg0)),
3302 					      array_ref_element_size
3303 					        (CONST_CAST_TREE (arg1)),
3304 					      flags))));
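	  /* E.g. "a[1]" compares equal to "a[1]" even when one index is an
	     int constant and the other a sizetype constant, because
	     tree_int_cst_equal compares only the values.  */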
3305 
3306 	case COMPONENT_REF:
3307 	  /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
3308 	     may be NULL when we're called to compare MEM_EXPRs.  */
3309 	  if (!OP_SAME_WITH_NULL (0))
3310 	    return false;
3311 	  {
3312 	    bool compare_address = flags & OEP_ADDRESS_OF;
3313 
3314 	    /* Most of the time we only need to compare FIELD_DECLs for equality.
3315 	       However, when determining the address, look into the actual
3316 	       offsets.  These may match for unions and unshared record types.  */
3317 	    flags &= ~OEP_ADDRESS_OF;
3318 	    if (!OP_SAME (1))
3319 	      {
3320 		if (compare_address
3321 		    && (flags & OEP_ADDRESS_OF_SAME_FIELD) == 0)
3322 		  {
3323 		    if (TREE_OPERAND (arg0, 2)
3324 			|| TREE_OPERAND (arg1, 2))
3325 		      return OP_SAME_WITH_NULL (2);
3326 		    tree field0 = TREE_OPERAND (arg0, 1);
3327 		    tree field1 = TREE_OPERAND (arg1, 1);
3328 
3329 		    if (!operand_equal_p (DECL_FIELD_OFFSET (field0),
3330 					  DECL_FIELD_OFFSET (field1), flags)
3331 			|| !operand_equal_p (DECL_FIELD_BIT_OFFSET (field0),
3332 					     DECL_FIELD_BIT_OFFSET (field1),
3333 					     flags))
3334 		      return false;
3335 		  }
3336 		else
3337 		  return false;
3338 	      }
3339 	  }
3340 	  return OP_SAME_WITH_NULL (2);
3341 
3342 	case BIT_FIELD_REF:
3343 	  if (!OP_SAME (0))
3344 	    return false;
3345 	  flags &= ~OEP_ADDRESS_OF;
3346 	  return OP_SAME (1) && OP_SAME (2);
3347 
3348 	default:
3349 	  return false;
3350 	}
3351 
3352     case tcc_expression:
3353       switch (TREE_CODE (arg0))
3354 	{
3355 	case ADDR_EXPR:
3356 	  /* Be sure we pass the right ADDRESS_OF flag.  */
3357 	  gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3358 	  return operand_equal_p (TREE_OPERAND (arg0, 0),
3359 				  TREE_OPERAND (arg1, 0),
3360 				  flags | OEP_ADDRESS_OF);
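	  /* The operands are compared as addresses: two references whose
	     side effects would make them unequal as values can still have
	     equal addresses.  */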
3361 
3362 	case TRUTH_NOT_EXPR:
3363 	  return OP_SAME (0);
3364 
3365 	case TRUTH_ANDIF_EXPR:
3366 	case TRUTH_ORIF_EXPR:
3367 	  return OP_SAME (0) && OP_SAME (1);
3368 
3369 	case WIDEN_MULT_PLUS_EXPR:
3370 	case WIDEN_MULT_MINUS_EXPR:
3371 	  if (!OP_SAME (2))
3372 	    return false;
3373 	  /* The multiplication operands are commutative.  */
3374 	  /* FALLTHRU */
3375 
3376 	case TRUTH_AND_EXPR:
3377 	case TRUTH_OR_EXPR:
3378 	case TRUTH_XOR_EXPR:
3379 	  if (OP_SAME (0) && OP_SAME (1))
3380 	    return true;
3381 
3382 	  /* Otherwise take into account this is a commutative operation.  */
3383 	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
3384 				   TREE_OPERAND (arg1, 1), flags)
3385 		  && operand_equal_p (TREE_OPERAND (arg0, 1),
3386 				      TREE_OPERAND (arg1, 0), flags));
3387 
3388 	case COND_EXPR:
3389 	  if (! OP_SAME (1) || ! OP_SAME_WITH_NULL (2))
3390 	    return false;
3391 	  flags &= ~OEP_ADDRESS_OF;
3392 	  return OP_SAME (0);
3393 
3394 	case BIT_INSERT_EXPR:
3395 	  /* BIT_INSERT_EXPR has an implicit operand as the type precision
3396 	     of op1.  Need to check to make sure they are the same.  */
3397 	  if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3398 	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3399 	      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 1)))
3400 		 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 1))))
3401 	    return false;
3402 	  /* FALLTHRU */
3403 
3404 	case VEC_COND_EXPR:
3405 	case DOT_PROD_EXPR:
3406 	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3407 
3408 	case MODIFY_EXPR:
3409 	case INIT_EXPR:
3410 	case COMPOUND_EXPR:
3411 	case PREDECREMENT_EXPR:
3412 	case PREINCREMENT_EXPR:
3413 	case POSTDECREMENT_EXPR:
3414 	case POSTINCREMENT_EXPR:
3415 	  if (flags & OEP_LEXICOGRAPHIC)
3416 	    return OP_SAME (0) && OP_SAME (1);
3417 	  return false;
3418 
3419 	case CLEANUP_POINT_EXPR:
3420 	case EXPR_STMT:
3421 	case SAVE_EXPR:
3422 	  if (flags & OEP_LEXICOGRAPHIC)
3423 	    return OP_SAME (0);
3424 	  return false;
3425 
3426 	case OBJ_TYPE_REF:
3427 	/* Virtual table reference.  */
3428 	if (!operand_equal_p (OBJ_TYPE_REF_EXPR (arg0),
3429 			      OBJ_TYPE_REF_EXPR (arg1), flags))
3430 	  return false;
3431 	flags &= ~OEP_ADDRESS_OF;
3432 	if (tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg0))
3433 	    != tree_to_uhwi (OBJ_TYPE_REF_TOKEN (arg1)))
3434 	  return false;
3435 	if (!operand_equal_p (OBJ_TYPE_REF_OBJECT (arg0),
3436 			      OBJ_TYPE_REF_OBJECT (arg1), flags))
3437 	  return false;
3438 	if (virtual_method_call_p (arg0))
3439 	  {
3440 	    if (!virtual_method_call_p (arg1))
3441 	      return false;
3442 	    return types_same_for_odr (obj_type_ref_class (arg0),
3443 				       obj_type_ref_class (arg1));
3444 	  }
3445 	return false;
3446 
3447 	default:
3448 	  return false;
3449 	}
3450 
3451     case tcc_vl_exp:
3452       switch (TREE_CODE (arg0))
3453 	{
3454 	case CALL_EXPR:
3455 	  if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3456 	      != (CALL_EXPR_FN (arg1) == NULL_TREE))
3457 	    /* If the CALL_EXPRs are not both internal calls or both normal
3458 	       function calls, then they are not equal.  */
3459 	    return false;
3460 	  else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3461 	    {
3462 	      /* If the CALL_EXPRs call different internal functions, then they
3463 		 are not equal.  */
3464 	      if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3465 		return false;
3466 	    }
3467 	  else
3468 	    {
3469 	      /* If the CALL_EXPRs call different functions, then they are not
3470 		 equal.  */
3471 	      if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3472 				     flags))
3473 		return false;
3474 	    }
3475 
3476 	  /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS.  */
3477 	  {
3478 	    unsigned int cef = call_expr_flags (arg0);
3479 	    if (flags & OEP_PURE_SAME)
3480 	      cef &= ECF_CONST | ECF_PURE;
3481 	    else
3482 	      cef &= ECF_CONST;
3483 	    if (!cef && !(flags & OEP_LEXICOGRAPHIC))
3484 	      return false;
3485 	  }
3486 
3487 	  /* Now see if all the arguments are the same.  */
3488 	  {
3489 	    const_call_expr_arg_iterator iter0, iter1;
3490 	    const_tree a0, a1;
3491 	    for (a0 = first_const_call_expr_arg (arg0, &iter0),
3492 		   a1 = first_const_call_expr_arg (arg1, &iter1);
3493 		 a0 && a1;
3494 		 a0 = next_const_call_expr_arg (&iter0),
3495 		   a1 = next_const_call_expr_arg (&iter1))
3496 	      if (! operand_equal_p (a0, a1, flags))
3497 		return false;
3498 
3499 	    /* If we get here and both argument lists are exhausted
3500 	       then the CALL_EXPRs are equal.  */
3501 	    return ! (a0 || a1);
3502 	  }
3503 	default:
3504 	  return false;
3505 	}
3506 
3507     case tcc_declaration:
3508       /* Consider __builtin_sqrt equal to sqrt.  */
3509       return (TREE_CODE (arg0) == FUNCTION_DECL
3510 	      && fndecl_built_in_p (arg0) && fndecl_built_in_p (arg1)
3511 	      && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3512 	      && (DECL_UNCHECKED_FUNCTION_CODE (arg0)
3513 		  == DECL_UNCHECKED_FUNCTION_CODE (arg1)));
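      /* E.g. the FUNCTION_DECLs for __builtin_sqrt and sqrt compare equal
	 here: both are BUILT_IN_NORMAL with function code BUILT_IN_SQRT.  */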
3514 
3515     case tcc_exceptional:
3516       if (TREE_CODE (arg0) == CONSTRUCTOR)
3517 	{
3518 	  if (CONSTRUCTOR_NO_CLEARING (arg0) != CONSTRUCTOR_NO_CLEARING (arg1))
3519 	    return false;
3520 
3521 	  /* In GIMPLE constructors are used only to build vectors from
3522 	     elements.  Individual elements in the constructor must be
3523 	     indexed in increasing order and form an initial sequence.
3524 
3525 	     We make no effort to compare constructors in GENERIC
3526 	     (see sem_variable::equals in ipa-icf, which can do so for
3527 	      constants).  */
3528 	  if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3529 	      || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3530 	    return false;
3531 
3532 	  /* Be sure that vectors constructed have the same representation.
3533 	     We have only tested that element precision and modes match.
3534 	     Vectors may be BLKmode, so also check that the number of
3535 	     parts matches.  */
3536 	  if (maybe_ne (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)),
3537 			TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1))))
3538 	    return false;
3539 
3540 	  vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3541 	  vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3542 	  unsigned int len = vec_safe_length (v0);
3543 
3544 	  if (len != vec_safe_length (v1))
3545 	    return false;
3546 
3547 	  for (unsigned int i = 0; i < len; i++)
3548 	    {
3549 	      constructor_elt *c0 = &(*v0)[i];
3550 	      constructor_elt *c1 = &(*v1)[i];
3551 
3552 	      if (!operand_equal_p (c0->value, c1->value, flags)
3553 		  /* In GIMPLE the indexes can be either NULL or matching i.
3554 		     Double check this so we won't get false
3555 		     positives for GENERIC.  */
3556 		  || (c0->index
3557 		      && (TREE_CODE (c0->index) != INTEGER_CST
3558 			  || compare_tree_int (c0->index, i)))
3559 		  || (c1->index
3560 		      && (TREE_CODE (c1->index) != INTEGER_CST
3561 			  || compare_tree_int (c1->index, i))))
3562 		return false;
3563 	    }
3564 	  return true;
3565 	}
3566       else if (TREE_CODE (arg0) == STATEMENT_LIST
3567 	       && (flags & OEP_LEXICOGRAPHIC))
3568 	{
3569 	  /* Compare the STATEMENT_LISTs.  */
3570 	  tree_stmt_iterator tsi1, tsi2;
3571 	  tree body1 = CONST_CAST_TREE (arg0);
3572 	  tree body2 = CONST_CAST_TREE (arg1);
3573 	  for (tsi1 = tsi_start (body1), tsi2 = tsi_start (body2); ;
3574 	       tsi_next (&tsi1), tsi_next (&tsi2))
3575 	    {
3576 	      /* The lists don't have the same number of statements.  */
3577 	      if (tsi_end_p (tsi1) ^ tsi_end_p (tsi2))
3578 		return false;
3579 	      if (tsi_end_p (tsi1) && tsi_end_p (tsi2))
3580 		return true;
3581 	      if (!operand_equal_p (tsi_stmt (tsi1), tsi_stmt (tsi2),
3582 				    flags & (OEP_LEXICOGRAPHIC
3583 					     | OEP_NO_HASH_CHECK)))
3584 		return false;
3585 	    }
3586 	}
3587       return false;
3588 
3589     case tcc_statement:
3590       switch (TREE_CODE (arg0))
3591 	{
3592 	case RETURN_EXPR:
3593 	  if (flags & OEP_LEXICOGRAPHIC)
3594 	    return OP_SAME_WITH_NULL (0);
3595 	  return false;
3596 	case DEBUG_BEGIN_STMT:
3597 	  if (flags & OEP_LEXICOGRAPHIC)
3598 	    return true;
3599 	  return false;
3600 	default:
3601 	  return false;
3602 	 }
3603 
3604     default:
3605       return false;
3606     }
3607 
3608 #undef OP_SAME
3609 #undef OP_SAME_WITH_NULL
3610 }
3611 
3612 /* Generate a hash value for an expression.  This can be used iteratively
3613    by passing a previous result as the HSTATE argument.  */
3614 
3615 void
3616 operand_compare::hash_operand (const_tree t, inchash::hash &hstate,
3617 			       unsigned int flags)
3618 {
3619   int i;
3620   enum tree_code code;
3621   enum tree_code_class tclass;
3622 
3623   if (t == NULL_TREE || t == error_mark_node)
3624     {
3625       hstate.merge_hash (0);
3626       return;
3627     }
3628 
3629   STRIP_ANY_LOCATION_WRAPPER (t);
3630 
3631   if (!(flags & OEP_ADDRESS_OF))
3632     STRIP_NOPS (t);
3633 
3634   code = TREE_CODE (t);
3635 
3636   switch (code)
3637     {
3638     /* Alas, constants aren't shared, so we can't rely on pointer
3639        identity.  */
3640     case VOID_CST:
3641       hstate.merge_hash (0);
3642       return;
3643     case INTEGER_CST:
3644       gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3645       for (i = 0; i < TREE_INT_CST_EXT_NUNITS (t); i++)
3646 	hstate.add_hwi (TREE_INT_CST_ELT (t, i));
3647       return;
3648     case REAL_CST:
3649       {
3650 	unsigned int val2;
3651 	if (!HONOR_SIGNED_ZEROS (t) && real_zerop (t))
3652 	  val2 = rvc_zero;
3653 	else
3654 	  val2 = real_hash (TREE_REAL_CST_PTR (t));
3655 	hstate.merge_hash (val2);
3656 	return;
3657       }
3658     case FIXED_CST:
3659       {
3660 	unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
3661 	hstate.merge_hash (val2);
3662 	return;
3663       }
3664     case STRING_CST:
3665       hstate.add ((const void *) TREE_STRING_POINTER (t),
3666 		  TREE_STRING_LENGTH (t));
3667       return;
3668     case COMPLEX_CST:
3669       hash_operand (TREE_REALPART (t), hstate, flags);
3670       hash_operand (TREE_IMAGPART (t), hstate, flags);
3671       return;
3672     case VECTOR_CST:
3673       {
3674 	hstate.add_int (VECTOR_CST_NPATTERNS (t));
3675 	hstate.add_int (VECTOR_CST_NELTS_PER_PATTERN (t));
3676 	unsigned int count = vector_cst_encoded_nelts (t);
3677 	for (unsigned int i = 0; i < count; ++i)
3678 	  hash_operand (VECTOR_CST_ENCODED_ELT (t, i), hstate, flags);
3679 	return;
3680       }
3681     case SSA_NAME:
3682       /* We can just compare by pointer.  */
3683       hstate.add_hwi (SSA_NAME_VERSION (t));
3684       return;
3685     case PLACEHOLDER_EXPR:
3686       /* The node itself doesn't matter.  */
3687       return;
3688     case BLOCK:
3689     case OMP_CLAUSE:
3690       /* Ignore.  */
3691       return;
3692     case TREE_LIST:
3693       /* A list of expressions, for a CALL_EXPR or as the elements of a
3694 	 VECTOR_CST.  */
3695       for (; t; t = TREE_CHAIN (t))
3696 	hash_operand (TREE_VALUE (t), hstate, flags);
3697       return;
3698     case CONSTRUCTOR:
3699       {
3700 	unsigned HOST_WIDE_INT idx;
3701 	tree field, value;
3702 	flags &= ~OEP_ADDRESS_OF;
3703 	hstate.add_int (CONSTRUCTOR_NO_CLEARING (t));
3704 	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
3705 	  {
3706 	    /* In GIMPLE the indexes can be either NULL or matching i.  */
3707 	    if (field == NULL_TREE)
3708 	      field = bitsize_int (idx);
3709 	    hash_operand (field, hstate, flags);
3710 	    hash_operand (value, hstate, flags);
3711 	  }
3712 	return;
3713       }
3714     case STATEMENT_LIST:
3715       {
3716 	tree_stmt_iterator i;
3717 	for (i = tsi_start (CONST_CAST_TREE (t));
3718 	     !tsi_end_p (i); tsi_next (&i))
3719 	  hash_operand (tsi_stmt (i), hstate, flags);
3720 	return;
3721       }
3722     case TREE_VEC:
3723       for (i = 0; i < TREE_VEC_LENGTH (t); ++i)
3724 	hash_operand (TREE_VEC_ELT (t, i), hstate, flags);
3725       return;
3726     case IDENTIFIER_NODE:
3727       hstate.add_object (IDENTIFIER_HASH_VALUE (t));
3728       return;
3729     case FUNCTION_DECL:
3730       /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
3731 	 Otherwise nodes that compare equal according to operand_equal_p might
3732 	 get different hash codes.  However, don't do this for machine specific
3733 	 or front end builtins, since the function code is overloaded in those
3734 	 cases.  */
3735       if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
3736 	  && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
3737 	{
3738 	  t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
3739 	  code = TREE_CODE (t);
3740 	}
3741       /* FALL THROUGH */
3742     default:
3743       if (POLY_INT_CST_P (t))
3744 	{
3745 	  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
3746 	    hstate.add_wide_int (wi::to_wide (POLY_INT_CST_COEFF (t, i)));
3747 	  return;
3748 	}
3749       tclass = TREE_CODE_CLASS (code);
3750 
3751       if (tclass == tcc_declaration)
3752 	{
3753 	  /* DECLs have a unique ID.  */
3754 	  hstate.add_hwi (DECL_UID (t));
3755 	}
3756       else if (tclass == tcc_comparison && !commutative_tree_code (code))
3757 	{
3758 	  /* For comparisons that can be swapped, use the lower
3759 	     tree code.  */
3760 	  enum tree_code ccode = swap_tree_comparison (code);
3761 	  if (code < ccode)
3762 	    ccode = code;
3763 	  hstate.add_object (ccode);
3764 	  hash_operand (TREE_OPERAND (t, ccode != code), hstate, flags);
3765 	  hash_operand (TREE_OPERAND (t, ccode == code), hstate, flags);
3766 	}
3767       else if (CONVERT_EXPR_CODE_P (code))
3768 	{
3769 	  /* NOP_EXPR and CONVERT_EXPR are considered equal by
3770 	     operand_equal_p.  */
3771 	  enum tree_code ccode = NOP_EXPR;
3772 	  hstate.add_object (ccode);
3773 
3774 	  /* Don't hash the type, that can lead to having nodes which
3775 	     compare equal according to operand_equal_p, but which
3776 	     have different hash codes.  Make sure to include signedness
3777 	     in the hash computation.  */
3778 	  hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
3779 	  hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3780 	}
3781       /* For OEP_ADDRESS_OF, hash MEM_EXPR[&decl, 0] the same as decl.  */
3782       else if (code == MEM_REF
3783 	       && (flags & OEP_ADDRESS_OF) != 0
3784 	       && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
3785 	       && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
3786 	       && integer_zerop (TREE_OPERAND (t, 1)))
3787 	hash_operand (TREE_OPERAND (TREE_OPERAND (t, 0), 0),
3788 		      hstate, flags);
3789       /* Don't ICE on FE specific trees, or their arguments etc.
3790 	 during operand_equal_p hash verification.  */
3791       else if (!IS_EXPR_CODE_CLASS (tclass))
3792 	gcc_assert (flags & OEP_HASH_CHECK);
3793       else
3794 	{
3795 	  unsigned int sflags = flags;
3796 
3797 	  hstate.add_object (code);
3798 
3799 	  switch (code)
3800 	    {
3801 	    case ADDR_EXPR:
3802 	      gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3803 	      flags |= OEP_ADDRESS_OF;
3804 	      sflags = flags;
3805 	      break;
3806 
3807 	    case INDIRECT_REF:
3808 	    case MEM_REF:
3809 	    case TARGET_MEM_REF:
3810 	      flags &= ~OEP_ADDRESS_OF;
3811 	      sflags = flags;
3812 	      break;
3813 
3814 	    case COMPONENT_REF:
3815 	      if (sflags & OEP_ADDRESS_OF)
3816 		{
3817 		  hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3818 		  if (TREE_OPERAND (t, 2))
3819 		    hash_operand (TREE_OPERAND (t, 2), hstate,
3820 				  flags & ~OEP_ADDRESS_OF);
3821 		  else
3822 		    {
3823 		      tree field = TREE_OPERAND (t, 1);
3824 		      hash_operand (DECL_FIELD_OFFSET (field),
3825 				    hstate, flags & ~OEP_ADDRESS_OF);
3826 		      hash_operand (DECL_FIELD_BIT_OFFSET (field),
3827 				    hstate, flags & ~OEP_ADDRESS_OF);
3828 		    }
3829 		  return;
3830 		}
3831 	      break;
3832 	    case ARRAY_REF:
3833 	    case ARRAY_RANGE_REF:
3834 	    case BIT_FIELD_REF:
3835 	      sflags &= ~OEP_ADDRESS_OF;
3836 	      break;
3837 
3838 	    case COND_EXPR:
3839 	      flags &= ~OEP_ADDRESS_OF;
3840 	      break;
3841 
3842 	    case WIDEN_MULT_PLUS_EXPR:
3843 	    case WIDEN_MULT_MINUS_EXPR:
3844 	      {
3845 		/* The multiplication operands are commutative.  */
3846 		inchash::hash one, two;
3847 		hash_operand (TREE_OPERAND (t, 0), one, flags);
3848 		hash_operand (TREE_OPERAND (t, 1), two, flags);
3849 		hstate.add_commutative (one, two);
3850 		hash_operand (TREE_OPERAND (t, 2), two, flags);
3851 		return;
3852 	      }
3853 
3854 	    case CALL_EXPR:
3855 	      if (CALL_EXPR_FN (t) == NULL_TREE)
3856 		hstate.add_int (CALL_EXPR_IFN (t));
3857 	      break;
3858 
3859 	    case TARGET_EXPR:
3860 	      /* For TARGET_EXPR, just hash on the TARGET_EXPR_SLOT.
3861 		 Usually different TARGET_EXPRs should just use
3862 		 different temporaries in their slots.  */
3863 	      hash_operand (TARGET_EXPR_SLOT (t), hstate, flags);
3864 	      return;
3865 
3866 	    case OBJ_TYPE_REF:
3867 	    /* Virtual table reference.  */
3868 	      inchash::add_expr (OBJ_TYPE_REF_EXPR (t), hstate, flags);
3869 	      flags &= ~OEP_ADDRESS_OF;
3870 	      inchash::add_expr (OBJ_TYPE_REF_TOKEN (t), hstate, flags);
3871 	      inchash::add_expr (OBJ_TYPE_REF_OBJECT (t), hstate, flags);
3872 	      if (!virtual_method_call_p (t))
3873 		return;
3874 	      if (tree c = obj_type_ref_class (t))
3875 		{
3876 		  c = TYPE_NAME (TYPE_MAIN_VARIANT (c));
3877 		  /* We compute mangled names only when free_lang_data is run.
3878 		     In that case we can hash precisely.  */
3879 		  if (TREE_CODE (c) == TYPE_DECL
3880 		      && DECL_ASSEMBLER_NAME_SET_P (c))
3881 		    hstate.add_object
3882 			   (IDENTIFIER_HASH_VALUE
3883 				   (DECL_ASSEMBLER_NAME (c)));
3884 		}
3885 	      return;
3886 	    default:
3887 	      break;
3888 	    }
3889 
3890 	  /* Don't hash the type, that can lead to having nodes which
3891 	     compare equal according to operand_equal_p, but which
3892 	     have different hash codes.  */
3893 	  if (code == NON_LVALUE_EXPR)
3894 	    {
3895 	      /* Make sure to include signedness in the hash computation.  */
3896 	      hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
3897 	      hash_operand (TREE_OPERAND (t, 0), hstate, flags);
3898 	    }
3899 
3900 	  else if (commutative_tree_code (code))
3901 	    {
3902 	      /* It's a commutative expression.  We want to hash it the same
3903 		 however it appears.  We do this by first hashing both operands
3904 		 and then rehashing based on the order of their independent
3905 		 hashes.  */
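	      /* E.g. "a + b" and "b + a" hash identically here.  */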
3906 	      inchash::hash one, two;
3907 	      hash_operand (TREE_OPERAND (t, 0), one, flags);
3908 	      hash_operand (TREE_OPERAND (t, 1), two, flags);
3909 	      hstate.add_commutative (one, two);
3910 	    }
3911 	  else
3912 	    for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
3913 	      hash_operand (TREE_OPERAND (t, i), hstate,
3914 			    i == 0 ? flags : sflags);
3915 	}
3916       return;
3917     }
3918 }
3919 
3920 bool
3921 operand_compare::verify_hash_value (const_tree arg0, const_tree arg1,
3922 				    unsigned int flags, bool *ret)
3923 {
3924   /* When checking, verify at the outermost operand_equal_p call that
3925      if operand_equal_p returns nonzero then ARG0 and ARG1 have the same
3926      hash value.  */
3927   if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
3928     {
3929       if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
3930 	{
3931 	  if (arg0 != arg1)
3932 	    {
3933 	      inchash::hash hstate0 (0), hstate1 (0);
3934 	      hash_operand (arg0, hstate0, flags | OEP_HASH_CHECK);
3935 	      hash_operand (arg1, hstate1, flags | OEP_HASH_CHECK);
3936 	      hashval_t h0 = hstate0.end ();
3937 	      hashval_t h1 = hstate1.end ();
3938 	      gcc_assert (h0 == h1);
3939 	    }
3940 	  *ret = true;
3941 	}
3942       else
3943 	*ret = false;
3944 
3945       return true;
3946     }
3947 
3948   return false;
3949 }
3950 
3951 
3952 static operand_compare default_compare_instance;
3953 
3954 /* Convenience wrapper around the operand_compare class, because usually
3955    we do not need to play with the valueizer.  */
3956 
3957 bool
3958 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3959 {
3960   return default_compare_instance.operand_equal_p (arg0, arg1, flags);
3961 }
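/* Typical uses, as in the code above: operand_equal_p (a, b, 0) for a plain
   structural match, or operand_equal_p (a, b, OEP_ADDRESS_OF) when only the
   addresses of A and B have to agree.  */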
3962 
3963 namespace inchash
3964 {
3965 
3966 /* Generate a hash value for an expression.  This can be used iteratively
3967    by passing a previous result as the HSTATE argument.
3968 
3969    This function is intended to produce the same hash for expressions which
3970    would compare equal using operand_equal_p.  */
3971 void
3972 add_expr (const_tree t, inchash::hash &hstate, unsigned int flags)
3973 {
3974   default_compare_instance.hash_operand (t, hstate, flags);
3975 }
3976 
3977 }
3978 
3979 /* Similar to operand_equal_p, but see if ARG0 might be a variant of ARG1
3980    with a different signedness or a narrower precision.  */
3981 
3982 static bool
3983 operand_equal_for_comparison_p (tree arg0, tree arg1)
3984 {
3985   if (operand_equal_p (arg0, arg1, 0))
3986     return true;
3987 
3988   if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3989       || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3990     return false;
3991 
3992   /* Discard any conversions that don't change the modes of ARG0 and ARG1
3993      and see if the inner values are the same.  This removes any
3994      signedness comparison, which doesn't matter here.  */
3995   tree op0 = arg0;
3996   tree op1 = arg1;
3997   STRIP_NOPS (op0);
3998   STRIP_NOPS (op1);
3999   if (operand_equal_p (op0, op1, 0))
4000     return true;
4001 
4002   /* Discard a single widening conversion from ARG1 and see if the inner
4003      value is the same as ARG0.  */
4004   if (CONVERT_EXPR_P (arg1)
4005       && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0)))
4006       && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))
4007          < TYPE_PRECISION (TREE_TYPE (arg1))
4008       && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
4009     return true;
4010 
4011   return false;
4012 }
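/* E.g. for "int i", comparing "i" with "(unsigned int) i" returns true
   here: stripping the conversion does not change the mode, and the inner
   values match.  */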
4013 
4014 /* See if ARG is an expression that is either a comparison or is performing
4015    arithmetic on comparisons.  The comparisons must only be comparing
4016    two different values, which will be stored in *CVAL1 and *CVAL2; if
4017    they are nonzero it means that some operands have already been found.
4018    No variables may be used anywhere else in the expression except in the
4019    comparisons.
4020 
4021    If this is true, return true.  Otherwise, return false.  */
4022 
4023 static bool
4024 twoval_comparison_p (tree arg, tree *cval1, tree *cval2)
4025 {
4026   enum tree_code code = TREE_CODE (arg);
4027   enum tree_code_class tclass = TREE_CODE_CLASS (code);
4028 
4029   /* We can handle some of the tcc_expression cases here.  */
4030   if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4031     tclass = tcc_unary;
4032   else if (tclass == tcc_expression
4033 	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
4034 	       || code == COMPOUND_EXPR))
4035     tclass = tcc_binary;
4036 
4037   switch (tclass)
4038     {
4039     case tcc_unary:
4040       return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2);
4041 
4042     case tcc_binary:
4043       return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4044 	      && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2));
4045 
4046     case tcc_constant:
4047       return true;
4048 
4049     case tcc_expression:
4050       if (code == COND_EXPR)
4051 	return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
4052 		&& twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2)
4053 		&& twoval_comparison_p (TREE_OPERAND (arg, 2), cval1, cval2));
4054       return false;
4055 
4056     case tcc_comparison:
4057       /* First see if we can handle the first operand, then the second.  For
4058 	 the second operand, we know *CVAL1 can't be zero.  It must be that
4059 	 one side of the comparison is each of the values; test for the
4060 	 case where this isn't true by failing if the two operands
4061 	 are the same.  */
4062 
4063       if (operand_equal_p (TREE_OPERAND (arg, 0),
4064 			   TREE_OPERAND (arg, 1), 0))
4065 	return false;
4066 
4067       if (*cval1 == 0)
4068 	*cval1 = TREE_OPERAND (arg, 0);
4069       else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
4070 	;
4071       else if (*cval2 == 0)
4072 	*cval2 = TREE_OPERAND (arg, 0);
4073       else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
4074 	;
4075       else
4076 	return false;
4077 
4078       if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
4079 	;
4080       else if (*cval2 == 0)
4081 	*cval2 = TREE_OPERAND (arg, 1);
4082       else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
4083 	;
4084       else
4085 	return false;
4086 
4087       return true;
4088 
4089     default:
4090       return false;
4091     }
4092 }
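/* E.g. for ARG == "a < b || a == b" this returns true with *CVAL1 == a and
   *CVAL2 == b, while any comparison that involves a third variable makes it
   fail.  */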
4093 
4094 /* ARG is a tree that is known to contain just arithmetic operations and
4095    comparisons.  Evaluate the operations in the tree substituting NEW0 for
4096    any occurrence of OLD0 as an operand of a comparison and likewise for
4097    NEW1 and OLD1.  */
4098 
4099 static tree
4100 eval_subst (location_t loc, tree arg, tree old0, tree new0,
4101 	    tree old1, tree new1)
4102 {
4103   tree type = TREE_TYPE (arg);
4104   enum tree_code code = TREE_CODE (arg);
4105   enum tree_code_class tclass = TREE_CODE_CLASS (code);
4106 
4107   /* We can handle some of the tcc_expression cases here.  */
4108   if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
4109     tclass = tcc_unary;
4110   else if (tclass == tcc_expression
4111 	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
4112     tclass = tcc_binary;
4113 
4114   switch (tclass)
4115     {
4116     case tcc_unary:
4117       return fold_build1_loc (loc, code, type,
4118 			  eval_subst (loc, TREE_OPERAND (arg, 0),
4119 				      old0, new0, old1, new1));
4120 
4121     case tcc_binary:
4122       return fold_build2_loc (loc, code, type,
4123 			  eval_subst (loc, TREE_OPERAND (arg, 0),
4124 				      old0, new0, old1, new1),
4125 			  eval_subst (loc, TREE_OPERAND (arg, 1),
4126 				      old0, new0, old1, new1));
4127 
4128     case tcc_expression:
4129       switch (code)
4130 	{
4131 	case SAVE_EXPR:
4132 	  return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
4133 			     old1, new1);
4134 
4135 	case COMPOUND_EXPR:
4136 	  return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
4137 			     old1, new1);
4138 
4139 	case COND_EXPR:
4140 	  return fold_build3_loc (loc, code, type,
4141 			      eval_subst (loc, TREE_OPERAND (arg, 0),
4142 					  old0, new0, old1, new1),
4143 			      eval_subst (loc, TREE_OPERAND (arg, 1),
4144 					  old0, new0, old1, new1),
4145 			      eval_subst (loc, TREE_OPERAND (arg, 2),
4146 					  old0, new0, old1, new1));
4147 	default:
4148 	  break;
4149 	}
4150       /* Fall through - ???  */
4151 
4152     case tcc_comparison:
4153       {
4154 	tree arg0 = TREE_OPERAND (arg, 0);
4155 	tree arg1 = TREE_OPERAND (arg, 1);
4156 
4157 	/* We need to check both for exact equality and tree equality.  The
4158 	   former will be true if the operand has a side-effect.  In that
4159 	   case, we know the operand occurred exactly once.  */
4160 
4161 	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
4162 	  arg0 = new0;
4163 	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
4164 	  arg0 = new1;
4165 
4166 	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
4167 	  arg1 = new0;
4168 	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
4169 	  arg1 = new1;
4170 
4171 	return fold_build2_loc (loc, code, type, arg0, arg1);
4172       }
4173 
4174     default:
4175       return arg;
4176     }
4177 }
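/* E.g. eval_subst on "a < b" with OLD0 == a, NEW0 == 0, OLD1 == b and
   NEW1 == 1 rebuilds the comparison as "0 < 1", which fold_build2_loc
   reduces on the spot.  */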
4178 
4179 /* Return a tree for the case when the result of an expression is RESULT
4180    converted to TYPE and OMITTED was previously an operand of the expression
4181    but is now not needed (e.g., we folded OMITTED * 0).
4182 
4183    If OMITTED has side effects, we must evaluate it.  Otherwise, just do
4184    the conversion of RESULT to TYPE.  */
4185 
4186 tree
4187 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
4188 {
4189   tree t = fold_convert_loc (loc, type, result);
4190 
4191   /* If the resulting operand is an empty statement, just return the omitted
4192      statement cast to void.  */
4193   if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
4194     return build1_loc (loc, NOP_EXPR, void_type_node,
4195 		       fold_ignored_result (omitted));
4196 
4197   if (TREE_SIDE_EFFECTS (omitted))
4198     return build2_loc (loc, COMPOUND_EXPR, type,
4199 		       fold_ignored_result (omitted), t);
4200 
4201   return non_lvalue_loc (loc, t);
4202 }
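/* E.g. when "f () * 0" is folded, RESULT is 0 and OMITTED is "f ()", so the
   result is the COMPOUND_EXPR "(f (), 0)", preserving the call's side
   effects.  */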
4203 
4204 /* Return a tree for the case when the result of an expression is RESULT
4205    converted to TYPE and OMITTED1 and OMITTED2 were previously operands
4206    of the expression but are now not needed.
4207 
4208    If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
4209    If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
4210    evaluated before OMITTED2.  Otherwise, if neither has side effects,
4211    just do the conversion of RESULT to TYPE.  */
4212 
4213 tree
4214 omit_two_operands_loc (location_t loc, tree type, tree result,
4215 		       tree omitted1, tree omitted2)
4216 {
4217   tree t = fold_convert_loc (loc, type, result);
4218 
4219   if (TREE_SIDE_EFFECTS (omitted2))
4220     t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
4221   if (TREE_SIDE_EFFECTS (omitted1))
4222     t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
4223 
4224   return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
4225 }
4226 
4227 
4228 /* Return a simplified tree node for the truth-negation of ARG.  This
4229    never alters ARG itself.  We assume that ARG is an operation that
4230    returns a truth value (0 or 1).
4231 
4232    FIXME: one would think we would fold the result, but it causes
4233    problems with the dominator optimizer.  */
4234 
4235 static tree
4236 fold_truth_not_expr (location_t loc, tree arg)
4237 {
4238   tree type = TREE_TYPE (arg);
4239   enum tree_code code = TREE_CODE (arg);
4240   location_t loc1, loc2;
4241 
4242   /* If this is a comparison, we can simply invert it, except for
4243      floating-point non-equality comparisons, in which case we just
4244      enclose a TRUTH_NOT_EXPR around what we have.  */
4245 
4246   if (TREE_CODE_CLASS (code) == tcc_comparison)
4247     {
4248       tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
4249       if (FLOAT_TYPE_P (op_type)
4250 	  && flag_trapping_math
4251 	  && code != ORDERED_EXPR && code != UNORDERED_EXPR
4252 	  && code != NE_EXPR && code != EQ_EXPR)
4253 	return NULL_TREE;
4254 
4255       code = invert_tree_comparison (code, HONOR_NANS (op_type));
4256       if (code == ERROR_MARK)
4257 	return NULL_TREE;
4258 
4259       tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
4260 			     TREE_OPERAND (arg, 1));
4261       if (TREE_NO_WARNING (arg))
4262 	TREE_NO_WARNING (ret) = 1;
4263       return ret;
4264     }
4265 
4266   switch (code)
4267     {
4268     case INTEGER_CST:
4269       return constant_boolean_node (integer_zerop (arg), type);
4270 
4271     case TRUTH_AND_EXPR:
4272       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4273       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4274       return build2_loc (loc, TRUTH_OR_EXPR, type,
4275 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4276 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4277 
4278     case TRUTH_OR_EXPR:
4279       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4280       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4281       return build2_loc (loc, TRUTH_AND_EXPR, type,
4282 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4283 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4284 
4285     case TRUTH_XOR_EXPR:
4286       /* Here we can invert either operand.  We invert the first operand
4287 	 unless the second operand is a TRUTH_NOT_EXPR in which case our
4288 	 result is the XOR of the first operand with the inside of the
4289 	 negation of the second operand.  */
4290 
4291       if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
4292 	return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
4293 			   TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
4294       else
4295 	return build2_loc (loc, TRUTH_XOR_EXPR, type,
4296 			   invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
4297 			   TREE_OPERAND (arg, 1));
4298 
4299     case TRUTH_ANDIF_EXPR:
4300       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4301       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4302       return build2_loc (loc, TRUTH_ORIF_EXPR, type,
4303 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4304 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4305 
4306     case TRUTH_ORIF_EXPR:
4307       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4308       loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4309       return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
4310 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
4311 			 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
4312 
4313     case TRUTH_NOT_EXPR:
4314       return TREE_OPERAND (arg, 0);
4315 
4316     case COND_EXPR:
4317       {
4318 	tree arg1 = TREE_OPERAND (arg, 1);
4319 	tree arg2 = TREE_OPERAND (arg, 2);
4320 
4321 	loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4322 	loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
4323 
4324 	/* A COND_EXPR may have a throw as one operand, which
4325 	   then has void type.  Just leave void operands
4326 	   as they are.  */
4327 	return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
4328 			   VOID_TYPE_P (TREE_TYPE (arg1))
4329 			   ? arg1 : invert_truthvalue_loc (loc1, arg1),
4330 			   VOID_TYPE_P (TREE_TYPE (arg2))
4331 			   ? arg2 : invert_truthvalue_loc (loc2, arg2));
4332       }
4333 
4334     case COMPOUND_EXPR:
4335       loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
4336       return build2_loc (loc, COMPOUND_EXPR, type,
4337 			 TREE_OPERAND (arg, 0),
4338 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
4339 
4340     case NON_LVALUE_EXPR:
4341       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4342       return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
4343 
4344     CASE_CONVERT:
4345       if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
4346 	return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4347 
4348       /* fall through */
4349 
4350     case FLOAT_EXPR:
4351       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4352       return build1_loc (loc, TREE_CODE (arg), type,
4353 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4354 
4355     case BIT_AND_EXPR:
4356       if (!integer_onep (TREE_OPERAND (arg, 1)))
4357 	return NULL_TREE;
4358       return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
4359 
4360     case SAVE_EXPR:
4361       return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
4362 
4363     case CLEANUP_POINT_EXPR:
4364       loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
4365       return build1_loc (loc, CLEANUP_POINT_EXPR, type,
4366 			 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
4367 
4368     default:
4369       return NULL_TREE;
4370     }
4371 }
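/* E.g. "!(a < b)" becomes "a >= b" for integer operands.  For
   floating-point operands with -ftrapping-math, inequality comparisons are
   left alone (NULL_TREE is returned) because "a >= b" and "!(a < b)" behave
   differently when an operand is a NaN.  */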
4372 
4373 /* Fold the truth-negation of ARG.  This never alters ARG itself.  We
4374    assume that ARG is an operation that returns a truth value (0 or 1
4375    for scalars, 0 or -1 for vectors).  Return the folded expression if
4376    folding is successful.  Otherwise, return NULL_TREE.  */
4377 
4378 static tree
4379 fold_invert_truthvalue (location_t loc, tree arg)
4380 {
4381   tree type = TREE_TYPE (arg);
4382   return fold_unary_loc (loc, VECTOR_TYPE_P (type)
4383 			      ? BIT_NOT_EXPR
4384 			      : TRUTH_NOT_EXPR,
4385 			 type, arg);
4386 }
4387 
4388 /* Return a simplified tree node for the truth-negation of ARG.  This
4389    never alters ARG itself.  We assume that ARG is an operation that
4390    returns a truth value (0 or 1 for scalars, 0 or -1 for vectors).  */
4391 
4392 tree
4393 invert_truthvalue_loc (location_t loc, tree arg)
4394 {
4395   if (TREE_CODE (arg) == ERROR_MARK)
4396     return arg;
4397 
4398   tree type = TREE_TYPE (arg);
4399   return fold_build1_loc (loc, VECTOR_TYPE_P (type)
4400 			       ? BIT_NOT_EXPR
4401 			       : TRUTH_NOT_EXPR,
4402 			  type, arg);
4403 }
4404 
4405 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
4406    starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero
4407    and uses reverse storage order if REVERSEP is nonzero.  ORIG_INNER
4408    is the original memory reference used to preserve the alias set of
4409    the access.  */
4410 
4411 static tree
4412 make_bit_field_ref (location_t loc, tree inner, tree orig_inner, tree type,
4413 		    HOST_WIDE_INT bitsize, poly_int64 bitpos,
4414 		    int unsignedp, int reversep)
4415 {
4416   tree result, bftype;
4417 
4418   /* Attempt not to lose the access path if possible.  */
4419   if (TREE_CODE (orig_inner) == COMPONENT_REF)
4420     {
4421       tree ninner = TREE_OPERAND (orig_inner, 0);
4422       machine_mode nmode;
4423       poly_int64 nbitsize, nbitpos;
4424       tree noffset;
4425       int nunsignedp, nreversep, nvolatilep = 0;
4426       tree base = get_inner_reference (ninner, &nbitsize, &nbitpos,
4427 				       &noffset, &nmode, &nunsignedp,
4428 				       &nreversep, &nvolatilep);
4429       if (base == inner
4430 	  && noffset == NULL_TREE
4431 	  && known_subrange_p (bitpos, bitsize, nbitpos, nbitsize)
4432 	  && !reversep
4433 	  && !nreversep
4434 	  && !nvolatilep)
4435 	{
4436 	  inner = ninner;
4437 	  bitpos -= nbitpos;
4438 	}
4439     }
4440 
4441   alias_set_type iset = get_alias_set (orig_inner);
4442   if (iset == 0 && get_alias_set (inner) != iset)
4443     inner = fold_build2 (MEM_REF, TREE_TYPE (inner),
4444 			 build_fold_addr_expr (inner),
4445 			 build_int_cst (ptr_type_node, 0));
4446 
4447   if (known_eq (bitpos, 0) && !reversep)
4448     {
4449       tree size = TYPE_SIZE (TREE_TYPE (inner));
4450       if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
4451 	   || POINTER_TYPE_P (TREE_TYPE (inner)))
4452 	  && tree_fits_shwi_p (size)
4453 	  && tree_to_shwi (size) == bitsize)
4454 	return fold_convert_loc (loc, type, inner);
4455     }
4456 
4457   bftype = type;
4458   if (TYPE_PRECISION (bftype) != bitsize
4459       || TYPE_UNSIGNED (bftype) == !unsignedp)
4460     bftype = build_nonstandard_integer_type (bitsize, 0);
4461 
4462   result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
4463 		       bitsize_int (bitsize), bitsize_int (bitpos));
4464   REF_REVERSE_STORAGE_ORDER (result) = reversep;
4465 
4466   if (bftype != type)
4467     result = fold_convert_loc (loc, type, result);
4468 
4469   return result;
4470 }
4471 
4472 /* Optimize a bit-field compare.
4473 
4474    There are two cases:  First is a compare against a constant and the
4475    second is a comparison of two items where the fields are at the same
4476    bit position relative to the start of a chunk (byte, halfword, word)
4477    large enough to contain it.  In these cases we can avoid the shift
4478    implicit in bitfield extractions.
4479 
4480    For constants, we emit a compare of the shifted constant with the
4481    BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4482    compared.  For two fields at the same position, we do the ANDs with the
4483    similar mask and compare the result of the ANDs.
4484 
4485    CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4486    COMPARE_TYPE is the type of the comparison, and LHS and RHS
4487    are the left and right operands of the comparison, respectively.
4488 
4489    If the optimization described above can be done, we return the resulting
4490    tree.  Otherwise we return zero.  */
4491 
4492 static tree
4493 optimize_bit_field_compare (location_t loc, enum tree_code code,
4494 			    tree compare_type, tree lhs, tree rhs)
4495 {
4496   poly_int64 plbitpos, plbitsize, rbitpos, rbitsize;
4497   HOST_WIDE_INT lbitpos, lbitsize, nbitpos, nbitsize;
4498   tree type = TREE_TYPE (lhs);
4499   tree unsigned_type;
4500   int const_p = TREE_CODE (rhs) == INTEGER_CST;
4501   machine_mode lmode, rmode;
4502   scalar_int_mode nmode;
4503   int lunsignedp, runsignedp;
4504   int lreversep, rreversep;
4505   int lvolatilep = 0, rvolatilep = 0;
4506   tree linner, rinner = NULL_TREE;
4507   tree mask;
4508   tree offset;
4509 
4510   /* Get all the information about the extractions being done.  If the bit size
4511      is the same as the size of the underlying object, we aren't doing an
4512      extraction at all and so can do nothing.  We also don't want to
4513      do anything if the inner expression is a PLACEHOLDER_EXPR since we
4514      then will no longer be able to replace it.  */
4515   linner = get_inner_reference (lhs, &plbitsize, &plbitpos, &offset, &lmode,
4516 				&lunsignedp, &lreversep, &lvolatilep);
4517   if (linner == lhs
4518       || !known_size_p (plbitsize)
4519       || !plbitsize.is_constant (&lbitsize)
4520       || !plbitpos.is_constant (&lbitpos)
4521       || known_eq (lbitsize, GET_MODE_BITSIZE (lmode))
4522       || offset != 0
4523       || TREE_CODE (linner) == PLACEHOLDER_EXPR
4524       || lvolatilep)
4525     return 0;
4526 
4527   if (const_p)
4528     rreversep = lreversep;
4529   else
4530    {
4531      /* If this is not a constant, we can only do something if bit positions,
4532 	sizes, signedness and storage order are the same.  */
4533      rinner
4534        = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4535 			      &runsignedp, &rreversep, &rvolatilep);
4536 
4537      if (rinner == rhs
4538 	 || maybe_ne (lbitpos, rbitpos)
4539 	 || maybe_ne (lbitsize, rbitsize)
4540 	 || lunsignedp != runsignedp
4541 	 || lreversep != rreversep
4542 	 || offset != 0
4543 	 || TREE_CODE (rinner) == PLACEHOLDER_EXPR
4544 	 || rvolatilep)
4545        return 0;
4546    }
4547 
4548   /* Honor the C++ memory model and mimic what RTL expansion does.  */
4549   poly_uint64 bitstart = 0;
4550   poly_uint64 bitend = 0;
4551   if (TREE_CODE (lhs) == COMPONENT_REF)
4552     {
4553       get_bit_range (&bitstart, &bitend, lhs, &plbitpos, &offset);
4554       if (!plbitpos.is_constant (&lbitpos) || offset != NULL_TREE)
4555 	return 0;
4556     }
4557 
4558   /* See if we can find a mode to refer to this field.  We should be able to,
4559      but fail if we can't.  */
4560   if (!get_best_mode (lbitsize, lbitpos, bitstart, bitend,
4561 		      const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4562 		      : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4563 			     TYPE_ALIGN (TREE_TYPE (rinner))),
4564 		      BITS_PER_WORD, false, &nmode))
4565     return 0;
4566 
4567   /* Set signed and unsigned types of the precision of this mode for the
4568      shifts below.  */
4569   unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4570 
4571   /* Compute the bit position and size for the new reference and our offset
4572      within it. If the new reference is the same size as the original, we
4573      won't optimize anything, so return zero.  */
4574   nbitsize = GET_MODE_BITSIZE (nmode);
4575   nbitpos = lbitpos & ~ (nbitsize - 1);
4576   lbitpos -= nbitpos;
4577   if (nbitsize == lbitsize)
4578     return 0;
4579 
4580   if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4581     lbitpos = nbitsize - lbitsize - lbitpos;
4582 
4583   /* Make the mask to be used against the extracted field.  */
4584   mask = build_int_cst_type (unsigned_type, -1);
4585   mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
4586   mask = const_binop (RSHIFT_EXPR, mask,
4587 		      size_int (nbitsize - lbitsize - lbitpos));
4588 
4589   if (! const_p)
4590     {
4591       if (nbitpos < 0)
4592 	return 0;
4593 
4594       /* If not comparing with constant, just rework the comparison
4595 	 and return.  */
4596       tree t1 = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4597 				    nbitsize, nbitpos, 1, lreversep);
4598       t1 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t1, mask);
4599       tree t2 = make_bit_field_ref (loc, rinner, rhs, unsigned_type,
4600 				    nbitsize, nbitpos, 1, rreversep);
4601       t2 = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type, t2, mask);
4602       return fold_build2_loc (loc, code, compare_type, t1, t2);
4603     }
4604 
4605   /* Otherwise, we are handling the constant case.  See if the constant is too
4606      big for the field.  Warn and return a tree for 0 (false) if so.  We do
4607      this not only for its own sake, but to avoid having to test for this
4608      error case below.  If we didn't, we might generate wrong code.
4609 
4610      For unsigned fields, the constant shifted right by the field length should
4611      be all zero.  For signed fields, the high-order bits should agree with
4612      the sign bit.  */
4613 
4614   if (lunsignedp)
4615     {
4616       if (wi::lrshift (wi::to_wide (rhs), lbitsize) != 0)
4617 	{
4618 	  warning (0, "comparison is always %d due to width of bit-field",
4619 		   code == NE_EXPR);
4620 	  return constant_boolean_node (code == NE_EXPR, compare_type);
4621 	}
4622     }
4623   else
4624     {
4625       wide_int tem = wi::arshift (wi::to_wide (rhs), lbitsize - 1);
4626       if (tem != 0 && tem != -1)
4627 	{
4628 	  warning (0, "comparison is always %d due to width of bit-field",
4629 		   code == NE_EXPR);
4630 	  return constant_boolean_node (code == NE_EXPR, compare_type);
4631 	}
4632     }
4633 
4634   if (nbitpos < 0)
4635     return 0;
4636 
4637   /* Single-bit compares should always be against zero.  */
4638   if (lbitsize == 1 && ! integer_zerop (rhs))
4639     {
4640       code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4641       rhs = build_int_cst (type, 0);
4642     }
4643 
4644   /* Make a new bitfield reference, shift the constant over the
4645      appropriate number of bits and mask it with the computed mask
4646      (in case this was a signed field).  If we changed it, make a new one.  */
4647   lhs = make_bit_field_ref (loc, linner, lhs, unsigned_type,
4648 			    nbitsize, nbitpos, 1, lreversep);
4649 
4650   rhs = const_binop (BIT_AND_EXPR,
4651 		     const_binop (LSHIFT_EXPR,
4652 				  fold_convert_loc (loc, unsigned_type, rhs),
4653 				  size_int (lbitpos)),
4654 		     mask);
4655 
4656   lhs = build2_loc (loc, code, compare_type,
4657 		    build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4658   return lhs;
4659 }
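/* E.g. given "struct { int f : 3; } s;", a comparison like "s.f == 2" can
   be rewritten along the lines of "(WORD & MASK) == (2 << POS)", where WORD
   stands for a mode-sized load covering the field and MASK/POS for the
   computed mask and bit position (illustrative names only).  */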
4660 
4661 /* Subroutine for fold_truth_andor_1: decode a field reference.
4662 
4663    If EXP is a comparison reference, we return the innermost reference.
4664 
4665    *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4666    set to the starting bit number.
4667 
4668    If the innermost field can be completely contained in a mode-sized
4669    unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.
4670 
4671    *PVOLATILEP is set to 1 if any expression encountered is volatile;
4672    otherwise it is not changed.
4673 
4674    *PUNSIGNEDP is set to the signedness of the field.
4675 
4676    *PREVERSEP is set to the storage order of the field.
4677 
4678    *PMASK is set to the mask used.  This is either contained in a
4679    BIT_AND_EXPR or derived from the width of the field.
4680 
4681    *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4682 
4683    Return 0 if this is not a component reference or is one that we can't
4684    do anything with.  */
4685 
4686 static tree
4687 decode_field_reference (location_t loc, tree *exp_, HOST_WIDE_INT *pbitsize,
4688 			HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4689 			int *punsignedp, int *preversep, int *pvolatilep,
4690 			tree *pmask, tree *pand_mask)
4691 {
4692   tree exp = *exp_;
4693   tree outer_type = 0;
4694   tree and_mask = 0;
4695   tree mask, inner, offset;
4696   tree unsigned_type;
4697   unsigned int precision;
4698 
4699   /* All the optimizations using this function assume integer fields.
4700      There are problems with FP fields since the type_for_size call
4701      below can fail for, e.g., XFmode.  */
4702   if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4703     return NULL_TREE;
4704 
4705   /* We are interested in the bare arrangement of bits, so strip everything
4706      that doesn't affect the machine mode.  However, record the type of the
4707      outermost expression if it may matter below.  */
4708   if (CONVERT_EXPR_P (exp)
4709       || TREE_CODE (exp) == NON_LVALUE_EXPR)
4710     outer_type = TREE_TYPE (exp);
4711   STRIP_NOPS (exp);
4712 
4713   if (TREE_CODE (exp) == BIT_AND_EXPR)
4714     {
4715       and_mask = TREE_OPERAND (exp, 1);
4716       exp = TREE_OPERAND (exp, 0);
4717       STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4718       if (TREE_CODE (and_mask) != INTEGER_CST)
4719 	return NULL_TREE;
4720     }
4721 
4722   poly_int64 poly_bitsize, poly_bitpos;
4723   inner = get_inner_reference (exp, &poly_bitsize, &poly_bitpos, &offset,
4724 			       pmode, punsignedp, preversep, pvolatilep);
4725   if ((inner == exp && and_mask == 0)
4726       || !poly_bitsize.is_constant (pbitsize)
4727       || !poly_bitpos.is_constant (pbitpos)
4728       || *pbitsize < 0
4729       || offset != 0
4730       || TREE_CODE (inner) == PLACEHOLDER_EXPR
4731       /* Reject out-of-bound accesses (PR79731).  */
4732       || (! AGGREGATE_TYPE_P (TREE_TYPE (inner))
4733 	  && compare_tree_int (TYPE_SIZE (TREE_TYPE (inner)),
4734 			       *pbitpos + *pbitsize) < 0))
4735     return NULL_TREE;
4736 
4737   unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4738   if (unsigned_type == NULL_TREE)
4739     return NULL_TREE;
4740 
4741   *exp_ = exp;
4742 
4743   /* If the number of bits in the reference is the same as the bitsize of
4744      the outer type, then the outer type gives the signedness. Otherwise
4745      (in case of a small bitfield) the signedness is unchanged.  */
4746   if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4747     *punsignedp = TYPE_UNSIGNED (outer_type);
4748 
4749   /* Compute the mask to access the bitfield.  */
4750   precision = TYPE_PRECISION (unsigned_type);
4751 
4752   mask = build_int_cst_type (unsigned_type, -1);
4753 
4754   mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4755   mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4756 
4757   /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
4758   if (and_mask != 0)
4759     mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4760 			fold_convert_loc (loc, unsigned_type, and_mask), mask);
4761 
4762   *pmask = mask;
4763   *pand_mask = and_mask;
4764   return inner;
4765 }
4766 
4767 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4768    bit positions and MASK is SIGNED.  */
4769 
4770 static bool
4771 all_ones_mask_p (const_tree mask, unsigned int size)
4772 {
4773   tree type = TREE_TYPE (mask);
4774   unsigned int precision = TYPE_PRECISION (type);
4775 
4776   /* If this function returns true when the type of the mask is
4777      UNSIGNED, then there will be errors.  In particular see
4778      gcc.c-torture/execute/990326-1.c.  There does not appear to be
4779      any documentation paper trail as to why this is so.  But the pre
4780      wide-int worked with that restriction and it has been preserved
4781      here.  */
4782   if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4783     return false;
4784 
4785   return wi::mask (size, false, precision) == wi::to_wide (mask);
4786 }
4787 
4788 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4789    represents the sign bit of EXP's type.  If EXP represents a sign
4790    or zero extension, also test VAL against the unextended type.
4791    The return value is the (sub)expression whose sign bit is VAL,
4792    or NULL_TREE otherwise.  */
4793 
4794 tree
sign_bit_p(tree exp,const_tree val)4795 sign_bit_p (tree exp, const_tree val)
4796 {
4797   int width;
4798   tree t;
4799 
4800   /* Tree EXP must have an integral type.  */
4801   t = TREE_TYPE (exp);
4802   if (! INTEGRAL_TYPE_P (t))
4803     return NULL_TREE;
4804 
4805   /* Tree VAL must be an integer constant.  */
4806   if (TREE_CODE (val) != INTEGER_CST
4807       || TREE_OVERFLOW (val))
4808     return NULL_TREE;
4809 
4810   width = TYPE_PRECISION (t);
4811   if (wi::only_sign_bit_p (wi::to_wide (val), width))
4812     return exp;
4813 
4814   /* Handle extension from a narrower type.  */
4815   if (TREE_CODE (exp) == NOP_EXPR
4816       && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4817     return sign_bit_p (TREE_OPERAND (exp, 0), val);
4818 
4819   return NULL_TREE;
4820 }
4821 
4822 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4823    to be evaluated unconditionally.  */
4824 
4825 static bool
simple_operand_p(const_tree exp)4826 simple_operand_p (const_tree exp)
4827 {
4828   /* Strip any conversions that don't change the machine mode.  */
4829   STRIP_NOPS (exp);
4830 
4831   return (CONSTANT_CLASS_P (exp)
4832   	  || TREE_CODE (exp) == SSA_NAME
4833 	  || (DECL_P (exp)
4834 	      && ! TREE_ADDRESSABLE (exp)
4835 	      && ! TREE_THIS_VOLATILE (exp)
4836 	      && ! DECL_NONLOCAL (exp)
4837 	      /* Don't regard global variables as simple.  They may be
4838 		 allocated in ways unknown to the compiler (shared memory,
4839 		 #pragma weak, etc).  */
4840 	      && ! TREE_PUBLIC (exp)
4841 	      && ! DECL_EXTERNAL (exp)
4842 	      /* Weakrefs are not safe to be read, since they can be NULL.
4843  		 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4844 		 have DECL_WEAK flag set.  */
4845 	      && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4846 	      /* Loading a static variable is unduly expensive, but global
4847 		 registers aren't expensive.  */
4848 	      && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4849 }
4850 
4851 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4852    to be evaluated unconditionally.
4853    I addition to simple_operand_p, we assume that comparisons, conversions,
4854    and logic-not operations are simple, if their operands are simple, too.  */
4855 
4856 static bool
simple_operand_p_2(tree exp)4857 simple_operand_p_2 (tree exp)
4858 {
4859   enum tree_code code;
4860 
4861   if (TREE_SIDE_EFFECTS (exp) || generic_expr_could_trap_p (exp))
4862     return false;
4863 
4864   while (CONVERT_EXPR_P (exp))
4865     exp = TREE_OPERAND (exp, 0);
4866 
4867   code = TREE_CODE (exp);
4868 
4869   if (TREE_CODE_CLASS (code) == tcc_comparison)
4870     return (simple_operand_p (TREE_OPERAND (exp, 0))
4871 	    && simple_operand_p (TREE_OPERAND (exp, 1)));
4872 
4873   if (code == TRUTH_NOT_EXPR)
4874       return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4875 
4876   return simple_operand_p (exp);
4877 }
4878 
4879 
4880 /* The following functions are subroutines to fold_range_test and allow it to
4881    try to change a logical combination of comparisons into a range test.
4882 
4883    For example, both
4884 	X == 2 || X == 3 || X == 4 || X == 5
4885    and
4886 	X >= 2 && X <= 5
4887    are converted to
4888 	(unsigned) (X - 2) <= 3
4889 
4890    We describe each set of comparisons as being either inside or outside
4891    a range, using a variable named like IN_P, and then describe the
4892    range with a lower and upper bound.  If one of the bounds is omitted,
4893    it represents either the highest or lowest value of the type.
4894 
4895    In the comments below, we represent a range by two numbers in brackets
4896    preceded by a "+" to designate being inside that range, or a "-" to
4897    designate being outside that range, so the condition can be inverted by
4898    flipping the prefix.  An omitted bound is represented by a "-".  For
4899    example, "- [-, 10]" means being outside the range starting at the lowest
4900    possible value and ending at 10, in other words, being greater than 10.
4901    The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4902    always false.
4903 
4904    We set up things so that the missing bounds are handled in a consistent
4905    manner so neither a missing bound nor "true" and "false" need to be
4906    handled using a special case.  */
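
/* In this notation, the example above reads "+ [2, 5]": X is inside the
   range from 2 to 5.  Flipping the prefix gives "- [2, 5]", that is,
   X < 2 || X > 5.  */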

/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
	     tree arg1, int upper1_p)
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
			 arg0, fold_convert (TREE_TYPE (arg0), arg1));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open-ended ranges are
     the same.  But, this is computer arithmetic, where numbers are finite.
     We can therefore replace any unbounded bound with a value Z, Z being
     greater than any representable number.  This permits us to treat
     unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      gcc_unreachable ();
    }

  return constant_boolean_node (result, type);
}
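
/* For example (illustrative): an omitted lower bound (SGN == -1) compared
   LT_EXPR against a present constant (SGN == 0) yields true, and two
   omitted upper bounds compared LE_EXPR yield true, since both get
   SGN == 1 and are treated as the same value Z.  */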

/* Helper routine for make_range.  Perform one step for it, return
   new expression if the loop should continue or NULL_TREE if it should
   stop.  */

tree
make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
		 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
		 bool *strict_overflow_p)
{
  tree arg0_type = TREE_TYPE (arg0);
  tree n_low, n_high, low = *p_low, high = *p_high;
  int in_p = *p_in_p, n_in_p;

  switch (code)
    {
    case TRUTH_NOT_EXPR:
      /* We can only do something if the range is testing for zero.  */
      if (low == NULL_TREE || high == NULL_TREE
	  || ! integer_zerop (low) || ! integer_zerop (high))
	return NULL_TREE;
      *p_in_p = ! in_p;
      return arg0;

    case EQ_EXPR: case NE_EXPR:
    case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
      /* We can only do something if the range is testing for zero
	 and if the second operand is an integer constant.  Note that
	 saying something is "in" the range we make is done by
	 complementing IN_P since it will be set in the initial case of
	 being not equal to zero; "out" is leaving it alone.  */
      if (low == NULL_TREE || high == NULL_TREE
	  || ! integer_zerop (low) || ! integer_zerop (high)
	  || TREE_CODE (arg1) != INTEGER_CST)
	return NULL_TREE;

      switch (code)
	{
	case NE_EXPR:  /* - [c, c]  */
	  low = high = arg1;
	  break;
	case EQ_EXPR:  /* + [c, c]  */
	  in_p = ! in_p, low = high = arg1;
	  break;
	case GT_EXPR:  /* - [-, c] */
	  low = 0, high = arg1;
	  break;
	case GE_EXPR:  /* + [c, -] */
	  in_p = ! in_p, low = arg1, high = 0;
	  break;
	case LT_EXPR:  /* - [c, -] */
	  low = arg1, high = 0;
	  break;
	case LE_EXPR:  /* + [-, c] */
	  in_p = ! in_p, low = 0, high = arg1;
	  break;
	default:
	  gcc_unreachable ();
	}

      /* If this is an unsigned comparison, we also know that EXP is
	 greater than or equal to zero.  We base the range tests we make
	 on that fact, so we record it here so we can parse existing
	 range tests.  We test arg0_type since often the return type
	 of, e.g. EQ_EXPR, is boolean.  */
      if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
	{
	  if (! merge_ranges (&n_in_p, &n_low, &n_high,
			      in_p, low, high, 1,
			      build_int_cst (arg0_type, 0),
			      NULL_TREE))
	    return NULL_TREE;

	  in_p = n_in_p, low = n_low, high = n_high;

	  /* If the high bound is missing, but we have a nonzero low
	     bound, reverse the range so it goes from zero to the low bound
	     minus 1.  */
	  if (high == 0 && low && ! integer_zerop (low))
	    {
	      in_p = ! in_p;
	      high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
				  build_int_cst (TREE_TYPE (low), 1), 0);
	      low = build_int_cst (arg0_type, 0);
	    }
	}

      *p_low = low;
      *p_high = high;
      *p_in_p = in_p;
      return arg0;

    case NEGATE_EXPR:
      /* If flag_wrapv and ARG0_TYPE is signed, make sure
	 low and high are non-NULL, then normalize will DTRT.  */
      if (!TYPE_UNSIGNED (arg0_type)
	  && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
	{
	  if (low == NULL_TREE)
	    low = TYPE_MIN_VALUE (arg0_type);
	  if (high == NULL_TREE)
	    high = TYPE_MAX_VALUE (arg0_type);
	}

      /* (-x) IN [a,b] -> x in [-b, -a]  */
      n_low = range_binop (MINUS_EXPR, exp_type,
			   build_int_cst (exp_type, 0),
			   0, high, 1);
      n_high = range_binop (MINUS_EXPR, exp_type,
			    build_int_cst (exp_type, 0),
			    0, low, 0);
      if (n_high != 0 && TREE_OVERFLOW (n_high))
	return NULL_TREE;
      goto normalize;

    case BIT_NOT_EXPR:
      /* ~ X -> -X - 1  */
      return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
			 build_int_cst (exp_type, 1));

    case PLUS_EXPR:
    case MINUS_EXPR:
      if (TREE_CODE (arg1) != INTEGER_CST)
	return NULL_TREE;

      /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
	 move a constant to the other side.  */
      if (!TYPE_UNSIGNED (arg0_type)
	  && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
	return NULL_TREE;

      /* If EXP is signed, any overflow in the computation is undefined,
	 so we don't worry about it so long as our computations on
	 the bounds don't overflow.  For unsigned, overflow is defined
	 and this is exactly the right thing.  */
      n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
			   arg0_type, low, 0, arg1, 0);
      n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
			    arg0_type, high, 1, arg1, 0);
      if ((n_low != 0 && TREE_OVERFLOW (n_low))
	  || (n_high != 0 && TREE_OVERFLOW (n_high)))
	return NULL_TREE;

      if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
	*strict_overflow_p = true;

      normalize:
	/* Check for an unsigned range which has wrapped around the maximum
	   value thus making n_high < n_low, and normalize it.  */
	if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
	  {
	    low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
			       build_int_cst (TREE_TYPE (n_high), 1), 0);
	    high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
				build_int_cst (TREE_TYPE (n_low), 1), 0);

	    /* If the range is of the form +/- [ x+1, x ], we won't
	       be able to normalize it.  But then, it represents the
	       whole range or the empty set, so make it
	       +/- [ -, - ].  */
	    if (tree_int_cst_equal (n_low, low)
		&& tree_int_cst_equal (n_high, high))
	      low = high = 0;
	    else
	      in_p = ! in_p;
	  }
	else
	  low = n_low, high = n_high;

	*p_low = low;
	*p_high = high;
	*p_in_p = in_p;
	return arg0;

    CASE_CONVERT:
    case NON_LVALUE_EXPR:
      if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
	return NULL_TREE;

      if (! INTEGRAL_TYPE_P (arg0_type)
	  || (low != 0 && ! int_fits_type_p (low, arg0_type))
	  || (high != 0 && ! int_fits_type_p (high, arg0_type)))
	return NULL_TREE;

      n_low = low, n_high = high;

      if (n_low != 0)
	n_low = fold_convert_loc (loc, arg0_type, n_low);

      if (n_high != 0)
	n_high = fold_convert_loc (loc, arg0_type, n_high);

      /* If we're converting arg0 from an unsigned type to exp's
	 signed type, we will be doing the comparison as unsigned.
	 The tests above have already verified that LOW and HIGH
	 are both positive.

	 So we have to ensure that we will handle large unsigned
	 values the same way that the current signed bounds treat
	 negative values.  */

      if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
	{
	  tree high_positive;
	  tree equiv_type;
	  /* For fixed-point modes, we need to pass the saturating flag
	     as the 2nd parameter.  */
	  if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
	    equiv_type
	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
						TYPE_SATURATING (arg0_type));
	  else
	    equiv_type
	      = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);

	  /* A range without an upper bound is, naturally, unbounded.
	     Since convert would have cropped a very large value, use
	     the max value for the destination type.  */
	  high_positive
	    = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
	      : TYPE_MAX_VALUE (arg0_type);

	  if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
	    high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
					     fold_convert_loc (loc, arg0_type,
							       high_positive),
					     build_int_cst (arg0_type, 1));

	  /* If the low bound is specified, "and" the range with the
	     range for which the original unsigned value will be
	     positive.  */
	  if (low != 0)
	    {
	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
				  1, fold_convert_loc (loc, arg0_type,
						       integer_zero_node),
				  high_positive))
		return NULL_TREE;

	      in_p = (n_in_p == in_p);
	    }
	  else
	    {
	      /* Otherwise, "or" the range with the range of the input
		 that will be interpreted as negative.  */
	      if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
				  1, fold_convert_loc (loc, arg0_type,
						       integer_zero_node),
				  high_positive))
		return NULL_TREE;

	      in_p = (in_p != n_in_p);
	    }
	}

      *p_low = n_low;
      *p_high = n_high;
      *p_in_p = in_p;
      return arg0;

    default:
      return NULL_TREE;
    }
}
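
/* For example (illustrative): make_range starts X < 10 as the range
   "- [0, 0]", i.e. X != 0; one step with code LT_EXPR and arg1 == 10
   rewrites it to "- [10, -]", i.e. "not (X >= 10)", which is X < 10.  */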

/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same
   type as the returned expression.  If EXP is not a comparison, we
   will most likely not be returning a useful value and range.  Set
   *STRICT_OVERFLOW_P to true if the return value is only valid
   because signed overflow is undefined; otherwise, do not change
   *STRICT_OVERFLOW_P.  */

tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
	    bool *strict_overflow_p)
{
  enum tree_code code;
  tree arg0, arg1 = NULL_TREE;
  tree exp_type, nexp;
  int in_p;
  tree low, high;
  location_t loc = EXPR_LOCATION (exp);

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = build_int_cst (TREE_TYPE (exp), 0);

  while (1)
    {
      code = TREE_CODE (exp);
      exp_type = TREE_TYPE (exp);
      arg0 = NULL_TREE;

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
	{
	  if (TREE_OPERAND_LENGTH (exp) > 0)
	    arg0 = TREE_OPERAND (exp, 0);
	  if (TREE_CODE_CLASS (code) == tcc_binary
	      || TREE_CODE_CLASS (code) == tcc_comparison
	      || (TREE_CODE_CLASS (code) == tcc_expression
		  && TREE_OPERAND_LENGTH (exp) > 1))
	    arg1 = TREE_OPERAND (exp, 1);
	}
      if (arg0 == NULL_TREE)
	break;

      nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
			      &high, &in_p, strict_overflow_p);
      if (nexp == NULL_TREE)
	break;
      exp = nexp;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
						 exp, 0, low, 0))
		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
						    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}

/* Return TRUE if the [LOW, HIGH] range check can be optimized to
   a bitwise check, i.e. when
     LOW  == 0xXX...X00...0
     HIGH == 0xXX...X11...1
   Return the corresponding mask in MASK and stem in VALUE.  */
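/* For example (illustrative): in an 8-bit type, LOW == 0x50 and
   HIGH == 0x5F differ only in their low four bits, so MASK == 0xF0 and
   VALUE == 0x50, and the range check becomes (X & 0xF0) == 0x50.  */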

static bool
maskable_range_p (const_tree low, const_tree high, tree type, tree *mask,
		  tree *value)
{
  if (TREE_CODE (low) != INTEGER_CST
      || TREE_CODE (high) != INTEGER_CST)
    return false;

  unsigned prec = TYPE_PRECISION (type);
  wide_int lo = wi::to_wide (low, prec);
  wide_int hi = wi::to_wide (high, prec);

  wide_int end_mask = lo ^ hi;
  if ((end_mask & (end_mask + 1)) != 0
      || (lo & end_mask) != 0)
    return false;

  wide_int stem_mask = ~end_mask;
  wide_int stem = lo & stem_mask;
  if (stem != (hi & stem_mask))
    return false;

  *mask = wide_int_to_tree (type, stem_mask);
  *value = wide_int_to_tree (type, stem);

  return true;
}

/* Helper routine for build_range_check and match.pd.  Return the type to
   perform the check in, or NULL if it shouldn't be optimized.  */

tree
range_check_type (tree etype)
{
  /* First make sure that arithmetic in this type is valid, then make sure
     that it wraps around.  */
  if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
    etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype), 1);

  if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_UNSIGNED (etype))
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
	 for the type in question, as we rely on this here.  */
      utype = unsigned_type_for (etype);
      maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
      maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
			  build_int_cst (TREE_TYPE (maxv), 1), 1);
      minv = fold_convert (utype, TYPE_MIN_VALUE (etype));

      if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
				      minv, 1, maxv, 1)))
	etype = utype;
      else
	return NULL_TREE;
    }
  else if (POINTER_TYPE_P (etype))
    etype = unsigned_type_for (etype);
  return etype;
}

/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  Return 0 if the test couldn't be created.  */

tree
build_range_check (location_t loc, tree type, tree exp, int in_p,
		   tree low, tree high)
{
  tree etype = TREE_TYPE (exp), mask, value;

  /* Disable this optimization for function pointer expressions
     on targets that require function pointer canonicalization.  */
  if (targetm.have_canonicalize_funcptr_for_compare ()
      && POINTER_TYPE_P (etype)
      && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (etype)))
    return NULL_TREE;

  if (! in_p)
    {
      value = build_range_check (loc, type, exp, 1, low, high);
      if (value != 0)
        return invert_truthvalue_loc (loc, value);

      return 0;
    }

  if (low == 0 && high == 0)
    return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);

  if (low == 0)
    return fold_build2_loc (loc, LE_EXPR, type, exp,
			    fold_convert_loc (loc, etype, high));

  if (high == 0)
    return fold_build2_loc (loc, GE_EXPR, type, exp,
			    fold_convert_loc (loc, etype, low));

  if (operand_equal_p (low, high, 0))
    return fold_build2_loc (loc, EQ_EXPR, type, exp,
			    fold_convert_loc (loc, etype, low));

  if (TREE_CODE (exp) == BIT_AND_EXPR
      && maskable_range_p (low, high, etype, &mask, &value))
    return fold_build2_loc (loc, EQ_EXPR, type,
			    fold_build2_loc (loc, BIT_AND_EXPR, etype,
					     exp, mask),
			    value);

  if (integer_zerop (low))
    {
      if (! TYPE_UNSIGNED (etype))
	{
	  etype = unsigned_type_for (etype);
	  high = fold_convert_loc (loc, etype, high);
	  exp = fold_convert_loc (loc, etype, exp);
	}
      return build_range_check (loc, type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      int prec = TYPE_PRECISION (etype);

      if (wi::mask <widest_int> (prec - 1, false) == wi::to_widest (high))
	{
	  if (TYPE_UNSIGNED (etype))
	    {
	      tree signed_etype = signed_type_for (etype);
	      if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
		etype
		  = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
	      else
		etype = signed_etype;
	      exp = fold_convert_loc (loc, etype, exp);
	    }
	  return fold_build2_loc (loc, GT_EXPR, type, exp,
				  build_int_cst (etype, 0));
	}
    }

  /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
     This requires wrap-around arithmetic for the type of the expression.  */
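  /* For example (illustrative): c >= 2 && c <= 5 becomes
     (unsigned) (c - 2) <= 3, matching the range-test example at the top
     of this group of functions.  */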
  etype = range_check_type (etype);
  if (etype == NULL_TREE)
    return NULL_TREE;

  high = fold_convert_loc (loc, etype, high);
  low = fold_convert_loc (loc, etype, low);
  exp = fold_convert_loc (loc, etype, exp);

  value = const_binop (MINUS_EXPR, high, low);

  if (value != 0 && !TREE_OVERFLOW (value))
    return build_range_check (loc, type,
			      fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
			      1, build_int_cst (etype, 0), value);

  return 0;
}

/* Return the predecessor of VAL in its type, handling the infinite case.  */

static tree
range_predecessor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
    return 0;
  else
    return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
			build_int_cst (TREE_TYPE (val), 1), 0);
}

/* Return the successor of VAL in its type, handling the infinite case.  */

static tree
range_successor (tree val)
{
  tree type = TREE_TYPE (val);

  if (INTEGRAL_TYPE_P (type)
      && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
    return 0;
  else
    return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
			build_int_cst (TREE_TYPE (val), 1), 0);
}

/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.  */

bool
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
	      tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  int lowequal = ((low0 == 0 && low1 == 0)
		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
				 low0, 0, low1, 0))
      || (lowequal
	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
					high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* If the second range is != high1 where high1 is the maximum value of
     its type, try first merging with the < high1 range.  */
  if (low1
      && high1
      && TREE_CODE (low1) == INTEGER_CST
      && (TREE_CODE (TREE_TYPE (low1)) == INTEGER_TYPE
	  || (TREE_CODE (TREE_TYPE (low1)) == ENUMERAL_TYPE
	      && known_eq (TYPE_PRECISION (TREE_TYPE (low1)),
			   GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low1))))))
      && operand_equal_p (low1, high1, 0))
    {
      if (tree_int_cst_equal (low1, TYPE_MAX_VALUE (TREE_TYPE (low1)))
	  && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
			   !in1_p, NULL_TREE, range_predecessor (low1)))
	return true;
      /* Similarly for the second range != low1 where low1 is the minimum
	 value of its type, try first merging with the > low1 range.  */
      if (tree_int_cst_equal (low1, TYPE_MIN_VALUE (TREE_TYPE (low1)))
	  && merge_ranges (pin_p, plow, phigh, in0_p, low0, high0,
			   !in1_p, range_successor (low1), NULL_TREE))
	return true;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
					  high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
				      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
	 is a subset it is the result.  Otherwise, the range is from the start
	 of the second to the end of the first.  */
      if (no_overlap)
	in_p = 0, low = high = 0;
      else if (subset)
	in_p = 1, low = low1, high = high1;
      else
	in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
	 equal, the result is false.  If the second range is a subset of the
	 first, and the ranges begin at the same place, we go from just after
	 the end of the second range to the end of the first.  If the second
	 range is not a subset of the first, or if it is a subset and both
	 ranges end at the same place, the range starts at the start of the
	 first range and ends just before the second range.
	 Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
	in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
	in_p = 0, low = high = 0;
      else if (subset && lowequal)
	{
	  low = range_successor (high1);
	  high = high0;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* We are in the weird situation where high0 > high1 but
		 high1 has no successor.  Punt.  */
	      return 0;
	    }
	}
      else if (! subset || highequal)
	{
	  low = low0;
	  high = range_predecessor (low1);
	  in_p = 1;
	  if (high == 0)
	    {
	      /* low0 < low1 but low1 has no predecessor.  Punt.  */
	      return 0;
	    }
	}
      else
	return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
	 is a subset of the first, the result is false.  Otherwise,
	 the range starts just after the first range and ends at the
	 end of the second.  */
      if (no_overlap)
	in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
	in_p = 0, low = high = 0;
      else
	{
	  low = range_successor (high0);
	  high = high1;
	  in_p = 1;
	  if (low == 0)
	    {
	      /* high1 > high0 but high0 has no successor.  Punt.  */
	      return 0;
	    }
	}
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
	 is if they don't overlap.  In that case, the only time we have a
	 range is if they are adjacent.  If the second is a subset of the
	 first, the result is the first.  Otherwise, the range to exclude
	 starts at the beginning of the first range and ends at the end of the
	 second.  */
      if (no_overlap)
	{
	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
					 range_successor (high0),
					 1, low1, 0)))
	    in_p = 0, low = low0, high = high1;
	  else
	    {
	      /* Canonicalize - [min, x] into - [-, x].  */
	      if (low0 && TREE_CODE (low0) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (low0)))
		  {
		  case ENUMERAL_TYPE:
		    if (maybe_ne (TYPE_PRECISION (TREE_TYPE (low0)),
				  GET_MODE_BITSIZE
				    (TYPE_MODE (TREE_TYPE (low0)))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (low0,
					    TYPE_MIN_VALUE (TREE_TYPE (low0))))
		      low0 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (low0))
			&& integer_zerop (low0))
		      low0 = 0;
		    break;
		  default:
		    break;
		  }

	      /* Canonicalize - [x, max] into - [x, -].  */
	      if (high1 && TREE_CODE (high1) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (high1)))
		  {
		  case ENUMERAL_TYPE:
		    if (maybe_ne (TYPE_PRECISION (TREE_TYPE (high1)),
				  GET_MODE_BITSIZE
				    (TYPE_MODE (TREE_TYPE (high1)))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (high1,
					    TYPE_MAX_VALUE (TREE_TYPE (high1))))
		      high1 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (high1))
			&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
						       high1, 1,
						       build_int_cst (TREE_TYPE (high1), 1),
						       1)))
		      high1 = 0;
		    break;
		  default:
		    break;
		  }

	      /* The ranges might be also adjacent between the maximum and
	         minimum values of the given type.  For
	         - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
	         return + [x + 1, y - 1].  */
	      if (low0 == 0 && high1 == 0)
	        {
		  low = range_successor (high0);
		  high = range_predecessor (low1);
		  if (low == 0 || high == 0)
		    return 0;

		  in_p = 1;
		}
	      else
		return 0;
	    }
	}
      else if (subset)
	in_p = 0, low = low0, high = high0;
      else
	in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
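
/* For example (illustrative): merging "+ [2, 5]" with "+ [4, 9]" (both
   included, as for a logical AND) yields the intersection "+ [4, 5]",
   while merging "+ [2, 5]" with "- [4, 9]" yields "+ [2, 3]".  */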


/* Subroutine of fold, looking inside expressions of the form
   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
   of the COND_EXPR.  This function is being used also to optimize
   A op B ? C : A, by reversing the comparison first.

   Return a folded expression whose code is not a COND_EXPR
   anymore, or NULL_TREE if no folding opportunity is found.  */

static tree
fold_cond_expr_with_comparison (location_t loc, tree type,
				tree arg0, tree arg1, tree arg2)
{
  enum tree_code comp_code = TREE_CODE (arg0);
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  tree arg1_type = TREE_TYPE (arg1);
  tree tem;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* If we have A op 0 ? A : -A, consider applying the following
     transformations:

     A == 0? A : -A    same as -A
     A != 0? A : -A    same as A
     A >= 0? A : -A    same as abs (A)
     A > 0?  A : -A    same as abs (A)
     A <= 0? A : -A    same as -abs (A)
     A < 0?  A : -A    same as -abs (A)

     None of these transformations work for modes with signed
     zeros.  If A is +/-0, the first two transformations will
     change the sign of the result (from +0 to -0, or vice
     versa).  The last four will fix the sign of the result,
     even though the original expressions could be positive or
     negative, depending on the sign of A.

     Note that all these transformations are correct if A is
     NaN, since the two alternatives (A and -A) are also NaNs.  */
  if (!HONOR_SIGNED_ZEROS (element_mode (type))
      && (FLOAT_TYPE_P (TREE_TYPE (arg01))
	  ? real_zerop (arg01)
	  : integer_zerop (arg01))
      && ((TREE_CODE (arg2) == NEGATE_EXPR
	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
	     /* In the case that A is of the form X-Y, '-A' (arg2) may
	        have already been folded to Y-X, check for that.  */
	  || (TREE_CODE (arg1) == MINUS_EXPR
	      && TREE_CODE (arg2) == MINUS_EXPR
	      && operand_equal_p (TREE_OPERAND (arg1, 0),
				  TREE_OPERAND (arg2, 1), 0)
	      && operand_equal_p (TREE_OPERAND (arg1, 1),
				  TREE_OPERAND (arg2, 0), 0))))
    switch (comp_code)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
	tem = fold_convert_loc (loc, arg1_type, arg1);
	return fold_convert_loc (loc, type, negate_expr (tem));
      case NE_EXPR:
      case LTGT_EXPR:
	return fold_convert_loc (loc, type, arg1);
      case UNGE_EXPR:
      case UNGT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case GE_EXPR:
      case GT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  break;
	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
	return fold_convert_loc (loc, type, tem);
      case UNLE_EXPR:
      case UNLT_EXPR:
	if (flag_trapping_math)
	  break;
	/* FALLTHRU */
      case LE_EXPR:
      case LT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  break;
	if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg1))
	    && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
	  {
	    /* A <= 0 ? A : -A for A INT_MIN is valid, but -abs(INT_MIN)
	       is not: it invokes UB both in abs and in the negation of it.
	       So, use ABSU_EXPR instead.  */
	    tree utype = unsigned_type_for (TREE_TYPE (arg1));
	    tem = fold_build1_loc (loc, ABSU_EXPR, utype, arg1);
	    tem = negate_expr (tem);
	    return fold_convert_loc (loc, type, tem);
	  }
	else
	  {
	    tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
	    return negate_expr (fold_convert_loc (loc, type, tem));
	  }
      default:
	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	break;
      }

  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
     both transformations are correct when A is NaN: A != 0
     is then true, and A == 0 is false.  */

  if (!HONOR_SIGNED_ZEROS (element_mode (type))
      && integer_zerop (arg01) && integer_zerop (arg2))
    {
      if (comp_code == NE_EXPR)
	return fold_convert_loc (loc, type, arg1);
      else if (comp_code == EQ_EXPR)
	return build_zero_cst (type);
    }

  /* Try some transformations of A op B ? A : B.

     A == B? A : B    same as B
     A != B? A : B    same as A
     A >= B? A : B    same as max (A, B)
     A > B?  A : B    same as max (B, A)
     A <= B? A : B    same as min (A, B)
     A < B?  A : B    same as min (B, A)

     As above, these transformations don't work in the presence
     of signed zeros.  For example, if A and B are zeros of
     opposite sign, the first two transformations will change
     the sign of the result.  In the last four, the original
     expressions give different results for (A=+0, B=-0) and
     (A=-0, B=+0), but the transformed expressions do not.

     The first two transformations are correct if either A or B
     is a NaN.  In the first transformation, the condition will
     be false, and B will indeed be chosen.  In the case of the
     second transformation, the condition A != B will be true,
     and A will be chosen.

     The conversions to max() and min() are not correct if B is
     a number and A is not.  The conditions in the original
     expressions will be false, so all four give B.  The min()
     and max() versions would give a NaN instead.  */
  if (!HONOR_SIGNED_ZEROS (element_mode (type))
      && operand_equal_for_comparison_p (arg01, arg2)
      /* Avoid these transformations if the COND_EXPR may be used
	 as an lvalue in the C++ front-end.  PR c++/19199.  */
      && (in_gimple_form
	  || VECTOR_TYPE_P (type)
	  || (! lang_GNU_CXX ()
	      && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
	  || ! maybe_lvalue_p (arg1)
	  || ! maybe_lvalue_p (arg2)))
    {
      tree comp_op0 = arg00;
      tree comp_op1 = arg01;
      tree comp_type = TREE_TYPE (comp_op0);

      switch (comp_code)
	{
	case EQ_EXPR:
	  return fold_convert_loc (loc, type, arg2);
	case NE_EXPR:
	  return fold_convert_loc (loc, type, arg1);
	case LE_EXPR:
	case LT_EXPR:
	case UNLE_EXPR:
	case UNLT_EXPR:
	  /* In C++ a ?: expression can be an lvalue, so put the
	     operand which will be used if they are equal first
	     so that we can convert this back to the
	     corresponding COND_EXPR.  */
	  if (!HONOR_NANS (arg1))
	    {
	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
		    ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2_loc (loc, MIN_EXPR, comp_type,
				   comp_op1, comp_op0);
	      return fold_convert_loc (loc, type, tem);
	    }
	  break;
	case GE_EXPR:
	case GT_EXPR:
	case UNGE_EXPR:
	case UNGT_EXPR:
	  if (!HONOR_NANS (arg1))
	    {
	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
		    ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2_loc (loc, MAX_EXPR, comp_type,
				   comp_op1, comp_op0);
	      return fold_convert_loc (loc, type, tem);
	    }
	  break;
	case UNEQ_EXPR:
	  if (!HONOR_NANS (arg1))
	    return fold_convert_loc (loc, type, arg2);
	  break;
	case LTGT_EXPR:
	  if (!HONOR_NANS (arg1))
	    return fold_convert_loc (loc, type, arg1);
	  break;
	default:
	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	  break;
	}
    }

  return NULL_TREE;
}


#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
#define LOGICAL_OP_NON_SHORT_CIRCUIT \
  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
		false) >= 2)
#endif

/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.  */

static tree
fold_range_test (location_t loc, enum tree_code code, tree type,
		 tree op0, tree op1)
{
  int or_op = (code == TRUTH_ORIF_EXPR
	       || code == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  bool strict_overflow_p = false;
  tree tem, lhs, rhs;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when simplifying range test");

  if (!INTEGRAL_TYPE_P (type))
    return 0;

  lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
  /* If op0 is known true or false and this is a short-circuiting
     operation we must not merge with op1 since that makes side-effects
     unconditional.  So special-case this.  */
  if (!lhs
      && ((code == TRUTH_ORIF_EXPR && in0_p)
	  || (code == TRUTH_ANDIF_EXPR && !in0_p)))
    return op0;
  rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or its inversion.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
		       in1_p, low1, high1)
      && (tem = (build_range_check (loc, type,
				    lhs != 0 ? lhs
				    : rhs != 0 ? rhs : integer_zero_node,
				    in_p, low, high))) != 0)
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
      return or_op ? invert_truthvalue_loc (loc, tem) : tem;
    }

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
  if (param_logical_op_non_short_circuit != -1)
    logical_op_non_short_circuit
      = param_logical_op_non_short_circuit;
  if (logical_op_non_short_circuit
      && !flag_sanitize_coverage
      && lhs != 0 && rhs != 0
      && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
      && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
	 which cases we can't do this.  */
      if (simple_operand_p (lhs))
	return build2_loc (loc, code == TRUTH_ANDIF_EXPR
			   ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
			   type, op0, op1);

      else if (!lang_hooks.decls.global_bindings_p ()
	       && !CONTAINS_PLACEHOLDER_P (lhs))
	{
	  tree common = save_expr (lhs);

	  if ((lhs = build_range_check (loc, type, common,
					or_op ? ! in0_p : in0_p,
					low0, high0)) != 0
	      && (rhs = build_range_check (loc, type, common,
					   or_op ? ! in1_p : in1_p,
					   low1, high1)) != 0)
	    {
	      if (strict_overflow_p)
		fold_overflow_warning (warnmsg,
				       WARN_STRICT_OVERFLOW_COMPARISON);
	      return build2_loc (loc, code == TRUTH_ANDIF_EXPR
				 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
				 type, lhs, rhs);
	    }
	}
    }

  return 0;
}

/* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is sign-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */
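/* For example (illustrative): with P == 4 in an 8-bit mode, C == 0x0C has
   its 4-bit sign bit set, so TEMP becomes the sign-extension pattern
   0x80 >> 3 == 0xF0 and the result is 0x0C ^ 0xF0 == 0xFC; had C already
   been sign-extended (0xFC), the result would be 0x0C, with the extra
   bits zero.  */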

static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (type));
  tree temp;

  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = build_int_cst (TREE_TYPE (c),
			wi::extract_uhwi (wi::to_wide (c), p - 1, 1));

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (signed_type_for (type), temp);

  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
			fold_convert (TREE_TYPE (c), mask));
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
}

/* For an expression that has the form
     (A && B) || ~B
   or
     (A || B) && ~B,
   we can drop one of the inner expressions and simplify to
     A || ~B
   or
     A && ~B
   LOC is the location of the resulting expression.  OP is the inner
   logical operation; the left-hand side in the examples above, while CMPOP
   is the right-hand side.  RHS_ONLY is used to prevent us from accidentally
   removing a condition that guards another, as in
     (A != NULL && A->...) || A == NULL
   which we must not transform.  If RHS_ONLY is true, only eliminate the
   right-most operand of the inner logical operation.  */

static tree
merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
				 bool rhs_only)
{
  tree type = TREE_TYPE (cmpop);
  enum tree_code code = TREE_CODE (cmpop);
  enum tree_code truthop_code = TREE_CODE (op);
  tree lhs = TREE_OPERAND (op, 0);
  tree rhs = TREE_OPERAND (op, 1);
  tree orig_lhs = lhs, orig_rhs = rhs;
  enum tree_code rhs_code = TREE_CODE (rhs);
  enum tree_code lhs_code = TREE_CODE (lhs);
  enum tree_code inv_code;

  if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
    return NULL_TREE;

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return NULL_TREE;

  if (rhs_code == truthop_code)
    {
      tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
      if (newrhs != NULL_TREE)
	{
	  rhs = newrhs;
	  rhs_code = TREE_CODE (rhs);
	}
    }
  if (lhs_code == truthop_code && !rhs_only)
    {
      tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
      if (newlhs != NULL_TREE)
	{
	  lhs = newlhs;
	  lhs_code = TREE_CODE (lhs);
	}
    }

  inv_code = invert_tree_comparison (code, HONOR_NANS (type));
  if (inv_code == rhs_code
      && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return lhs;
  if (!rhs_only && inv_code == lhs_code
      && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
      && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
    return rhs;
  if (rhs != orig_rhs || lhs != orig_lhs)
    return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
			    lhs, rhs);
  return NULL_TREE;
}

/* Find ways of folding logical expressions of LHS and RHS:
   Try to merge two comparisons to the same innermost item.
   Look for range tests like "ch >= '0' && ch <= '9'".
   Look for combinations of simple terms on machines with expensive branches
   and evaluate the RHS unconditionally.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made by this
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */

static tree
fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
		    tree lhs, tree rhs)
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
	(a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */
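  /* For example (illustrative): for a one-bit field, x == 1 and x != 0
     test the same thing, so a comparison with the "wrong" code can be
     flipped to WANTED_CODE instead of being rejected.  */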
6214 
6215   enum tree_code wanted_code;
6216   enum tree_code lcode, rcode;
6217   tree ll_arg, lr_arg, rl_arg, rr_arg;
6218   tree ll_inner, lr_inner, rl_inner, rr_inner;
6219   HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
6220   HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
6221   HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
6222   HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
6223   int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
6224   int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
6225   machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
6226   scalar_int_mode lnmode, rnmode;
6227   tree ll_mask, lr_mask, rl_mask, rr_mask;
6228   tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
6229   tree l_const, r_const;
6230   tree lntype, rntype, result;
6231   HOST_WIDE_INT first_bit, end_bit;
6232   int volatilep;
6233 
6234   /* Start by getting the comparison codes.  Fail if anything is volatile.
6235      If one operand is a BIT_AND_EXPR with the constant one, treat it as if
6236      it were surrounded with a NE_EXPR.  */
6237 
6238   if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
6239     return 0;
6240 
6241   lcode = TREE_CODE (lhs);
6242   rcode = TREE_CODE (rhs);
6243 
6244   if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
6245     {
6246       lhs = build2 (NE_EXPR, truth_type, lhs,
6247 		    build_int_cst (TREE_TYPE (lhs), 0));
6248       lcode = NE_EXPR;
6249     }
6250 
6251   if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
6252     {
6253       rhs = build2 (NE_EXPR, truth_type, rhs,
6254 		    build_int_cst (TREE_TYPE (rhs), 0));
6255       rcode = NE_EXPR;
6256     }
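
  /* An illustrative note: after these two canonicalizations, an input
     like "a & 1 && b == 2" is processed as "(a & 1) != 0 && b == 2",
     so the merging code below only ever sees comparisons.  */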
6257 
6258   if (TREE_CODE_CLASS (lcode) != tcc_comparison
6259       || TREE_CODE_CLASS (rcode) != tcc_comparison)
6260     return 0;
6261 
6262   ll_arg = TREE_OPERAND (lhs, 0);
6263   lr_arg = TREE_OPERAND (lhs, 1);
6264   rl_arg = TREE_OPERAND (rhs, 0);
6265   rr_arg = TREE_OPERAND (rhs, 1);
6266 
6267   /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
6268   if (simple_operand_p (ll_arg)
6269       && simple_operand_p (lr_arg))
6270     {
6271       if (operand_equal_p (ll_arg, rl_arg, 0)
6272           && operand_equal_p (lr_arg, rr_arg, 0))
6273 	{
6274           result = combine_comparisons (loc, code, lcode, rcode,
6275 					truth_type, ll_arg, lr_arg);
6276 	  if (result)
6277 	    return result;
6278 	}
6279       else if (operand_equal_p (ll_arg, rr_arg, 0)
6280                && operand_equal_p (lr_arg, rl_arg, 0))
6281 	{
6282           result = combine_comparisons (loc, code, lcode,
6283 					swap_tree_comparison (rcode),
6284 					truth_type, ll_arg, lr_arg);
6285 	  if (result)
6286 	    return result;
6287 	}
6288     }
6289 
6290   code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
6291 	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
6292 
6293   /* If the RHS can be evaluated unconditionally and its operands are
6294      simple, it wins to evaluate the RHS unconditionally on machines
6295      with expensive branches.  In this case, this isn't a comparison
6296      that can be merged.  */
6297 
6298   if (BRANCH_COST (optimize_function_for_speed_p (cfun),
6299 		   false) >= 2
6300       && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
6301       && simple_operand_p (rl_arg)
6302       && simple_operand_p (rr_arg))
6303     {
6304       /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
6305       if (code == TRUTH_OR_EXPR
6306 	  && lcode == NE_EXPR && integer_zerop (lr_arg)
6307 	  && rcode == NE_EXPR && integer_zerop (rr_arg)
6308 	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6309 	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6310 	return build2_loc (loc, NE_EXPR, truth_type,
6311 			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6312 				   ll_arg, rl_arg),
6313 			   build_int_cst (TREE_TYPE (ll_arg), 0));
6314 
6315       /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
6316       if (code == TRUTH_AND_EXPR
6317 	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
6318 	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
6319 	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
6320 	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
6321 	return build2_loc (loc, EQ_EXPR, truth_type,
6322 			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
6323 				   ll_arg, rl_arg),
6324 			   build_int_cst (TREE_TYPE (ll_arg), 0));
6325     }
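
  /* An illustrative note: on a target where BRANCH_COST is at least 2,
     "a != 0 || b != 0" compiled as "(a | b) != 0" needs one test and
     branch instead of two.  */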
6326 
6327   /* See if the comparisons can be merged.  Then get all the parameters for
6328      each side.  */
6329 
6330   if ((lcode != EQ_EXPR && lcode != NE_EXPR)
6331       || (rcode != EQ_EXPR && rcode != NE_EXPR))
6332     return 0;
6333 
6334   ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
6335   volatilep = 0;
6336   ll_inner = decode_field_reference (loc, &ll_arg,
6337 				     &ll_bitsize, &ll_bitpos, &ll_mode,
6338 				     &ll_unsignedp, &ll_reversep, &volatilep,
6339 				     &ll_mask, &ll_and_mask);
6340   lr_inner = decode_field_reference (loc, &lr_arg,
6341 				     &lr_bitsize, &lr_bitpos, &lr_mode,
6342 				     &lr_unsignedp, &lr_reversep, &volatilep,
6343 				     &lr_mask, &lr_and_mask);
6344   rl_inner = decode_field_reference (loc, &rl_arg,
6345 				     &rl_bitsize, &rl_bitpos, &rl_mode,
6346 				     &rl_unsignedp, &rl_reversep, &volatilep,
6347 				     &rl_mask, &rl_and_mask);
6348   rr_inner = decode_field_reference (loc, &rr_arg,
6349 				     &rr_bitsize, &rr_bitpos, &rr_mode,
6350 				     &rr_unsignedp, &rr_reversep, &volatilep,
6351 				     &rr_mask, &rr_and_mask);
6352 
6353   /* The inner operation on the lhs of each comparison must be the same
6354      if we are to be able to do anything.
6355      Then see if we have constants.  If not, the same must be true for
6356      the rhs's.  */
6357   if (volatilep
6358       || ll_reversep != rl_reversep
6359       || ll_inner == 0 || rl_inner == 0
6360       || ! operand_equal_p (ll_inner, rl_inner, 0))
6361     return 0;
6362 
6363   if (TREE_CODE (lr_arg) == INTEGER_CST
6364       && TREE_CODE (rr_arg) == INTEGER_CST)
6365     {
6366       l_const = lr_arg, r_const = rr_arg;
6367       lr_reversep = ll_reversep;
6368     }
6369   else if (lr_reversep != rr_reversep
6370 	   || lr_inner == 0 || rr_inner == 0
6371 	   || ! operand_equal_p (lr_inner, rr_inner, 0))
6372     return 0;
6373   else
6374     l_const = r_const = 0;
6375 
6376   /* If either comparison code is not correct for our logical operation,
6377      fail.  However, we can convert a one-bit comparison against zero into
6378      the opposite comparison against that bit being set in the field.  */
6379 
6380   wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
6381   if (lcode != wanted_code)
6382     {
6383       if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
6384 	{
6385 	  /* Make the left operand unsigned, since we are only interested
6386 	     in the value of one bit.  Otherwise we are doing the wrong
6387 	     thing below.  */
6388 	  ll_unsignedp = 1;
6389 	  l_const = ll_mask;
6390 	}
6391       else
6392 	return 0;
6393     }
6394 
6395   /* This is analogous to the code for l_const above.  */
6396   if (rcode != wanted_code)
6397     {
6398       if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
6399 	{
6400 	  rl_unsignedp = 1;
6401 	  r_const = rl_mask;
6402 	}
6403       else
6404 	return 0;
6405     }
6406 
6407   /* See if we can find a mode that contains both fields being compared on
6408      the left.  If we can't, fail.  Otherwise, update all constants and masks
6409      to be relative to a field of that size.  */
6410   first_bit = MIN (ll_bitpos, rl_bitpos);
6411   end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
6412   if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6413 		      TYPE_ALIGN (TREE_TYPE (ll_inner)), BITS_PER_WORD,
6414 		      volatilep, &lnmode))
6415     return 0;
6416 
6417   lnbitsize = GET_MODE_BITSIZE (lnmode);
6418   lnbitpos = first_bit & ~ (lnbitsize - 1);
6419   lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
6420   xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
6421 
6422   if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6423     {
6424       xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
6425       xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
6426     }
6427 
6428   ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
6429 			 size_int (xll_bitpos));
6430   rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
6431 			 size_int (xrl_bitpos));
6432 
6433   if (l_const)
6434     {
6435       l_const = fold_convert_loc (loc, lntype, l_const);
6436       l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
6437       l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
6438       if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
6439 					fold_build1_loc (loc, BIT_NOT_EXPR,
6440 						     lntype, ll_mask))))
6441 	{
6442 	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6443 
6444 	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6445 	}
6446     }
6447   if (r_const)
6448     {
6449       r_const = fold_convert_loc (loc, lntype, r_const);
6450       r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
6451       r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
6452       if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
6453 					fold_build1_loc (loc, BIT_NOT_EXPR,
6454 						     lntype, rl_mask))))
6455 	{
6456 	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6457 
6458 	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6459 	}
6460     }
6461 
6462   /* If the right sides are not constant, do the same for them.  Also,
6463      disallow this optimization if a size, signedness or storage order
6464      mismatch occurs between the left and right sides.  */
6465   if (l_const == 0)
6466     {
6467       if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
6468 	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
6469 	  || ll_reversep != lr_reversep
6470 	  /* Make sure the two fields on the right
6471 	     correspond to the left without being swapped.  */
6472 	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
6473 	return 0;
6474 
6475       first_bit = MIN (lr_bitpos, rr_bitpos);
6476       end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
6477       if (!get_best_mode (end_bit - first_bit, first_bit, 0, 0,
6478 			  TYPE_ALIGN (TREE_TYPE (lr_inner)), BITS_PER_WORD,
6479 			  volatilep, &rnmode))
6480 	return 0;
6481 
6482       rnbitsize = GET_MODE_BITSIZE (rnmode);
6483       rnbitpos = first_bit & ~ (rnbitsize - 1);
6484       rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
6485       xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
6486 
6487       if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
6488 	{
6489 	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
6490 	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
6491 	}
6492 
6493       lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6494 							    rntype, lr_mask),
6495 			     size_int (xlr_bitpos));
6496       rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6497 							    rntype, rr_mask),
6498 			     size_int (xrr_bitpos));
6499 
6500       /* Make a mask that corresponds to both fields being compared.
6501 	 Do this for both items being compared.  If the operands are the
6502 	 same size and the bits being compared are in the same position
6503 	 then we can do this by masking both and comparing the masked
6504 	 results.  */
6505       ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6506       lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
6507       if (lnbitsize == rnbitsize
6508 	  && xll_bitpos == xlr_bitpos
6509 	  && lnbitpos >= 0
6510 	  && rnbitpos >= 0)
6511 	{
6512 	  lhs = make_bit_field_ref (loc, ll_inner, ll_arg,
6513 				    lntype, lnbitsize, lnbitpos,
6514 				    ll_unsignedp || rl_unsignedp, ll_reversep);
6515 	  if (! all_ones_mask_p (ll_mask, lnbitsize))
6516 	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
6517 
6518 	  rhs = make_bit_field_ref (loc, lr_inner, lr_arg,
6519 				    rntype, rnbitsize, rnbitpos,
6520 				    lr_unsignedp || rr_unsignedp, lr_reversep);
6521 	  if (! all_ones_mask_p (lr_mask, rnbitsize))
6522 	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
6523 
6524 	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6525 	}
6526 
6527       /* There is still another way we can do something:  If both pairs of
6528 	 fields being compared are adjacent, we may be able to make a wider
6529 	 field containing them both.
6530 
6531 	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
6532 	 the mask must be shifted to account for the shift done by
6533 	 make_bit_field_ref.  */
6534       if (((ll_bitsize + ll_bitpos == rl_bitpos
6535 	    && lr_bitsize + lr_bitpos == rr_bitpos)
6536 	   || (ll_bitpos == rl_bitpos + rl_bitsize
6537 	       && lr_bitpos == rr_bitpos + rr_bitsize))
6538 	  && ll_bitpos >= 0
6539 	  && rl_bitpos >= 0
6540 	  && lr_bitpos >= 0
6541 	  && rr_bitpos >= 0)
6542 	{
6543 	  tree type;
6544 
6545 	  lhs = make_bit_field_ref (loc, ll_inner, ll_arg, lntype,
6546 				    ll_bitsize + rl_bitsize,
6547 				    MIN (ll_bitpos, rl_bitpos),
6548 				    ll_unsignedp, ll_reversep);
6549 	  rhs = make_bit_field_ref (loc, lr_inner, lr_arg, rntype,
6550 				    lr_bitsize + rr_bitsize,
6551 				    MIN (lr_bitpos, rr_bitpos),
6552 				    lr_unsignedp, lr_reversep);
6553 
6554 	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
6555 				 size_int (MIN (xll_bitpos, xrl_bitpos)));
6556 	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
6557 				 size_int (MIN (xlr_bitpos, xrr_bitpos)));
6558 
6559 	  /* Convert to the smaller type before masking out unwanted bits.  */
6560 	  type = lntype;
6561 	  if (lntype != rntype)
6562 	    {
6563 	      if (lnbitsize > rnbitsize)
6564 		{
6565 		  lhs = fold_convert_loc (loc, rntype, lhs);
6566 		  ll_mask = fold_convert_loc (loc, rntype, ll_mask);
6567 		  type = rntype;
6568 		}
6569 	      else if (lnbitsize < rnbitsize)
6570 		{
6571 		  rhs = fold_convert_loc (loc, lntype, rhs);
6572 		  lr_mask = fold_convert_loc (loc, lntype, lr_mask);
6573 		  type = lntype;
6574 		}
6575 	    }
6576 
6577 	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
6578 	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
6579 
6580 	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
6581 	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
6582 
6583 	  return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
6584 	}
6585 
6586       return 0;
6587     }
6588 
6589   /* Handle the case of comparisons with constants.  If there is something in
6590      common between the masks, those bits of the constants must be the same.
6591      If not, the condition is always false.  Test for this to avoid generating
6592      incorrect code below.  */
6593   result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
6594   if (! integer_zerop (result)
6595       && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
6596 			   const_binop (BIT_AND_EXPR, result, r_const)) != 1)
6597     {
6598       if (wanted_code == NE_EXPR)
6599 	{
6600 	  warning (0, "%<or%> of unmatched not-equal tests is always 1");
6601 	  return constant_boolean_node (true, truth_type);
6602 	}
6603       else
6604 	{
6605 	  warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6606 	  return constant_boolean_node (false, truth_type);
6607 	}
6608     }
6609 
6610   if (lnbitpos < 0)
6611     return 0;
6612 
6613   /* Construct the expression we will return.  First get the component
6614      reference we will make.  Unless the mask is all ones the width of
6615      that field, perform the mask operation.  Then compare with the
6616      merged constant.  */
6617   result = make_bit_field_ref (loc, ll_inner, ll_arg,
6618 			       lntype, lnbitsize, lnbitpos,
6619 			       ll_unsignedp || rl_unsignedp, ll_reversep);
6620 
6621   ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
6622   if (! all_ones_mask_p (ll_mask, lnbitsize))
6623     result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
6624 
6625   return build2_loc (loc, wanted_code, truth_type, result,
6626 		     const_binop (BIT_IOR_EXPR, l_const, r_const));
6627 }
6628 
6629 /* T is an integer expression that is being multiplied by, divided by, or
6630    taken modulo a constant C (CODE says which operation and what kind of
6631    division or modulus).  See if we can eliminate that operation by
6632    folding it with other operations already in T.  WIDE_TYPE, if non-null,
6633    is a type that should be used for the computation if wider than our type.
6634 
6635    For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6636    (X * 2) + (Y * 4).  We must, however, be assured that either the original
6637    expression would not overflow or that overflow is undefined for the type
6638    in the language in question.
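
   An illustrative note: with 32-bit signed X and Y under -fwrapv, the
   rewrite above is not safe, because (X * 8) + (Y * 16) may wrap and the
   rewritten form would then compute a different value.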
6639 
6640    If we return a non-null expression, it is an equivalent form of the
6641    original computation, but need not be in the original type.
6642 
6643    We set *STRICT_OVERFLOW_P to true if the return value depends on
6644    signed overflow being undefined.  Otherwise we do not change
6645    *STRICT_OVERFLOW_P.  */
6646 
6647 static tree
6648 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6649 		bool *strict_overflow_p)
6650 {
6651   /* To avoid exponential search depth, refuse to allow recursion past
6652      three levels.  Beyond that (1) it's highly unlikely that we'll find
6653      something interesting and (2) we've probably processed it before
6654      when we built the inner expression.  */
6655 
6656   static int depth;
6657   tree ret;
6658 
6659   if (depth > 3)
6660     return NULL;
6661 
6662   depth++;
6663   ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6664   depth--;
6665 
6666   return ret;
6667 }
6668 
6669 static tree
6670 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6671 		  bool *strict_overflow_p)
6672 {
6673   tree type = TREE_TYPE (t);
6674   enum tree_code tcode = TREE_CODE (t);
6675   tree ctype = (wide_type != 0
6676 		&& (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (wide_type))
6677 		    > GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type)))
6678 		? wide_type : type);
6679   tree t1, t2;
6680   int same_p = tcode == code;
6681   tree op0 = NULL_TREE, op1 = NULL_TREE;
6682   bool sub_strict_overflow_p;
6683 
6684   /* Don't deal with constants of zero here; they confuse the code below.  */
6685   if (integer_zerop (c))
6686     return NULL_TREE;
6687 
6688   if (TREE_CODE_CLASS (tcode) == tcc_unary)
6689     op0 = TREE_OPERAND (t, 0);
6690 
6691   if (TREE_CODE_CLASS (tcode) == tcc_binary)
6692     op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6693 
6694   /* Note that we need not handle conditional operations here since fold
6695      already handles those cases.  So just do arithmetic here.  */
6696   switch (tcode)
6697     {
6698     case INTEGER_CST:
6699       /* For a constant, we can always simplify if we are a multiply
6700 	 or (for divide and modulus) if it is a multiple of our constant.  */
6701       if (code == MULT_EXPR
6702 	  || wi::multiple_of_p (wi::to_wide (t), wi::to_wide (c),
6703 				TYPE_SIGN (type)))
6704 	{
6705 	  tree tem = const_binop (code, fold_convert (ctype, t),
6706 				  fold_convert (ctype, c));
6707 	  /* If the multiplication overflowed, we lost information on it.
6708 	     See PR68142 and PR69845.  */
6709 	  if (TREE_OVERFLOW (tem))
6710 	    return NULL_TREE;
6711 	  return tem;
6712 	}
6713       break;
6714 
6715     CASE_CONVERT: case NON_LVALUE_EXPR:
6716       if (!INTEGRAL_TYPE_P (TREE_TYPE (op0)))
6717 	break;
6718       /* If op0 is an expression ...  */
6719       if ((COMPARISON_CLASS_P (op0)
6720 	   || UNARY_CLASS_P (op0)
6721 	   || BINARY_CLASS_P (op0)
6722 	   || VL_EXP_CLASS_P (op0)
6723 	   || EXPRESSION_CLASS_P (op0))
6724 	  /* ... and has wrapping overflow, and its type is smaller
6725 	     than ctype, then we cannot pass through as widening.  */
6726 	  && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
6727 	       && (TYPE_PRECISION (ctype)
6728 	           > TYPE_PRECISION (TREE_TYPE (op0))))
6729 	      /* ... or this is a truncation (t is narrower than op0),
6730 		 then we cannot pass through this narrowing.  */
6731 	      || (TYPE_PRECISION (type)
6732 		  < TYPE_PRECISION (TREE_TYPE (op0)))
6733 	      /* ... or signedness changes for division or modulus,
6734 		 then we cannot pass through this conversion.  */
6735 	      || (code != MULT_EXPR
6736 		  && (TYPE_UNSIGNED (ctype)
6737 		      != TYPE_UNSIGNED (TREE_TYPE (op0))))
6738	      /* ... or has undefined overflow while the type being converted
6739		 to has not; we cannot do the operation in the inner type
6740 		 as that would introduce undefined overflow.  */
6741 	      || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
6742 		  && !TYPE_OVERFLOW_UNDEFINED (type))))
6743 	break;
6744 
6745       /* Pass the constant down and see if we can make a simplification.  If
6746 	 we can, replace this expression with the inner simplification for
6747 	 possible later conversion to our or some other type.  */
6748       if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6749 	  && TREE_CODE (t2) == INTEGER_CST
6750 	  && !TREE_OVERFLOW (t2)
6751 	  && (t1 = extract_muldiv (op0, t2, code,
6752 				   code == MULT_EXPR ? ctype : NULL_TREE,
6753 				   strict_overflow_p)) != 0)
6754 	return t1;
6755       break;
6756 
6757     case ABS_EXPR:
6758       /* If widening the type changes it from signed to unsigned, then we
6759          must avoid building ABS_EXPR itself as unsigned.  */
6760       if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6761         {
6762           tree cstype = (*signed_type_for) (ctype);
6763           if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6764 	      != 0)
6765             {
6766               t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6767               return fold_convert (ctype, t1);
6768             }
6769           break;
6770         }
6771       /* If the constant is negative, we cannot simplify this.  */
6772       if (tree_int_cst_sgn (c) == -1)
6773         break;
6774       /* FALLTHROUGH */
6775     case NEGATE_EXPR:
6776       /* For division and modulus, type can't be unsigned, as e.g.
6777 	 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6778 	 For signed types, even with wrapping overflow, this is fine.  */
6779       if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6780 	break;
6781       if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6782 	  != 0)
6783 	return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6784       break;
6785 
6786     case MIN_EXPR:  case MAX_EXPR:
6787       /* If widening the type changes the signedness, then we can't perform
6788 	 this optimization as that changes the result.  */
6789       if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6790 	break;
6791 
6792       /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
6793       sub_strict_overflow_p = false;
6794       if ((t1 = extract_muldiv (op0, c, code, wide_type,
6795 				&sub_strict_overflow_p)) != 0
6796 	  && (t2 = extract_muldiv (op1, c, code, wide_type,
6797 				   &sub_strict_overflow_p)) != 0)
6798 	{
6799 	  if (tree_int_cst_sgn (c) < 0)
6800 	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6801 	  if (sub_strict_overflow_p)
6802 	    *strict_overflow_p = true;
6803 	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6804 			      fold_convert (ctype, t2));
6805 	}
6806       break;
6807 
6808     case LSHIFT_EXPR:  case RSHIFT_EXPR:
6809       /* If the second operand is constant, this is a multiplication
6810	 or floor division by a power of two, so we can treat it that
6811 	 way unless the multiplier or divisor overflows.  Signed
6812 	 left-shift overflow is implementation-defined rather than
6813 	 undefined in C90, so do not convert signed left shift into
6814 	 multiplication.  */
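      /* An illustrative sketch: for unsigned X, "(X << 1) * 4" is
	 rewritten here as "(X * 2) * 4", which the multiply/divide
	 cases below then fold to "X * 8".  */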
6815       if (TREE_CODE (op1) == INTEGER_CST
6816 	  && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6817 	  /* const_binop may not detect overflow correctly,
6818 	     so check for it explicitly here.  */
6819 	  && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
6820 			wi::to_wide (op1))
6821 	  && (t1 = fold_convert (ctype,
6822 				 const_binop (LSHIFT_EXPR, size_one_node,
6823 					      op1))) != 0
6824 	  && !TREE_OVERFLOW (t1))
6825 	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6826 				       ? MULT_EXPR : FLOOR_DIV_EXPR,
6827 				       ctype,
6828 				       fold_convert (ctype, op0),
6829 				       t1),
6830 			       c, code, wide_type, strict_overflow_p);
6831       break;
6832 
6833     case PLUS_EXPR:  case MINUS_EXPR:
6834       /* See if we can eliminate the operation on both sides.  If we can, we
6835 	 can return a new PLUS or MINUS.  If we can't, the only remaining
6836 	 cases where we can do anything are if the second operand is a
6837 	 constant.  */
6838       sub_strict_overflow_p = false;
6839       t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6840       t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6841       if (t1 != 0 && t2 != 0
6842 	  && TYPE_OVERFLOW_WRAPS (ctype)
6843 	  && (code == MULT_EXPR
6844 	      /* If not multiplication, we can only do this if both operands
6845 		 are divisible by c.  */
6846 	      || (multiple_of_p (ctype, op0, c)
6847 	          && multiple_of_p (ctype, op1, c))))
6848 	{
6849 	  if (sub_strict_overflow_p)
6850 	    *strict_overflow_p = true;
6851 	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6852 			      fold_convert (ctype, t2));
6853 	}
6854 
6855       /* If this was a subtraction, negate OP1 and set it to be an addition.
6856 	 This simplifies the logic below.  */
6857       if (tcode == MINUS_EXPR)
6858 	{
6859 	  tcode = PLUS_EXPR, op1 = negate_expr (op1);
6860 	  /* If OP1 was not easily negatable, the constant may be OP0.  */
6861 	  if (TREE_CODE (op0) == INTEGER_CST)
6862 	    {
6863 	      std::swap (op0, op1);
6864 	      std::swap (t1, t2);
6865 	    }
6866 	}
6867 
6868       if (TREE_CODE (op1) != INTEGER_CST)
6869 	break;
6870 
6871       /* If either OP1 or C is negative, this optimization is not safe for
6872 	 some of the division and remainder types while for others we need
6873 	 to change the code.  */
6874       if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6875 	{
6876 	  if (code == CEIL_DIV_EXPR)
6877 	    code = FLOOR_DIV_EXPR;
6878 	  else if (code == FLOOR_DIV_EXPR)
6879 	    code = CEIL_DIV_EXPR;
6880 	  else if (code != MULT_EXPR
6881 		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6882 	    break;
6883 	}
6884 
6885       /* If it's a multiply or a division/modulus operation of a multiple
6886          of our constant, do the operation and verify it doesn't overflow.  */
6887       if (code == MULT_EXPR
6888 	  || wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6889 				TYPE_SIGN (type)))
6890 	{
6891 	  op1 = const_binop (code, fold_convert (ctype, op1),
6892 			     fold_convert (ctype, c));
6893 	  /* We allow the constant to overflow with wrapping semantics.  */
6894 	  if (op1 == 0
6895 	      || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6896 	    break;
6897 	}
6898       else
6899 	break;
6900 
6901       /* If we have an unsigned type, we cannot widen the operation since it
6902 	 will change the result if the original computation overflowed.  */
6903       if (TYPE_UNSIGNED (ctype) && ctype != type)
6904 	break;
6905 
6906       /* The last case is when this is a multiply.  In that case, we can
6907 	 apply the distributive law to commute the multiply and addition
6908 	 if the multiplication of the constants doesn't overflow
6909 	 and overflow is defined.  With undefined overflow
6910 	 op0 * c might overflow, while (op0 + orig_op1) * c doesn't.
6911 	 But fold_plusminus_mult_expr would factor back any power-of-two
6912 	 value so do not distribute in the first place in this case.  */
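      /* An illustrative sketch (assuming -fwrapv): "(x + 3) * 5"
	 distributes to "x * 5 + 15", while "(x + 3) * 4" is left alone
	 here because 4 is a power of two.  */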
6913       if (code == MULT_EXPR
6914 	  && TYPE_OVERFLOW_WRAPS (ctype)
6915 	  && !(tree_fits_shwi_p (c) && pow2p_hwi (absu_hwi (tree_to_shwi (c)))))
6916 	return fold_build2 (tcode, ctype,
6917 			    fold_build2 (code, ctype,
6918 					 fold_convert (ctype, op0),
6919 					 fold_convert (ctype, c)),
6920 			    op1);
6921 
6922       break;
6923 
6924     case MULT_EXPR:
6925       /* We have a special case here if we are doing something like
6926 	 (C * 8) % 4 since we know that's zero.  */
6927       if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6928 	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6929 	  /* If the multiplication can overflow we cannot optimize this.  */
6930 	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6931 	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6932 	  && wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6933 				TYPE_SIGN (type)))
6934 	{
6935 	  *strict_overflow_p = true;
6936 	  return omit_one_operand (type, integer_zero_node, op0);
6937 	}
6938 
6939       /* ... fall through ...  */
6940 
6941     case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
6942     case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
6943       /* If we can extract our operation from the LHS, do so and return a
6944 	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
6945 	 do something only if the second operand is a constant.  */
6946       if (same_p
6947 	  && TYPE_OVERFLOW_WRAPS (ctype)
6948 	  && (t1 = extract_muldiv (op0, c, code, wide_type,
6949 				   strict_overflow_p)) != 0)
6950 	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6951 			    fold_convert (ctype, op1));
6952       else if (tcode == MULT_EXPR && code == MULT_EXPR
6953 	       && TYPE_OVERFLOW_WRAPS (ctype)
6954 	       && (t1 = extract_muldiv (op1, c, code, wide_type,
6955 					strict_overflow_p)) != 0)
6956 	return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6957 			    fold_convert (ctype, t1));
6958       else if (TREE_CODE (op1) != INTEGER_CST)
6959 	return 0;
6960 
6961       /* If these are the same operation types, we can associate them
6962 	 assuming no overflow.  */
6963       if (tcode == code)
6964 	{
6965 	  bool overflow_p = false;
6966 	  wi::overflow_type overflow_mul;
6967 	  signop sign = TYPE_SIGN (ctype);
6968 	  unsigned prec = TYPE_PRECISION (ctype);
6969 	  wide_int mul = wi::mul (wi::to_wide (op1, prec),
6970 				  wi::to_wide (c, prec),
6971 				  sign, &overflow_mul);
6972 	  overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6973 	  if (overflow_mul
6974 	      && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6975 	    overflow_p = true;
6976 	  if (!overflow_p)
6977 	    return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6978 				wide_int_to_tree (ctype, mul));
6979 	}
6980 
6981       /* If these operations "cancel" each other, we have the main
6982 	 optimizations of this pass, which occur when either constant is a
6983	 multiple of the other, in which case we replace this with an
6984	 operation of either CODE or TCODE.
6985 
6986 	 If we have an unsigned type, we cannot do this since it will change
6987 	 the result if the original computation overflowed.  */
6988       if (TYPE_OVERFLOW_UNDEFINED (ctype)
6989 	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6990 	      || (tcode == MULT_EXPR
6991 		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6992 		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6993 		  && code != MULT_EXPR)))
6994 	{
6995 	  if (wi::multiple_of_p (wi::to_wide (op1), wi::to_wide (c),
6996 				 TYPE_SIGN (type)))
6997 	    {
6998 	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
6999 		*strict_overflow_p = true;
7000 	      return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
7001 				  fold_convert (ctype,
7002 						const_binop (TRUNC_DIV_EXPR,
7003 							     op1, c)));
7004 	    }
7005 	  else if (wi::multiple_of_p (wi::to_wide (c), wi::to_wide (op1),
7006 				      TYPE_SIGN (type)))
7007 	    {
7008 	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
7009 		*strict_overflow_p = true;
7010 	      return fold_build2 (code, ctype, fold_convert (ctype, op0),
7011 				  fold_convert (ctype,
7012 						const_binop (TRUNC_DIV_EXPR,
7013 							     c, op1)));
7014 	    }
7015 	}
7016       break;
7017 
7018     default:
7019       break;
7020     }
7021 
7022   return 0;
7023 }
7024 
7025 /* Return a node which has the indicated constant VALUE (either 0 or
7026    1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
7027    and is of the indicated TYPE.  */
7028 
7029 tree
7030 constant_boolean_node (bool value, tree type)
7031 {
7032   if (type == integer_type_node)
7033     return value ? integer_one_node : integer_zero_node;
7034   else if (type == boolean_type_node)
7035     return value ? boolean_true_node : boolean_false_node;
7036   else if (TREE_CODE (type) == VECTOR_TYPE)
7037     return build_vector_from_val (type,
7038 				  build_int_cst (TREE_TYPE (type),
7039 						 value ? -1 : 0));
7040   else
7041     return fold_convert (type, value ? integer_one_node : integer_zero_node);
7042 }
7043 
7044 
7045 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
7046    Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
7047    CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
7048    expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
7049    COND is the first argument to CODE; otherwise (as in the example
7050    given here), it is the second argument.  TYPE is the type of the
7051    original expression.  Return NULL_TREE if no simplification is
7052    possible.  */
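
/* For instance (an illustrative sketch): "1 + (x < y)" becomes
   "(x < y) ? 2 : 1", which is profitable because both arms are now
   constant.  */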
7053 
7054 static tree
7055 fold_binary_op_with_conditional_arg (location_t loc,
7056 				     enum tree_code code,
7057 				     tree type, tree op0, tree op1,
7058 				     tree cond, tree arg, int cond_first_p)
7059 {
7060   tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
7061   tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
7062   tree test, true_value, false_value;
7063   tree lhs = NULL_TREE;
7064   tree rhs = NULL_TREE;
7065   enum tree_code cond_code = COND_EXPR;
7066 
7067   /* Do not move possibly trapping operations into the conditional as this
7068      pessimizes code and causes gimplification issues when applied late.  */
7069   if (operation_could_trap_p (code, FLOAT_TYPE_P (type),
7070 			      ANY_INTEGRAL_TYPE_P (type)
7071 			      && TYPE_OVERFLOW_TRAPS (type), op1))
7072     return NULL_TREE;
7073 
7074   if (TREE_CODE (cond) == COND_EXPR
7075       || TREE_CODE (cond) == VEC_COND_EXPR)
7076     {
7077       test = TREE_OPERAND (cond, 0);
7078       true_value = TREE_OPERAND (cond, 1);
7079       false_value = TREE_OPERAND (cond, 2);
7080       /* If this arm is an expression that throws (and so has void type),
7081	 it does not make sense to try to perform a logical or arithmetic
7082	 operation involving it.  */
7083       if (VOID_TYPE_P (TREE_TYPE (true_value)))
7084 	lhs = true_value;
7085       if (VOID_TYPE_P (TREE_TYPE (false_value)))
7086 	rhs = false_value;
7087     }
7088   else if (!(TREE_CODE (type) != VECTOR_TYPE
7089 	     && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
7090     {
7091       tree testtype = TREE_TYPE (cond);
7092       test = cond;
7093       true_value = constant_boolean_node (true, testtype);
7094       false_value = constant_boolean_node (false, testtype);
7095     }
7096   else
7097     /* Detect the case of mixing vector and scalar types - bail out.  */
7098     return NULL_TREE;
7099 
7100   if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
7101     cond_code = VEC_COND_EXPR;
7102 
7103   /* This transformation is only worthwhile if we don't have to wrap ARG
7104      in a SAVE_EXPR and the operation can be simplified without recursing
7105      on at least one of the branches once it's pushed inside the COND_EXPR.  */
7106   if (!TREE_CONSTANT (arg)
7107       && (TREE_SIDE_EFFECTS (arg)
7108 	  || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
7109 	  || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
7110     return NULL_TREE;
7111 
7112   arg = fold_convert_loc (loc, arg_type, arg);
7113   if (lhs == 0)
7114     {
7115       true_value = fold_convert_loc (loc, cond_type, true_value);
7116       if (cond_first_p)
7117 	lhs = fold_build2_loc (loc, code, type, true_value, arg);
7118       else
7119 	lhs = fold_build2_loc (loc, code, type, arg, true_value);
7120     }
7121   if (rhs == 0)
7122     {
7123       false_value = fold_convert_loc (loc, cond_type, false_value);
7124       if (cond_first_p)
7125 	rhs = fold_build2_loc (loc, code, type, false_value, arg);
7126       else
7127 	rhs = fold_build2_loc (loc, code, type, arg, false_value);
7128     }
7129 
7130   /* Check that we have simplified at least one of the branches.  */
7131   if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
7132     return NULL_TREE;
7133 
7134   return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
7135 }
7136 
7137 
7138 /* Subroutine of fold() that checks for the addition of +/- 0.0.
7139 
7140    If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
7141    TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
7142    ADDEND is the same as X.
7143 
7144    X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
7145    and finite.  The problematic cases are when X is zero, and its mode
7146    has signed zeros.  In the case of rounding towards -infinity,
7147    X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
7148    modes, X + 0 is not the same as X because -0 + 0 is 0.  */
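
/* Concretely (an illustrative note): with default rounding, -0.0 + 0.0
   is +0.0, so "X + 0.0 -> X" would be wrong for X == -0.0; with rounding
   toward -infinity, 0.0 - 0.0 is -0.0, so "X - 0.0 -> X" would be wrong
   for X == +0.0 in that mode.  */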
7149 
7150 bool
7151 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
7152 {
7153   if (!real_zerop (addend))
7154     return false;
7155 
7156   /* Don't allow the fold with -fsignaling-nans.  */
7157   if (HONOR_SNANS (type))
7158     return false;
7159 
7160   /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
7161   if (!HONOR_SIGNED_ZEROS (type))
7162     return true;
7163 
7164   /* There is no case that is safe for all rounding modes.  */
7165   if (HONOR_SIGN_DEPENDENT_ROUNDING (type))
7166     return false;
7167 
7168   /* In a vector or complex, we would need to check the sign of all zeros.  */
7169   if (TREE_CODE (addend) == VECTOR_CST)
7170     addend = uniform_vector_p (addend);
7171   if (!addend || TREE_CODE (addend) != REAL_CST)
7172     return false;
7173 
7174   /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
7175   if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
7176     negate = !negate;
7177 
7178   /* The mode has signed zeros, and we have to honor their sign.
7179      In this situation, there is only one case we can return true for.
7180      X - 0 is the same as X with default rounding.  */
7181   return negate;
7182 }
7183 
7184 /* Subroutine of match.pd that optimizes comparisons of a division by
7185    a nonzero integer constant against an integer constant, i.e.
7186    X/C1 op C2.
7187 
7188    CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
7189    GE_EXPR or LE_EXPR.  C1 and C2 must be INTEGER_CSTs.  */
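
/* For example (an illustrative note): for unsigned X, "X / 4 == 3" holds
   exactly for X in [12, 15], so *LO becomes 12 and *HI becomes 15.  */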
7190 
7191 enum tree_code
7192 fold_div_compare (enum tree_code code, tree c1, tree c2, tree *lo,
7193 		  tree *hi, bool *neg_overflow)
7194 {
7195   tree prod, tmp, type = TREE_TYPE (c1);
7196   signop sign = TYPE_SIGN (type);
7197   wi::overflow_type overflow;
7198 
7199   /* We have to do this the hard way to detect unsigned overflow.
7200      prod = int_const_binop (MULT_EXPR, c1, c2);  */
7201   wide_int val = wi::mul (wi::to_wide (c1), wi::to_wide (c2), sign, &overflow);
7202   prod = force_fit_type (type, val, -1, overflow);
7203   *neg_overflow = false;
7204 
7205   if (sign == UNSIGNED)
7206     {
7207       tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7208       *lo = prod;
7209 
7210       /* Likewise *hi = int_const_binop (PLUS_EXPR, prod, tmp).  */
7211       val = wi::add (wi::to_wide (prod), wi::to_wide (tmp), sign, &overflow);
7212       *hi = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (prod));
7213     }
7214   else if (tree_int_cst_sgn (c1) >= 0)
7215     {
7216       tmp = int_const_binop (MINUS_EXPR, c1, build_int_cst (type, 1));
7217       switch (tree_int_cst_sgn (c2))
7218 	{
7219 	case -1:
7220 	  *neg_overflow = true;
7221 	  *lo = int_const_binop (MINUS_EXPR, prod, tmp);
7222 	  *hi = prod;
7223 	  break;
7224 
7225 	case 0:
7226 	  *lo = fold_negate_const (tmp, type);
7227 	  *hi = tmp;
7228 	  break;
7229 
7230 	case 1:
7231 	  *hi = int_const_binop (PLUS_EXPR, prod, tmp);
7232 	  *lo = prod;
7233 	  break;
7234 
7235 	default:
7236 	  gcc_unreachable ();
7237 	}
7238     }
7239   else
7240     {
7241       /* A negative divisor reverses the relational operators.  */
7242       code = swap_tree_comparison (code);
7243 
7244       tmp = int_const_binop (PLUS_EXPR, c1, build_int_cst (type, 1));
7245       switch (tree_int_cst_sgn (c2))
7246 	{
7247 	case -1:
7248 	  *hi = int_const_binop (MINUS_EXPR, prod, tmp);
7249 	  *lo = prod;
7250 	  break;
7251 
7252 	case 0:
7253 	  *hi = fold_negate_const (tmp, type);
7254 	  *lo = tmp;
7255 	  break;
7256 
7257 	case 1:
7258 	  *neg_overflow = true;
7259 	  *lo = int_const_binop (PLUS_EXPR, prod, tmp);
7260 	  *hi = prod;
7261 	  break;
7262 
7263 	default:
7264 	  gcc_unreachable ();
7265 	}
7266     }
7267 
7268   if (code != EQ_EXPR && code != NE_EXPR)
7269     return code;
7270 
7271   if (TREE_OVERFLOW (*lo)
7272       || operand_equal_p (*lo, TYPE_MIN_VALUE (type), 0))
7273     *lo = NULL_TREE;
7274   if (TREE_OVERFLOW (*hi)
7275       || operand_equal_p (*hi, TYPE_MAX_VALUE (type), 0))
7276     *hi = NULL_TREE;
7277 
7278   return code;
7279 }
7280 
7281 
7282 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7283    equality/inequality test, then return a simplified form of the test
7284    using a sign test.  Otherwise return NULL.  TYPE is the desired
7285    result type.  */
7286 
7287 static tree
7288 fold_single_bit_test_into_sign_test (location_t loc,
7289 				     enum tree_code code, tree arg0, tree arg1,
7290 				     tree result_type)
7291 {
7292   /* If this is testing a single bit, we can optimize the test.  */
7293   if ((code == NE_EXPR || code == EQ_EXPR)
7294       && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7295       && integer_pow2p (TREE_OPERAND (arg0, 1)))
7296     {
7297       /* If we have (A & C) != 0 where C is the sign bit of A, convert
7298 	 this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
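      /* E.g. (an illustrative sketch): for a 32-bit int X,
	 "(X & 0x80000000) != 0" folds to "X < 0" and
	 "(X & 0x80000000) == 0" folds to "X >= 0".  */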
7299       tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
7300 
7301       if (arg00 != NULL_TREE
7302 	  /* This is only a win if casting to a signed type is cheap,
7303 	     i.e. when arg00's type is not a partial mode.  */
7304 	  && type_has_mode_precision_p (TREE_TYPE (arg00)))
7305 	{
7306 	  tree stype = signed_type_for (TREE_TYPE (arg00));
7307 	  return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
7308 			      result_type,
7309 			      fold_convert_loc (loc, stype, arg00),
7310 			      build_int_cst (stype, 0));
7311 	}
7312     }
7313 
7314   return NULL_TREE;
7315 }
7316 
7317 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7318    equality/inequality test, then return a simplified form of
7319    the test using shifts and logical operations.  Otherwise return
7320    NULL.  TYPE is the desired result type.  */
7321 
7322 tree
7323 fold_single_bit_test (location_t loc, enum tree_code code,
7324 		      tree arg0, tree arg1, tree result_type)
7325 {
7326   /* If this is testing a single bit, we can optimize the test.  */
7327   if ((code == NE_EXPR || code == EQ_EXPR)
7328       && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7329       && integer_pow2p (TREE_OPERAND (arg0, 1)))
7330     {
7331       tree inner = TREE_OPERAND (arg0, 0);
7332       tree type = TREE_TYPE (arg0);
7333       int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
7334       scalar_int_mode operand_mode = SCALAR_INT_TYPE_MODE (type);
7335       int ops_unsigned;
7336       tree signed_type, unsigned_type, intermediate_type;
7337       tree tem, one;
7338 
7339       /* First, see if we can fold the single bit test into a sign-bit
7340 	 test.  */
7341       tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
7342 						 result_type);
7343       if (tem)
7344 	return tem;
7345 
7346       /* Otherwise we have (A & C) != 0 where C is a single bit,
7347	 convert that into ((A >> C2) & 1), where C2 = log2(C).
7348 	 Similarly for (A & C) == 0.  */
7349 
7350       /* If INNER is a right shift of a constant and it plus BITNUM does
7351 	 not overflow, adjust BITNUM and INNER.  */
7352       if (TREE_CODE (inner) == RSHIFT_EXPR
7353 	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
7354 	  && bitnum < TYPE_PRECISION (type)
7355 	  && wi::ltu_p (wi::to_wide (TREE_OPERAND (inner, 1)),
7356 			TYPE_PRECISION (type) - bitnum))
7357 	{
7358 	  bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
7359 	  inner = TREE_OPERAND (inner, 0);
7360 	}
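
      /* An illustrative sketch: for "((x >> 3) & 8) != 0", BITNUM starts
	 at 3 and INNER at "x >> 3"; the adjustment above makes BITNUM 6
	 and INNER "x", so we end up testing "(x >> 6) & 1".  */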
7361 
7362       /* If we are going to be able to omit the AND below, we must do our
7363 	 operations as unsigned.  If we must use the AND, we have a choice.
7364 	 Normally unsigned is faster, but for some machines signed is.  */
7365       ops_unsigned = (load_extend_op (operand_mode) == SIGN_EXTEND
7366 		      && !flag_syntax_only) ? 0 : 1;
7367 
7368       signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
7369       unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
7370       intermediate_type = ops_unsigned ? unsigned_type : signed_type;
7371       inner = fold_convert_loc (loc, intermediate_type, inner);
7372 
7373       if (bitnum != 0)
7374 	inner = build2 (RSHIFT_EXPR, intermediate_type,
7375 			inner, size_int (bitnum));
7376 
7377       one = build_int_cst (intermediate_type, 1);
7378 
7379       if (code == EQ_EXPR)
7380 	inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
7381 
7382       /* Put the AND last so it can combine with more things.  */
7383       inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
7384 
7385       /* Make sure to return the proper type.  */
7386       inner = fold_convert_loc (loc, result_type, inner);
7387 
7388       return inner;
7389     }
7390   return NULL_TREE;
7391 }
7392 
7393 /* Test whether it is preferable to swap two operands, ARG0 and
7394    ARG1, for example because ARG0 is an integer constant and ARG1
7395    isn't.  */
7396 
7397 bool
7398 tree_swap_operands_p (const_tree arg0, const_tree arg1)
7399 {
7400   if (CONSTANT_CLASS_P (arg1))
7401     return 0;
7402   if (CONSTANT_CLASS_P (arg0))
7403     return 1;
7404 
7405   STRIP_NOPS (arg0);
7406   STRIP_NOPS (arg1);
7407 
7408   if (TREE_CONSTANT (arg1))
7409     return 0;
7410   if (TREE_CONSTANT (arg0))
7411     return 1;
7412 
7413   /* It is preferable to swap two SSA_NAME to ensure a canonical form
7414      for commutative and comparison operators.  Ensuring a canonical
7415      form allows the optimizers to find additional redundancies without
7416      having to explicitly check for both orderings.  */
7417   if (TREE_CODE (arg0) == SSA_NAME
7418       && TREE_CODE (arg1) == SSA_NAME
7419       && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7420     return 1;
7421 
7422   /* Put SSA_NAMEs last.  */
7423   if (TREE_CODE (arg1) == SSA_NAME)
7424     return 0;
7425   if (TREE_CODE (arg0) == SSA_NAME)
7426     return 1;
7427 
7428   /* Put variables last.  */
7429   if (DECL_P (arg1))
7430     return 0;
7431   if (DECL_P (arg0))
7432     return 1;
7433 
7434   return 0;
7435 }
7436 
7437 
7438 /* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
7439    means A >= Y && A != MAX, but in this case we know that
7440    A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */
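
/* For instance (an illustrative note): "i < n && i + 1 > j" becomes
   "i < n && i >= j", since the bound "i < n" guarantees that "i" is not
   the maximum value of its type, the only case where "i + 1 > j" and
   "i >= j" could differ.  */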
7441 
7442 static tree
7443 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7444 {
7445   tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7446 
7447   if (TREE_CODE (bound) == LT_EXPR)
7448     a = TREE_OPERAND (bound, 0);
7449   else if (TREE_CODE (bound) == GT_EXPR)
7450     a = TREE_OPERAND (bound, 1);
7451   else
7452     return NULL_TREE;
7453 
7454   typea = TREE_TYPE (a);
7455   if (!INTEGRAL_TYPE_P (typea)
7456       && !POINTER_TYPE_P (typea))
7457     return NULL_TREE;
7458 
7459   if (TREE_CODE (ineq) == LT_EXPR)
7460     {
7461       a1 = TREE_OPERAND (ineq, 1);
7462       y = TREE_OPERAND (ineq, 0);
7463     }
7464   else if (TREE_CODE (ineq) == GT_EXPR)
7465     {
7466       a1 = TREE_OPERAND (ineq, 0);
7467       y = TREE_OPERAND (ineq, 1);
7468     }
7469   else
7470     return NULL_TREE;
7471 
7472   if (TREE_TYPE (a1) != typea)
7473     return NULL_TREE;
7474 
7475   if (POINTER_TYPE_P (typea))
7476     {
7477       /* Convert the pointers into integers before taking the difference.  */
7478       tree ta = fold_convert_loc (loc, ssizetype, a);
7479       tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7480       diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7481     }
7482   else
7483     diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7484 
7485   if (!diff || !integer_onep (diff))
7486    return NULL_TREE;
7487 
7488   return fold_build2_loc (loc, GE_EXPR, type, a, y);
7489 }
7490 
7491 /* Fold a sum or difference of at least one multiplication.
7492    Returns the folded tree or NULL if no simplification could be made.  */
7493 
7494 static tree
7495 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7496 			  tree arg0, tree arg1)
7497 {
7498   tree arg00, arg01, arg10, arg11;
7499   tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7500 
7501   /* (A * C) +- (B * C) -> (A+-B) * C.
7502      (A * C) +- A -> A * (C+-1).
7503      We are most concerned about the case where C is a constant,
7504      but other combinations show up during loop reduction.  Since
7505      it is not difficult, try all four possibilities.  */
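
  /* An illustrative sketch: "x * 6 + x * 2" matches with SAME == x and
     folds to "x * (6 + 2)", i.e. "x * 8"; "i * 4 + i" uses the implicit
     "i * 1" form and folds to "i * 5".  */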
7506 
7507   if (TREE_CODE (arg0) == MULT_EXPR)
7508     {
7509       arg00 = TREE_OPERAND (arg0, 0);
7510       arg01 = TREE_OPERAND (arg0, 1);
7511     }
7512   else if (TREE_CODE (arg0) == INTEGER_CST)
7513     {
7514       arg00 = build_one_cst (type);
7515       arg01 = arg0;
7516     }
7517   else
7518     {
7519       /* We cannot generate constant 1 for fract.  */
7520       if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7521 	return NULL_TREE;
7522       arg00 = arg0;
7523       arg01 = build_one_cst (type);
7524     }
7525   if (TREE_CODE (arg1) == MULT_EXPR)
7526     {
7527       arg10 = TREE_OPERAND (arg1, 0);
7528       arg11 = TREE_OPERAND (arg1, 1);
7529     }
7530   else if (TREE_CODE (arg1) == INTEGER_CST)
7531     {
7532       arg10 = build_one_cst (type);
7533       /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7534 	 the purpose of this canonicalization.  */
7535       if (wi::neg_p (wi::to_wide (arg1), TYPE_SIGN (TREE_TYPE (arg1)))
7536 	  && negate_expr_p (arg1)
7537 	  && code == PLUS_EXPR)
7538 	{
7539 	  arg11 = negate_expr (arg1);
7540 	  code = MINUS_EXPR;
7541 	}
7542       else
7543 	arg11 = arg1;
7544     }
7545   else
7546     {
7547       /* We cannot generate constant 1 for fract.  */
7548       if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7549 	return NULL_TREE;
7550       arg10 = arg1;
7551       arg11 = build_one_cst (type);
7552     }
7553   same = NULL_TREE;
7554 
7555   /* Prefer factoring a common non-constant.  */
7556   if (operand_equal_p (arg00, arg10, 0))
7557     same = arg00, alt0 = arg01, alt1 = arg11;
7558   else if (operand_equal_p (arg01, arg11, 0))
7559     same = arg01, alt0 = arg00, alt1 = arg10;
7560   else if (operand_equal_p (arg00, arg11, 0))
7561     same = arg00, alt0 = arg01, alt1 = arg10;
7562   else if (operand_equal_p (arg01, arg10, 0))
7563     same = arg01, alt0 = arg00, alt1 = arg11;
7564 
7565   /* No identical multiplicands; see if we can find a common
7566      power-of-two factor in non-power-of-two multiplies.  This
7567      can help in multi-dimensional array access.  */
7568   else if (tree_fits_shwi_p (arg01) && tree_fits_shwi_p (arg11))
7569     {
7570       HOST_WIDE_INT int01 = tree_to_shwi (arg01);
7571       HOST_WIDE_INT int11 = tree_to_shwi (arg11);
7572       HOST_WIDE_INT tmp;
7573       bool swap = false;
7574       tree maybe_same;
7575 
7576       /* Move min of absolute values to int11.  */
7577       if (absu_hwi (int01) < absu_hwi (int11))
7578         {
7579 	  tmp = int01, int01 = int11, int11 = tmp;
7580 	  alt0 = arg00, arg00 = arg10, arg10 = alt0;
7581 	  maybe_same = arg01;
7582 	  swap = true;
7583 	}
7584       else
7585 	maybe_same = arg11;
7586 
7587       const unsigned HOST_WIDE_INT factor = absu_hwi (int11);
7588       if (factor > 1
7589 	  && pow2p_hwi (factor)
7590 	  && (int01 & (factor - 1)) == 0
7591 	  /* The remainder should not be a constant, otherwise we
7592	     end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7593	     increase the number of multiplications necessary.  */
7594 	  && TREE_CODE (arg10) != INTEGER_CST)
7595         {
7596 	  alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7597 			      build_int_cst (TREE_TYPE (arg00),
7598 					     int01 / int11));
7599 	  alt1 = arg10;
7600 	  same = maybe_same;
7601 	  if (swap)
7602 	    maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7603 	}
7604     }
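
  /* An illustrative sketch: "i * 12 + j * 4" has no common multiplicand,
     but the common power-of-two factor 4 yields "(i * 3 + j) * 4".  */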
7605 
7606   if (!same)
7607     return NULL_TREE;
7608 
7609   if (! ANY_INTEGRAL_TYPE_P (type)
7610       || TYPE_OVERFLOW_WRAPS (type)
7611       /* We are neither factoring zero nor minus one.  */
7612       || TREE_CODE (same) == INTEGER_CST)
7613     return fold_build2_loc (loc, MULT_EXPR, type,
7614 			fold_build2_loc (loc, code, type,
7615 				     fold_convert_loc (loc, type, alt0),
7616 				     fold_convert_loc (loc, type, alt1)),
7617 			fold_convert_loc (loc, type, same));
7618 
7619   /* Same may be zero and thus the operation 'code' may overflow.  Likewise
7620      same may be minus one and thus the multiplication may overflow.  Perform
7621      the sum operation in an unsigned type.  */
7622   tree utype = unsigned_type_for (type);
7623   tree tem = fold_build2_loc (loc, code, utype,
7624 			      fold_convert_loc (loc, utype, alt0),
7625 			      fold_convert_loc (loc, utype, alt1));
7626   /* If the sum evaluated to a constant that is not -INF, the multiplication
7627      cannot overflow.  */
7628   if (TREE_CODE (tem) == INTEGER_CST
7629       && (wi::to_wide (tem)
7630 	  != wi::min_value (TYPE_PRECISION (utype), SIGNED)))
7631     return fold_build2_loc (loc, MULT_EXPR, type,
7632 			    fold_convert (type, tem), same);
7633 
7634   /* Do not resort to unsigned multiplication because
7635      we lose the no-overflow property of the expression.  */
7636   return NULL_TREE;
7637 }
7638 
7639 /* Subroutine of native_encode_expr.  Encode the INTEGER_CST
7640    specified by EXPR into the buffer PTR of length LEN bytes.
7641    Return the number of bytes placed in the buffer, or zero
7642    upon failure.  */
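
/* For example (an illustrative note): encoding the 32-bit constant
   0x11223344 writes the bytes {0x44, 0x33, 0x22, 0x11} on a little-endian
   target and {0x11, 0x22, 0x33, 0x44} on a big-endian one.  */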
7643 
7644 static int
7645 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7646 {
7647   tree type = TREE_TYPE (expr);
7648   int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
7649   int byte, offset, word, words;
7650   unsigned char value;
7651 
7652   if ((off == -1 && total_bytes > len) || off >= total_bytes)
7653     return 0;
7654   if (off == -1)
7655     off = 0;
7656 
7657   if (ptr == NULL)
7658     /* Dry run.  */
7659     return MIN (len, total_bytes - off);
7660 
7661   words = total_bytes / UNITS_PER_WORD;
7662 
7663   for (byte = 0; byte < total_bytes; byte++)
7664     {
7665       int bitpos = byte * BITS_PER_UNIT;
7666       /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7667 	 number of bytes.  */
7668       value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7669 
7670       if (total_bytes > UNITS_PER_WORD)
7671 	{
7672 	  word = byte / UNITS_PER_WORD;
7673 	  if (WORDS_BIG_ENDIAN)
7674 	    word = (words - 1) - word;
7675 	  offset = word * UNITS_PER_WORD;
7676 	  if (BYTES_BIG_ENDIAN)
7677 	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7678 	  else
7679 	    offset += byte % UNITS_PER_WORD;
7680 	}
7681       else
7682 	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7683       if (offset >= off && offset - off < len)
7684 	ptr[offset - off] = value;
7685     }
7686   return MIN (len, total_bytes - off);
7687 }
7688 
7689 
7690 /* Subroutine of native_encode_expr.  Encode the FIXED_CST
7691    specified by EXPR into the buffer PTR of length LEN bytes.
7692    Return the number of bytes placed in the buffer, or zero
7693    upon failure.  */
7694 
7695 static int
7696 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7697 {
7698   tree type = TREE_TYPE (expr);
7699   scalar_mode mode = SCALAR_TYPE_MODE (type);
7700   int total_bytes = GET_MODE_SIZE (mode);
7701   FIXED_VALUE_TYPE value;
7702   tree i_value, i_type;
7703 
7704   if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7705     return 0;
7706 
7707   i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7708 
7709   if (NULL_TREE == i_type || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7710     return 0;
7711 
7712   value = TREE_FIXED_CST (expr);
7713   i_value = double_int_to_tree (i_type, value.data);
7714 
7715   return native_encode_int (i_value, ptr, len, off);
7716 }
7717 
7718 
7719 /* Subroutine of native_encode_expr.  Encode the REAL_CST
7720    specified by EXPR into the buffer PTR of length LEN bytes.
7721    Return the number of bytes placed in the buffer, or zero
7722    upon failure.  */
7723 
7724 static int
7725 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7726 {
7727   tree type = TREE_TYPE (expr);
7728   int total_bytes = GET_MODE_SIZE (SCALAR_FLOAT_TYPE_MODE (type));
7729   int byte, offset, word, words, bitpos;
7730   unsigned char value;
7731 
7732   /* There are always 32 bits in each long, no matter the size of
7733      the host's long.  We handle floating point representations with
7734      up to 192 bits.  */
7735   long tmp[6];
7736 
7737   if ((off == -1 && total_bytes > len) || off >= total_bytes)
7738     return 0;
7739   if (off == -1)
7740     off = 0;
7741 
7742   if (ptr == NULL)
7743     /* Dry run.  */
7744     return MIN (len, total_bytes - off);
7745 
7746   words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7747 
7748   real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7749 
7750   for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7751        bitpos += BITS_PER_UNIT)
7752     {
7753       byte = (bitpos / BITS_PER_UNIT) & 3;
7754       value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7755 
7756       if (UNITS_PER_WORD < 4)
7757 	{
7758 	  word = byte / UNITS_PER_WORD;
7759 	  if (WORDS_BIG_ENDIAN)
7760 	    word = (words - 1) - word;
7761 	  offset = word * UNITS_PER_WORD;
7762 	  if (BYTES_BIG_ENDIAN)
7763 	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7764 	  else
7765 	    offset += byte % UNITS_PER_WORD;
7766 	}
7767       else
7768 	{
7769 	  offset = byte;
7770 	  if (BYTES_BIG_ENDIAN)
7771 	    {
7772 	      /* Reverse bytes within each long, or within the entire float
7773 		 if it's smaller than a long (for HFmode).  */
7774 	      offset = MIN (3, total_bytes - 1) - offset;
7775 	      gcc_assert (offset >= 0);
7776 	    }
7777 	}
7778       offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7779       if (offset >= off
7780 	  && offset - off < len)
7781 	ptr[offset - off] = value;
7782     }
7783   return MIN (len, total_bytes - off);
7784 }
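
/* Example of the big-endian adjustment above: for a 2-byte HFmode
   value with UNITS_PER_WORD >= 4, MIN (3, total_bytes - 1) maps byte 0
   to offset 1 and byte 1 to offset 0, i.e. only the two bytes of the
   float are reversed instead of a whole 4-byte long.  */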
7785 
7786 /* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
7787    specified by EXPR into the buffer PTR of length LEN bytes.
7788    Return the number of bytes placed in the buffer, or zero
7789    upon failure.  */
7790 
7791 static int
7792 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7793 {
7794   int rsize, isize;
7795   tree part;
7796 
7797   part = TREE_REALPART (expr);
7798   rsize = native_encode_expr (part, ptr, len, off);
7799   if (off == -1 && rsize == 0)
7800     return 0;
7801   part = TREE_IMAGPART (expr);
7802   if (off != -1)
7803     off = MAX (0, off - GET_MODE_SIZE (SCALAR_TYPE_MODE (TREE_TYPE (part))));
7804   isize = native_encode_expr (part, ptr ? ptr + rsize : NULL,
7805 			      len - rsize, off);
7806   if (off == -1 && isize != rsize)
7807     return 0;
7808   return rsize + isize;
7809 }
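
/* Layout sketch: the real part occupies PTR[0 .. size-1] and the
   imaginary part follows immediately, so a _Complex float (assuming
   4-byte floats) encodes as 8 bytes, 4 for the real part followed by
   4 for the imaginary one.  */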
7810 
7811 /* Like native_encode_vector, but only encode the first COUNT elements.
7812    The other arguments are as for native_encode_vector.  */
7813 
7814 static int
7815 native_encode_vector_part (const_tree expr, unsigned char *ptr, int len,
7816 			   int off, unsigned HOST_WIDE_INT count)
7817 {
7818   tree itype = TREE_TYPE (TREE_TYPE (expr));
7819   if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (expr))
7820       && TYPE_PRECISION (itype) <= BITS_PER_UNIT)
7821     {
7822       /* This is the only case in which elements can be smaller than a byte.
7823 	 Element 0 is always in the lsb of the containing byte.  */
7824       unsigned int elt_bits = TYPE_PRECISION (itype);
7825       int total_bytes = CEIL (elt_bits * count, BITS_PER_UNIT);
7826       if ((off == -1 && total_bytes > len) || off >= total_bytes)
7827 	return 0;
7828 
7829       if (off == -1)
7830 	off = 0;
7831 
7832       /* Zero the buffer and then set bits later where necessary.  */
7833       int extract_bytes = MIN (len, total_bytes - off);
7834       if (ptr)
7835 	memset (ptr, 0, extract_bytes);
7836 
7837       unsigned int elts_per_byte = BITS_PER_UNIT / elt_bits;
7838       unsigned int first_elt = off * elts_per_byte;
7839       unsigned int extract_elts = extract_bytes * elts_per_byte;
7840       for (unsigned int i = 0; i < extract_elts; ++i)
7841 	{
7842 	  tree elt = VECTOR_CST_ELT (expr, first_elt + i);
7843 	  if (TREE_CODE (elt) != INTEGER_CST)
7844 	    return 0;
7845 
7846 	  if (ptr && wi::extract_uhwi (wi::to_wide (elt), 0, 1))
7847 	    {
7848 	      unsigned int bit = i * elt_bits;
7849 	      ptr[bit / BITS_PER_UNIT] |= 1 << (bit % BITS_PER_UNIT);
7850 	    }
7851 	}
7852       return extract_bytes;
7853     }
7854 
7855   int offset = 0;
7856   int size = GET_MODE_SIZE (SCALAR_TYPE_MODE (itype));
7857   for (unsigned HOST_WIDE_INT i = 0; i < count; i++)
7858     {
7859       if (off >= size)
7860 	{
7861 	  off -= size;
7862 	  continue;
7863 	}
7864       tree elem = VECTOR_CST_ELT (expr, i);
7865       int res = native_encode_expr (elem, ptr ? ptr + offset : NULL,
7866 				    len - offset, off);
7867       if ((off == -1 && res != size) || res == 0)
7868 	return 0;
7869       offset += res;
7870       if (offset >= len)
7871 	return (off == -1 && i < count - 1) ? 0 : offset;
7872       if (off != -1)
7873 	off = 0;
7874     }
7875   return offset;
7876 }
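
/* Packing sketch for the sub-byte boolean case above: with
   ELT_BITS == 1 and COUNT == 8, element I lands in bit I of PTR[0]
   (element 0 in the lsb), so the vector { 1, 0, 0, 0, 0, 0, 0, 1 }
   encodes as the single byte 0x81.  */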
7877 
7878 /* Subroutine of native_encode_expr.  Encode the VECTOR_CST
7879    specified by EXPR into the buffer PTR of length LEN bytes.
7880    Return the number of bytes placed in the buffer, or zero
7881    upon failure.  */
7882 
7883 static int
7884 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7885 {
7886   unsigned HOST_WIDE_INT count;
7887   if (!VECTOR_CST_NELTS (expr).is_constant (&count))
7888     return 0;
7889   return native_encode_vector_part (expr, ptr, len, off, count);
7890 }
7891 
7892 
7893 /* Subroutine of native_encode_expr.  Encode the STRING_CST
7894    specified by EXPR into the buffer PTR of length LEN bytes.
7895    Return the number of bytes placed in the buffer, or zero
7896    upon failure.  */
7897 
7898 static int
7899 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7900 {
7901   tree type = TREE_TYPE (expr);
7902 
7903   /* Wide-char strings are encoded in target byte-order, so encoding
7904      them natively is trivial.  */
7905   if (BITS_PER_UNIT != CHAR_BIT
7906       || TREE_CODE (type) != ARRAY_TYPE
7907       || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7908       || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7909     return 0;
7910 
7911   HOST_WIDE_INT total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
7912   if ((off == -1 && total_bytes > len) || off >= total_bytes)
7913     return 0;
7914   if (off == -1)
7915     off = 0;
7916   len = MIN (total_bytes - off, len);
7917   if (ptr == NULL)
7918     /* Dry run.  */;
7919   else
7920     {
7921       int written = 0;
7922       if (off < TREE_STRING_LENGTH (expr))
7923 	{
7924 	  written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7925 	  memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7926 	}
7927       memset (ptr + written, 0, len - written);
7928     }
7929   return len;
7930 }
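
/* Example: encoding a STRING_CST "ab" of type char[4] copies the three
   bytes 'a', 'b', '\0' covered by TREE_STRING_LENGTH and zero-fills
   the fourth byte, returning 4; bytes beyond the string payload are
   always zero-filled rather than read.  */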
7931 
7932 
7933 /* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST, REAL_CST,
7934    FIXED_CST, COMPLEX_CST, STRING_CST, or VECTOR_CST specified by EXPR into
7935    the buffer PTR of size LEN bytes.  If PTR is NULL, don't actually store
7936    anything, just do a dry run.  Fail either if OFF is -1 and LEN isn't
7937    sufficient to encode the entire EXPR, or if OFF is out of bounds.
7938    Otherwise, start at byte offset OFF and encode at most LEN bytes.
7939    Return the number of bytes placed in the buffer, or zero upon failure.  */
7940 
7941 int
7942 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7943 {
7944   /* We don't support starting at a negative offset, and -1 is special.  */
7945   if (off < -1)
7946     return 0;
7947 
7948   switch (TREE_CODE (expr))
7949     {
7950     case INTEGER_CST:
7951       return native_encode_int (expr, ptr, len, off);
7952 
7953     case REAL_CST:
7954       return native_encode_real (expr, ptr, len, off);
7955 
7956     case FIXED_CST:
7957       return native_encode_fixed (expr, ptr, len, off);
7958 
7959     case COMPLEX_CST:
7960       return native_encode_complex (expr, ptr, len, off);
7961 
7962     case VECTOR_CST:
7963       return native_encode_vector (expr, ptr, len, off);
7964 
7965     case STRING_CST:
7966       return native_encode_string (expr, ptr, len, off);
7967 
7968     default:
7969       return 0;
7970     }
7971 }
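
/* Typical usage, as a sketch rather than a quote from a real caller:

     unsigned char buf[64];
     int len = native_encode_expr (expr, buf, sizeof buf, -1);
     if (len == 0)
       return NULL_TREE;

   Passing PTR == NULL gives a dry run that merely sizes the encoding,
   and a non-negative OFF extracts a window of the representation.  */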
7972 
7973 /* Try to find a type whose byte size is smaller or equal to LEN bytes and
7974    larger or equal to FIELDSIZE bytes, with underlying mode precision/size
7975    a multiple of BITS_PER_UNIT.  As native_{interpret,encode}_int work in
7976    terms of machine modes, we can't just use build_nonstandard_integer_type.  */
7977 
7978 tree
7979 find_bitfield_repr_type (int fieldsize, int len)
7980 {
7981   machine_mode mode;
7982   for (int pass = 0; pass < 2; pass++)
7983     {
7984       enum mode_class mclass = pass ? MODE_PARTIAL_INT : MODE_INT;
7985       FOR_EACH_MODE_IN_CLASS (mode, mclass)
7986 	if (known_ge (GET_MODE_SIZE (mode), fieldsize)
7987 	    && known_eq (GET_MODE_PRECISION (mode),
7988 			 GET_MODE_BITSIZE (mode))
7989 	    && known_le (GET_MODE_SIZE (mode), len))
7990 	  {
7991 	    tree ret = lang_hooks.types.type_for_mode (mode, 1);
7992 	    if (ret && TYPE_MODE (ret) == mode)
7993 	      return ret;
7994 	  }
7995     }
7996 
7997   for (int i = 0; i < NUM_INT_N_ENTS; i ++)
7998     if (int_n_enabled_p[i]
7999 	&& int_n_data[i].bitsize >= (unsigned) (BITS_PER_UNIT * fieldsize)
8000 	&& int_n_trees[i].unsigned_type)
8001       {
8002 	tree ret = int_n_trees[i].unsigned_type;
8003 	mode = TYPE_MODE (ret);
8004 	if (known_ge (GET_MODE_SIZE (mode), fieldsize)
8005 	    && known_eq (GET_MODE_PRECISION (mode),
8006 			 GET_MODE_BITSIZE (mode))
8007 	    && known_le (GET_MODE_SIZE (mode), len))
8008 	  return ret;
8009       }
8010 
8011   return NULL_TREE;
8012 }
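
/* Example (target-dependent): find_bitfield_repr_type (3, 16) walks the
   integral modes from narrowest to widest and on most targets returns
   the 4-byte SImode type, the first one whose size covers 3 bytes while
   still fitting in 16; if no MODE_INT or MODE_PARTIAL_INT mode works,
   the int_n types (e.g. __int128) are tried before returning NULL.  */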
8013 
8014 /* Similar to native_encode_expr, but also handle CONSTRUCTORs, VCEs,
8015    NON_LVALUE_EXPRs and nops.  If MASK is non-NULL (then PTR has
8016    to be non-NULL and OFF zero), then in addition to filling the
8017 	   bytes pointed to by PTR with the value, also clear any bits pointed
8018 	   to by MASK that are known to be initialized; keep them as-is for
8019 	   e.g. uninitialized padding bits or uninitialized fields.  */
8020 
8021 int
8022 native_encode_initializer (tree init, unsigned char *ptr, int len,
8023 			   int off, unsigned char *mask)
8024 {
8025   int r;
8026 
8027   /* We don't support starting at a negative offset, and -1 is special.  */
8028   if (off < -1 || init == NULL_TREE)
8029     return 0;
8030 
8031   gcc_assert (mask == NULL || (off == 0 && ptr));
8032 
8033   STRIP_NOPS (init);
8034   switch (TREE_CODE (init))
8035     {
8036     case VIEW_CONVERT_EXPR:
8037     case NON_LVALUE_EXPR:
8038       return native_encode_initializer (TREE_OPERAND (init, 0), ptr, len, off,
8039 					mask);
8040     default:
8041       r = native_encode_expr (init, ptr, len, off);
8042       if (mask)
8043 	memset (mask, 0, r);
8044       return r;
8045     case CONSTRUCTOR:
8046       tree type = TREE_TYPE (init);
8047       HOST_WIDE_INT total_bytes = int_size_in_bytes (type);
8048       if (total_bytes < 0)
8049 	return 0;
8050       if ((off == -1 && total_bytes > len) || off >= total_bytes)
8051 	return 0;
8052       int o = off == -1 ? 0 : off;
8053       if (TREE_CODE (type) == ARRAY_TYPE)
8054 	{
8055 	  tree min_index;
8056 	  unsigned HOST_WIDE_INT cnt;
8057 	  HOST_WIDE_INT curpos = 0, fieldsize, valueinit = -1;
8058 	  constructor_elt *ce;
8059 
8060 	  if (!TYPE_DOMAIN (type)
8061 	      || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (type))) != INTEGER_CST)
8062 	    return 0;
8063 
8064 	  fieldsize = int_size_in_bytes (TREE_TYPE (type));
8065 	  if (fieldsize <= 0)
8066 	    return 0;
8067 
8068 	  min_index = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
8069 	  if (ptr)
8070 	    memset (ptr, '\0', MIN (total_bytes - off, len));
8071 
8072 	  for (cnt = 0; ; cnt++)
8073 	    {
8074 	      tree val = NULL_TREE, index = NULL_TREE;
8075 	      HOST_WIDE_INT pos = curpos, count = 0;
8076 	      bool full = false;
8077 	      if (vec_safe_iterate (CONSTRUCTOR_ELTS (init), cnt, &ce))
8078 		{
8079 		  val = ce->value;
8080 		  index = ce->index;
8081 		}
8082 	      else if (mask == NULL
8083 		       || CONSTRUCTOR_NO_CLEARING (init)
8084 		       || curpos >= total_bytes)
8085 		break;
8086 	      else
8087 		pos = total_bytes;
8088 
8089 	      if (index && TREE_CODE (index) == RANGE_EXPR)
8090 		{
8091 		  if (TREE_CODE (TREE_OPERAND (index, 0)) != INTEGER_CST
8092 		      || TREE_CODE (TREE_OPERAND (index, 1)) != INTEGER_CST)
8093 		    return 0;
8094 		  offset_int wpos
8095 		    = wi::sext (wi::to_offset (TREE_OPERAND (index, 0))
8096 				- wi::to_offset (min_index),
8097 				TYPE_PRECISION (sizetype));
8098 		  wpos *= fieldsize;
8099 		  if (!wi::fits_shwi_p (wpos))
8100 		    return 0;
8101 		  pos = wpos.to_shwi ();
8102 		  offset_int wcount
8103 		    = wi::sext (wi::to_offset (TREE_OPERAND (index, 1))
8104 				- wi::to_offset (TREE_OPERAND (index, 0)),
8105 				TYPE_PRECISION (sizetype));
8106 		  if (!wi::fits_shwi_p (wcount))
8107 		    return 0;
8108 		  count = wcount.to_shwi ();
8109 		}
8110 	      else if (index)
8111 		{
8112 		  if (TREE_CODE (index) != INTEGER_CST)
8113 		    return 0;
8114 		  offset_int wpos
8115 		    = wi::sext (wi::to_offset (index)
8116 				- wi::to_offset (min_index),
8117 				TYPE_PRECISION (sizetype));
8118 		  wpos *= fieldsize;
8119 		  if (!wi::fits_shwi_p (wpos))
8120 		    return 0;
8121 		  pos = wpos.to_shwi ();
8122 		}
8123 
8124 	      if (mask && !CONSTRUCTOR_NO_CLEARING (init) && curpos != pos)
8125 		{
8126 		  if (valueinit == -1)
8127 		    {
8128 		      tree zero = build_zero_cst (TREE_TYPE (type));
8129 		      r = native_encode_initializer (zero, ptr + curpos,
8130 						     fieldsize, 0,
8131 						     mask + curpos);
8132 		      if (TREE_CODE (zero) == CONSTRUCTOR)
8133 			ggc_free (zero);
8134 		      if (!r)
8135 			return 0;
8136 		      valueinit = curpos;
8137 		      curpos += fieldsize;
8138 		    }
8139 		  while (curpos != pos)
8140 		    {
8141 		      memcpy (ptr + curpos, ptr + valueinit, fieldsize);
8142 		      memcpy (mask + curpos, mask + valueinit, fieldsize);
8143 		      curpos += fieldsize;
8144 		    }
8145 		}
8146 
8147 	      curpos = pos;
8148 	      if (val)
8149 		do
8150 		  {
8151 		    if (off == -1
8152 			|| (curpos >= off
8153 			    && (curpos + fieldsize
8154 				<= (HOST_WIDE_INT) off + len)))
8155 		      {
8156 			if (full)
8157 			  {
8158 			    if (ptr)
8159 			      memcpy (ptr + (curpos - o), ptr + (pos - o),
8160 				      fieldsize);
8161 			    if (mask)
8162 			      memcpy (mask + curpos, mask + pos, fieldsize);
8163 			  }
8164 			else if (!native_encode_initializer (val,
8165 							     ptr
8166 							     ? ptr + curpos - o
8167 							     : NULL,
8168 							     fieldsize,
8169 							     off == -1 ? -1
8170 								       : 0,
8171 							     mask
8172 							     ? mask + curpos
8173 							     : NULL))
8174 			  return 0;
8175 			else
8176 			  {
8177 			    full = true;
8178 			    pos = curpos;
8179 			  }
8180 		      }
8181 		    else if (curpos + fieldsize > off
8182 			     && curpos < (HOST_WIDE_INT) off + len)
8183 		      {
8184 			/* Partial overlap.  */
8185 			unsigned char *p = NULL;
8186 			int no = 0;
8187 			int l;
8188 			gcc_assert (mask == NULL);
8189 			if (curpos >= off)
8190 			  {
8191 			    if (ptr)
8192 			      p = ptr + curpos - off;
8193 			    l = MIN ((HOST_WIDE_INT) off + len - curpos,
8194 				     fieldsize);
8195 			  }
8196 			else
8197 			  {
8198 			    p = ptr;
8199 			    no = off - curpos;
8200 			    l = len;
8201 			  }
8202 			if (!native_encode_initializer (val, p, l, no, NULL))
8203 			  return 0;
8204 		      }
8205 		    curpos += fieldsize;
8206 		  }
8207 		while (count-- != 0);
8208 	    }
8209 	  return MIN (total_bytes - off, len);
8210 	}
8211       else if (TREE_CODE (type) == RECORD_TYPE
8212 	       || TREE_CODE (type) == UNION_TYPE)
8213 	{
8214 	  unsigned HOST_WIDE_INT cnt;
8215 	  constructor_elt *ce;
8216 	  tree fld_base = TYPE_FIELDS (type);
8217 	  tree to_free = NULL_TREE;
8218 
8219 	  gcc_assert (TREE_CODE (type) == RECORD_TYPE || mask == NULL);
8220 	  if (ptr != NULL)
8221 	    memset (ptr, '\0', MIN (total_bytes - o, len));
8222 	  for (cnt = 0; ; cnt++)
8223 	    {
8224 	      tree val = NULL_TREE, field = NULL_TREE;
8225 	      HOST_WIDE_INT pos = 0, fieldsize;
8226 	      unsigned HOST_WIDE_INT bpos = 0, epos = 0;
8227 
8228 	      if (to_free)
8229 		{
8230 		  ggc_free (to_free);
8231 		  to_free = NULL_TREE;
8232 		}
8233 
8234 	      if (vec_safe_iterate (CONSTRUCTOR_ELTS (init), cnt, &ce))
8235 		{
8236 		  val = ce->value;
8237 		  field = ce->index;
8238 		  if (field == NULL_TREE)
8239 		    return 0;
8240 
8241 		  pos = int_byte_position (field);
8242 		  if (off != -1 && (HOST_WIDE_INT) off + len <= pos)
8243 		    continue;
8244 		}
8245 	      else if (mask == NULL
8246 		       || CONSTRUCTOR_NO_CLEARING (init))
8247 		break;
8248 	      else
8249 		pos = total_bytes;
8250 
8251 	      if (mask && !CONSTRUCTOR_NO_CLEARING (init))
8252 		{
8253 		  tree fld;
8254 		  for (fld = fld_base; fld; fld = DECL_CHAIN (fld))
8255 		    {
8256 		      if (TREE_CODE (fld) != FIELD_DECL)
8257 			continue;
8258 		      if (fld == field)
8259 			break;
8260 		      if (DECL_PADDING_P (fld))
8261 			continue;
8262 		      if (DECL_SIZE_UNIT (fld) == NULL_TREE
8263 			  || !tree_fits_shwi_p (DECL_SIZE_UNIT (fld)))
8264 			return 0;
8265 		      if (integer_zerop (DECL_SIZE_UNIT (fld)))
8266 			continue;
8267 		      break;
8268 		    }
8269 		  if (fld == NULL_TREE)
8270 		    {
8271 		      if (ce == NULL)
8272 			break;
8273 		      return 0;
8274 		    }
8275 		  fld_base = DECL_CHAIN (fld);
8276 		  if (fld != field)
8277 		    {
8278 		      cnt--;
8279 		      field = fld;
8280 		      pos = int_byte_position (field);
8281 		      val = build_zero_cst (TREE_TYPE (fld));
8282 		      if (TREE_CODE (val) == CONSTRUCTOR)
8283 			to_free = val;
8284 		    }
8285 		}
8286 
8287 	      if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
8288 		  && TYPE_DOMAIN (TREE_TYPE (field))
8289 		  && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
8290 		{
8291 		  if (mask || off != -1)
8292 		    return 0;
8293 		  if (val == NULL_TREE)
8294 		    continue;
8295 		  if (TREE_CODE (TREE_TYPE (val)) != ARRAY_TYPE)
8296 		    return 0;
8297 		  fieldsize = int_size_in_bytes (TREE_TYPE (val));
8298 		  if (fieldsize < 0
8299 		      || (int) fieldsize != fieldsize
8300 		      || (pos + fieldsize) > INT_MAX)
8301 		    return 0;
8302 		  if (pos + fieldsize > total_bytes)
8303 		    {
8304 		      if (ptr != NULL && total_bytes < len)
8305 			memset (ptr + total_bytes, '\0',
8306 				MIN (pos + fieldsize, len) - total_bytes);
8307 		      total_bytes = pos + fieldsize;
8308 		    }
8309 		}
8310 	      else
8311 		{
8312 		  if (DECL_SIZE_UNIT (field) == NULL_TREE
8313 		      || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
8314 		    return 0;
8315 		  fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
8316 		}
8317 	      if (fieldsize == 0)
8318 		continue;
8319 
8320 	      if (DECL_BIT_FIELD (field))
8321 		{
8322 		  if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
8323 		    return 0;
8324 		  fieldsize = TYPE_PRECISION (TREE_TYPE (field));
8325 		  bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
8326 		  if (bpos % BITS_PER_UNIT)
8327 		    bpos %= BITS_PER_UNIT;
8328 		  else
8329 		    bpos = 0;
8330 		  fieldsize += bpos;
8331 		  epos = fieldsize % BITS_PER_UNIT;
8332 		  fieldsize += BITS_PER_UNIT - 1;
8333 		  fieldsize /= BITS_PER_UNIT;
8334 		}
8335 
8336 	      if (off != -1 && pos + fieldsize <= off)
8337 		continue;
8338 
8339 	      if (val == NULL_TREE)
8340 		continue;
8341 
8342 	      if (DECL_BIT_FIELD (field))
8343 		{
8344 		  /* FIXME: Handle PDP endian.  */
8345 		  if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
8346 		    return 0;
8347 
8348 		  if (TREE_CODE (val) != INTEGER_CST)
8349 		    return 0;
8350 
8351 		  tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
8352 		  tree repr_type = NULL_TREE;
8353 		  HOST_WIDE_INT rpos = 0;
8354 		  if (repr && INTEGRAL_TYPE_P (TREE_TYPE (repr)))
8355 		    {
8356 		      rpos = int_byte_position (repr);
8357 		      repr_type = TREE_TYPE (repr);
8358 		    }
8359 		  else
8360 		    {
8361 		      repr_type = find_bitfield_repr_type (fieldsize, len);
8362 		      if (repr_type == NULL_TREE)
8363 			return 0;
8364 		      HOST_WIDE_INT repr_size = int_size_in_bytes (repr_type);
8365 		      gcc_assert (repr_size > 0 && repr_size <= len);
8366 		      if (pos + repr_size <= o + len)
8367 			rpos = pos;
8368 		      else
8369 			{
8370 			  rpos = o + len - repr_size;
8371 			  gcc_assert (rpos <= pos);
8372 			}
8373 		    }
8374 
8375 		  if (rpos > pos)
8376 		    return 0;
8377 		  wide_int w = wi::to_wide (val, TYPE_PRECISION (repr_type));
8378 		  int diff = (TYPE_PRECISION (repr_type)
8379 			      - TYPE_PRECISION (TREE_TYPE (field)));
8380 		  HOST_WIDE_INT bitoff = (pos - rpos) * BITS_PER_UNIT + bpos;
8381 		  if (!BYTES_BIG_ENDIAN)
8382 		    w = wi::lshift (w, bitoff);
8383 		  else
8384 		    w = wi::lshift (w, diff - bitoff);
8385 		  val = wide_int_to_tree (repr_type, w);
8386 
8387 		  unsigned char buf[MAX_BITSIZE_MODE_ANY_INT
8388 				    / BITS_PER_UNIT + 1];
8389 		  int l = native_encode_int (val, buf, sizeof buf, 0);
8390 		  if (l * BITS_PER_UNIT != TYPE_PRECISION (repr_type))
8391 		    return 0;
8392 
8393 		  if (ptr == NULL)
8394 		    continue;
8395 
8396 		  /* If the bitfield does not start at byte boundary, handle
8397 		     the partial byte at the start.  */
8398 		  if (bpos
8399 		      && (off == -1 || (pos >= off && len >= 1)))
8400 		    {
8401 		      if (!BYTES_BIG_ENDIAN)
8402 			{
8403 			  int msk = (1 << bpos) - 1;
8404 			  buf[pos - rpos] &= ~msk;
8405 			  buf[pos - rpos] |= ptr[pos - o] & msk;
8406 			  if (mask)
8407 			    {
8408 			      if (fieldsize > 1 || epos == 0)
8409 				mask[pos] &= msk;
8410 			      else
8411 				mask[pos] &= (msk | ~((1 << epos) - 1));
8412 			    }
8413 			}
8414 		      else
8415 			{
8416 			  int msk = (1 << (BITS_PER_UNIT - bpos)) - 1;
8417 			  buf[pos - rpos] &= msk;
8418 			  buf[pos - rpos] |= ptr[pos - o] & ~msk;
8419 			  if (mask)
8420 			    {
8421 			      if (fieldsize > 1 || epos == 0)
8422 				mask[pos] &= ~msk;
8423 			      else
8424 				mask[pos] &= (~msk
8425 					      | ((1 << (BITS_PER_UNIT - epos))
8426 						 - 1));
8427 			    }
8428 			}
8429 		    }
8430 		  /* If the bitfield does not end at byte boundary, handle
8431 		     the partial byte at the end.  */
8432 		  if (epos
8433 		      && (off == -1
8434 			  || pos + fieldsize <= (HOST_WIDE_INT) off + len))
8435 		    {
8436 		      if (!BYTES_BIG_ENDIAN)
8437 			{
8438 			  int msk = (1 << epos) - 1;
8439 			  buf[pos - rpos + fieldsize - 1] &= msk;
8440 			  buf[pos - rpos + fieldsize - 1]
8441 			    |= ptr[pos + fieldsize - 1 - o] & ~msk;
8442 			  if (mask && (fieldsize > 1 || bpos == 0))
8443 			    mask[pos + fieldsize - 1] &= ~msk;
8444 			}
8445 		       else
8446 			{
8447 			  int msk = (1 << (BITS_PER_UNIT - epos)) - 1;
8448 			  buf[pos - rpos + fieldsize - 1] &= ~msk;
8449 			  buf[pos - rpos + fieldsize - 1]
8450 			    |= ptr[pos + fieldsize - 1 - o] & msk;
8451 			  if (mask && (fieldsize > 1 || bpos == 0))
8452 			    mask[pos + fieldsize - 1] &= msk;
8453 			}
8454 		    }
8455 		  if (off == -1
8456 		      || (pos >= off
8457 			  && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8458 		    {
8459 		      memcpy (ptr + pos - o, buf + (pos - rpos), fieldsize);
8460 		      if (mask && (fieldsize > (bpos != 0) + (epos != 0)))
8461 			memset (mask + pos + (bpos != 0), 0,
8462 				fieldsize - (bpos != 0) - (epos != 0));
8463 		    }
8464 		  else
8465 		    {
8466 		      /* Partial overlap.  */
8467 		      HOST_WIDE_INT fsz = fieldsize;
8468 		      gcc_assert (mask == NULL);
8469 		      if (pos < off)
8470 			{
8471 			  fsz -= (off - pos);
8472 			  pos = off;
8473 			}
8474 		      if (pos + fsz > (HOST_WIDE_INT) off + len)
8475 			fsz = (HOST_WIDE_INT) off + len - pos;
8476 		      memcpy (ptr + pos - off, buf + (pos - rpos), fsz);
8477 		    }
8478 		  continue;
8479 		}
8480 
8481 	      if (off == -1
8482 		  || (pos >= off
8483 		      && (pos + fieldsize <= (HOST_WIDE_INT) off + len)))
8484 		{
8485 		  int fldsize = fieldsize;
8486 		  if (off == -1)
8487 		    {
8488 		      tree fld = DECL_CHAIN (field);
8489 		      while (fld)
8490 			{
8491 			  if (TREE_CODE (fld) == FIELD_DECL)
8492 			    break;
8493 			  fld = DECL_CHAIN (fld);
8494 			}
8495 		      if (fld == NULL_TREE)
8496 			fldsize = len - pos;
8497 		    }
8498 		  r = native_encode_initializer (val, ptr ? ptr + pos - o
8499 							  : NULL,
8500 						 fldsize,
8501 						 off == -1 ? -1 : 0,
8502 						 mask ? mask + pos : NULL);
8503 		  if (!r)
8504 		    return 0;
8505 		  if (off == -1
8506 		      && fldsize != fieldsize
8507 		      && r > fieldsize
8508 		      && pos + r > total_bytes)
8509 		    total_bytes = pos + r;
8510 		}
8511 	      else
8512 		{
8513 		  /* Partial overlap.  */
8514 		  unsigned char *p = NULL;
8515 		  int no = 0;
8516 		  int l;
8517 		  gcc_assert (mask == NULL);
8518 		  if (pos >= off)
8519 		    {
8520 		      if (ptr)
8521 			p = ptr + pos - off;
8522 		      l = MIN ((HOST_WIDE_INT) off + len - pos,
8523 				fieldsize);
8524 		    }
8525 		  else
8526 		    {
8527 		      p = ptr;
8528 		      no = off - pos;
8529 		      l = len;
8530 		    }
8531 		  if (!native_encode_initializer (val, p, l, no, NULL))
8532 		    return 0;
8533 		}
8534 	    }
8535 	  return MIN (total_bytes - off, len);
8536 	}
8537       return 0;
8538     }
8539 }
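
/* Mask sketch (little-endian, illustrative): for

     struct S { char a; int b; } s = { 1, 2 };

   the routine fills PTR with 01 00 00 00 02 00 00 00 and, when MASK is
   given, clears the mask bytes covering A and B while leaving the mask
   over the three padding bytes untouched, so the caller can tell
   defined bits from undefined ones.  */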
8540 
8541 
8542 /* Subroutine of native_interpret_expr.  Interpret the contents of
8543    the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
8544    If the buffer cannot be interpreted, return NULL_TREE.  */
8545 
8546 static tree
8547 native_interpret_int (tree type, const unsigned char *ptr, int len)
8548 {
8549   int total_bytes = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
8550 
8551   if (total_bytes > len
8552       || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8553     return NULL_TREE;
8554 
8555   wide_int result = wi::from_buffer (ptr, total_bytes);
8556 
8557   return wide_int_to_tree (type, result);
8558 }
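
/* Round-trip sketch: this is the inverse of native_encode_int, so for
   an INTEGER_CST C that fits the buffer (and is no wider than
   HOST_BITS_PER_DOUBLE_INT bits),

     native_encode_int (c, buf, len, 0);
     tree c2 = native_interpret_int (TREE_TYPE (c), buf, len);

   reconstructs C2 equal to C, with wi::from_buffer reading the bytes
   back in target order and wide_int_to_tree reducing to the type's
   precision.  */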
8559 
8560 
8561 /* Subroutine of native_interpret_expr.  Interpret the contents of
8562    the buffer PTR of length LEN as a FIXED_CST of type TYPE.
8563    If the buffer cannot be interpreted, return NULL_TREE.  */
8564 
8565 static tree
8566 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
8567 {
8568   scalar_mode mode = SCALAR_TYPE_MODE (type);
8569   int total_bytes = GET_MODE_SIZE (mode);
8570   double_int result;
8571   FIXED_VALUE_TYPE fixed_value;
8572 
8573   if (total_bytes > len
8574       || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
8575     return NULL_TREE;
8576 
8577   result = double_int::from_buffer (ptr, total_bytes);
8578   fixed_value = fixed_from_double_int (result, mode);
8579 
8580   return build_fixed (type, fixed_value);
8581 }
8582 
8583 
8584 /* Subroutine of native_interpret_expr.  Interpret the contents of
8585    the buffer PTR of length LEN as a REAL_CST of type TYPE.
8586    If the buffer cannot be interpreted, return NULL_TREE.  */
8587 
8588 static tree
8589 native_interpret_real (tree type, const unsigned char *ptr, int len)
8590 {
8591   scalar_float_mode mode = SCALAR_FLOAT_TYPE_MODE (type);
8592   int total_bytes = GET_MODE_SIZE (mode);
8593   unsigned char value;
8594   /* There are always 32 bits in each long, no matter the size of
8595      the host's long.  We handle floating point representations with
8596      up to 192 bits.  */
8597   REAL_VALUE_TYPE r;
8598   long tmp[6];
8599 
8600   if (total_bytes > len || total_bytes > 24)
8601     return NULL_TREE;
8602   int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
8603 
8604   memset (tmp, 0, sizeof (tmp));
8605   for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
8606        bitpos += BITS_PER_UNIT)
8607     {
8608       /* Both OFFSET and BYTE index within a long;
8609 	 bitpos indexes the whole float.  */
8610       int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
8611       if (UNITS_PER_WORD < 4)
8612 	{
8613 	  int word = byte / UNITS_PER_WORD;
8614 	  if (WORDS_BIG_ENDIAN)
8615 	    word = (words - 1) - word;
8616 	  offset = word * UNITS_PER_WORD;
8617 	  if (BYTES_BIG_ENDIAN)
8618 	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
8619 	  else
8620 	    offset += byte % UNITS_PER_WORD;
8621 	}
8622       else
8623 	{
8624 	  offset = byte;
8625 	  if (BYTES_BIG_ENDIAN)
8626 	    {
8627 	      /* Reverse bytes within each long, or within the entire float
8628 		 if it's smaller than a long (for HFmode).  */
8629 	      offset = MIN (3, total_bytes - 1) - offset;
8630 	      gcc_assert (offset >= 0);
8631 	    }
8632 	}
8633       value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
8634 
8635       tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
8636     }
8637 
8638   real_from_target (&r, tmp, mode);
8639   tree ret = build_real (type, r);
8640   if (MODE_COMPOSITE_P (mode))
8641     {
8642       /* For floating point values in composite modes, punt if this folding
8643 	 doesn't preserve bit representation.  As the mode doesn't have fixed
8644 	 precision while GCC pretends it does, there could be valid values that
8645 	 GCC can't really represent accurately.  See PR95450.  */
8646       unsigned char buf[24];
8647       if (native_encode_expr (ret, buf, total_bytes, 0) != total_bytes
8648 	  || memcmp (ptr, buf, total_bytes) != 0)
8649 	ret = NULL_TREE;
8650     }
8651   return ret;
8652 }
8653 
8654 
8655 /* Subroutine of native_interpret_expr.  Interpret the contents of
8656    the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
8657    If the buffer cannot be interpreted, return NULL_TREE.  */
8658 
8659 static tree
8660 native_interpret_complex (tree type, const unsigned char *ptr, int len)
8661 {
8662   tree etype, rpart, ipart;
8663   int size;
8664 
8665   etype = TREE_TYPE (type);
8666   size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8667   if (size * 2 > len)
8668     return NULL_TREE;
8669   rpart = native_interpret_expr (etype, ptr, size);
8670   if (!rpart)
8671     return NULL_TREE;
8672   ipart = native_interpret_expr (etype, ptr+size, size);
8673   if (!ipart)
8674     return NULL_TREE;
8675   return build_complex (type, rpart, ipart);
8676 }
8677 
8678 /* Read a vector of type TYPE from the target memory image given by BYTES,
8679    which contains LEN bytes.  The vector is known to be encodable using
8680    NPATTERNS interleaved patterns with NELTS_PER_PATTERN elements each.
8681 
8682    Return the vector on success, otherwise return null.  */
8683 
8684 static tree
8685 native_interpret_vector_part (tree type, const unsigned char *bytes,
8686 			      unsigned int len, unsigned int npatterns,
8687 			      unsigned int nelts_per_pattern)
8688 {
8689   tree elt_type = TREE_TYPE (type);
8690   if (VECTOR_BOOLEAN_TYPE_P (type)
8691       && TYPE_PRECISION (elt_type) <= BITS_PER_UNIT)
8692     {
8693       /* This is the only case in which elements can be smaller than a byte.
8694 	 Element 0 is always in the lsb of the containing byte.  */
8695       unsigned int elt_bits = TYPE_PRECISION (elt_type);
8696       if (elt_bits * npatterns * nelts_per_pattern > len * BITS_PER_UNIT)
8697 	return NULL_TREE;
8698 
8699       tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8700       for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8701 	{
8702 	  unsigned int bit_index = i * elt_bits;
8703 	  unsigned int byte_index = bit_index / BITS_PER_UNIT;
8704 	  unsigned int lsb = bit_index % BITS_PER_UNIT;
8705 	  builder.quick_push (bytes[byte_index] & (1 << lsb)
8706 			      ? build_all_ones_cst (elt_type)
8707 			      : build_zero_cst (elt_type));
8708 	}
8709       return builder.build ();
8710     }
8711 
8712   unsigned int elt_bytes = tree_to_uhwi (TYPE_SIZE_UNIT (elt_type));
8713   if (elt_bytes * npatterns * nelts_per_pattern > len)
8714     return NULL_TREE;
8715 
8716   tree_vector_builder builder (type, npatterns, nelts_per_pattern);
8717   for (unsigned int i = 0; i < builder.encoded_nelts (); ++i)
8718     {
8719       tree elt = native_interpret_expr (elt_type, bytes, elt_bytes);
8720       if (!elt)
8721 	return NULL_TREE;
8722       builder.quick_push (elt);
8723       bytes += elt_bytes;
8724     }
8725   return builder.build ();
8726 }
8727 
8728 /* Subroutine of native_interpret_expr.  Interpret the contents of
8729    the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
8730    If the buffer cannot be interpreted, return NULL_TREE.  */
8731 
8732 static tree
8733 native_interpret_vector (tree type, const unsigned char *ptr, unsigned int len)
8734 {
8735   tree etype;
8736   unsigned int size;
8737   unsigned HOST_WIDE_INT count;
8738 
8739   etype = TREE_TYPE (type);
8740   size = GET_MODE_SIZE (SCALAR_TYPE_MODE (etype));
8741   if (!TYPE_VECTOR_SUBPARTS (type).is_constant (&count)
8742       || size * count > len)
8743     return NULL_TREE;
8744 
8745   return native_interpret_vector_part (type, ptr, len, count, 1);
8746 }
8747 
8748 
8749 /* Subroutine of fold_view_convert_expr.  Interpret the contents of
8750    the buffer PTR of length LEN as a constant of type TYPE.  For
8751    INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
8752    we return a REAL_CST, etc...  If the buffer cannot be interpreted,
8753    return NULL_TREE.  */
8754 
8755 tree
8756 native_interpret_expr (tree type, const unsigned char *ptr, int len)
8757 {
8758   switch (TREE_CODE (type))
8759     {
8760     case INTEGER_TYPE:
8761     case ENUMERAL_TYPE:
8762     case BOOLEAN_TYPE:
8763     case POINTER_TYPE:
8764     case REFERENCE_TYPE:
8765       return native_interpret_int (type, ptr, len);
8766 
8767     case REAL_TYPE:
8768       return native_interpret_real (type, ptr, len);
8769 
8770     case FIXED_POINT_TYPE:
8771       return native_interpret_fixed (type, ptr, len);
8772 
8773     case COMPLEX_TYPE:
8774       return native_interpret_complex (type, ptr, len);
8775 
8776     case VECTOR_TYPE:
8777       return native_interpret_vector (type, ptr, len);
8778 
8779     default:
8780       return NULL_TREE;
8781     }
8782 }
8783 
8784 /* Returns true if we can interpret the contents of a native encoding
8785    as TYPE.  */
8786 
8787 bool
8788 can_native_interpret_type_p (tree type)
8789 {
8790   switch (TREE_CODE (type))
8791     {
8792     case INTEGER_TYPE:
8793     case ENUMERAL_TYPE:
8794     case BOOLEAN_TYPE:
8795     case POINTER_TYPE:
8796     case REFERENCE_TYPE:
8797     case FIXED_POINT_TYPE:
8798     case REAL_TYPE:
8799     case COMPLEX_TYPE:
8800     case VECTOR_TYPE:
8801       return true;
8802     default:
8803       return false;
8804     }
8805 }
8806 
8807 /* Attempt to interpret an aggregate of TYPE from bytes encoded in target
8808    byte order at PTR + OFF with LEN bytes.  Does not handle unions.  */
8809 
8810 tree
8811 native_interpret_aggregate (tree type, const unsigned char *ptr, int off,
8812 			    int len)
8813 {
8814   vec<constructor_elt, va_gc> *elts = NULL;
8815   if (TREE_CODE (type) == ARRAY_TYPE)
8816     {
8817       HOST_WIDE_INT eltsz = int_size_in_bytes (TREE_TYPE (type));
8818       if (eltsz < 0 || eltsz > len || TYPE_DOMAIN (type) == NULL_TREE)
8819 	return NULL_TREE;
8820 
8821       HOST_WIDE_INT cnt = 0;
8822       if (TYPE_MAX_VALUE (TYPE_DOMAIN (type)))
8823 	{
8824 	  if (!tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
8825 	    return NULL_TREE;
8826 	  cnt = tree_to_shwi (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) + 1;
8827 	}
8828       if (eltsz == 0)
8829 	cnt = 0;
8830       HOST_WIDE_INT pos = 0;
8831       for (HOST_WIDE_INT i = 0; i < cnt; i++, pos += eltsz)
8832 	{
8833 	  tree v = NULL_TREE;
8834 	  if (pos >= len || pos + eltsz > len)
8835 	    return NULL_TREE;
8836 	  if (can_native_interpret_type_p (TREE_TYPE (type)))
8837 	    {
8838 	      v = native_interpret_expr (TREE_TYPE (type),
8839 					 ptr + off + pos, eltsz);
8840 	      if (v == NULL_TREE)
8841 		return NULL_TREE;
8842 	    }
8843 	  else if (TREE_CODE (TREE_TYPE (type)) == RECORD_TYPE
8844 		   || TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
8845 	    v = native_interpret_aggregate (TREE_TYPE (type), ptr, off + pos,
8846 					    eltsz);
8847 	  if (v == NULL_TREE)
8848 	    return NULL_TREE;
8849 	  CONSTRUCTOR_APPEND_ELT (elts, size_int (i), v);
8850 	}
8851       return build_constructor (type, elts);
8852     }
8853   if (TREE_CODE (type) != RECORD_TYPE)
8854     return NULL_TREE;
8855   for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
8856     {
8857       if (TREE_CODE (field) != FIELD_DECL || DECL_PADDING_P (field))
8858 	continue;
8859       tree fld = field;
8860       HOST_WIDE_INT bitoff = 0, pos = 0, sz = 0;
8861       int diff = 0;
8862       tree v = NULL_TREE;
8863       if (DECL_BIT_FIELD (field))
8864 	{
8865 	  fld = DECL_BIT_FIELD_REPRESENTATIVE (field);
8866 	  if (fld && INTEGRAL_TYPE_P (TREE_TYPE (fld)))
8867 	    {
8868 	      poly_int64 bitoffset;
8869 	      poly_uint64 field_offset, fld_offset;
8870 	      if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
8871 		  && poly_int_tree_p (DECL_FIELD_OFFSET (fld), &fld_offset))
8872 		bitoffset = (field_offset - fld_offset) * BITS_PER_UNIT;
8873 	      else
8874 		bitoffset = 0;
8875 	      bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
8876 			    - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld)));
8877 	      diff = (TYPE_PRECISION (TREE_TYPE (fld))
8878 		      - TYPE_PRECISION (TREE_TYPE (field)));
8879 	      if (!bitoffset.is_constant (&bitoff)
8880 		  || bitoff < 0
8881 		  || bitoff > diff)
8882 		return NULL_TREE;
8883 	    }
8884 	  else
8885 	    {
8886 	      if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field)))
8887 		return NULL_TREE;
8888 	      int fieldsize = TYPE_PRECISION (TREE_TYPE (field));
8889 	      int bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
8890 	      bpos %= BITS_PER_UNIT;
8891 	      fieldsize += bpos;
8892 	      fieldsize += BITS_PER_UNIT - 1;
8893 	      fieldsize /= BITS_PER_UNIT;
8894 	      tree repr_type = find_bitfield_repr_type (fieldsize, len);
8895 	      if (repr_type == NULL_TREE)
8896 		return NULL_TREE;
8897 	      sz = int_size_in_bytes (repr_type);
8898 	      if (sz < 0 || sz > len)
8899 		return NULL_TREE;
8900 	      pos = int_byte_position (field);
8901 	      if (pos < 0 || pos > len || pos + fieldsize > len)
8902 		return NULL_TREE;
8903 	      HOST_WIDE_INT rpos;
8904 	      if (pos + sz <= len)
8905 		rpos = pos;
8906 	      else
8907 		{
8908 		  rpos = len - sz;
8909 		  gcc_assert (rpos <= pos);
8910 		}
8911 	      bitoff = (HOST_WIDE_INT) (pos - rpos) * BITS_PER_UNIT + bpos;
8912 	      pos = rpos;
8913 	      diff = (TYPE_PRECISION (repr_type)
8914 		      - TYPE_PRECISION (TREE_TYPE (field)));
8915 	      v = native_interpret_expr (repr_type, ptr + off + pos, sz);
8916 	      if (v == NULL_TREE)
8917 		return NULL_TREE;
8918 	      fld = NULL_TREE;
8919 	    }
8920 	}
8921 
8922       if (fld)
8923 	{
8924 	  sz = int_size_in_bytes (TREE_TYPE (fld));
8925 	  if (sz < 0 || sz > len)
8926 	    return NULL_TREE;
8927 	  tree byte_pos = byte_position (fld);
8928 	  if (!tree_fits_shwi_p (byte_pos))
8929 	    return NULL_TREE;
8930 	  pos = tree_to_shwi (byte_pos);
8931 	  if (pos < 0 || pos > len || pos + sz > len)
8932 	    return NULL_TREE;
8933 	}
8934       if (fld == NULL_TREE)
8935 	/* Already handled above.  */;
8936       else if (can_native_interpret_type_p (TREE_TYPE (fld)))
8937 	{
8938 	  v = native_interpret_expr (TREE_TYPE (fld),
8939 				     ptr + off + pos, sz);
8940 	  if (v == NULL_TREE)
8941 	    return NULL_TREE;
8942 	}
8943       else if (TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE
8944 	       || TREE_CODE (TREE_TYPE (fld)) == ARRAY_TYPE)
8945 	v = native_interpret_aggregate (TREE_TYPE (fld), ptr, off + pos, sz);
8946       if (v == NULL_TREE)
8947 	return NULL_TREE;
8948       if (fld != field)
8949 	{
8950 	  if (TREE_CODE (v) != INTEGER_CST)
8951 	    return NULL_TREE;
8952 
8953 	  /* FIXME: Figure out how to handle PDP endian bitfields.  */
8954 	  if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
8955 	    return NULL_TREE;
8956 	  if (!BYTES_BIG_ENDIAN)
8957 	    v = wide_int_to_tree (TREE_TYPE (field),
8958 				  wi::lrshift (wi::to_wide (v), bitoff));
8959 	  else
8960 	    v = wide_int_to_tree (TREE_TYPE (field),
8961 				  wi::lrshift (wi::to_wide (v),
8962 					       diff - bitoff));
8963 	}
8964       CONSTRUCTOR_APPEND_ELT (elts, field, v);
8965     }
8966   return build_constructor (type, elts);
8967 }
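
/* Example (little-endian, illustrative): given the eight target bytes
   01 00 00 00 02 00 00 00 and TYPE struct { char a; int b; }, the
   routine rebuilds the CONSTRUCTOR { .a = 1, .b = 2 }, recursing into
   nested RECORD_TYPEs and ARRAY_TYPEs and going through the
   representative type for bit-fields.  */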
8968 
8969 /* Routines for manipulation of native_encode_expr encoded data if the encoded
8970    or extracted constant positions and/or sizes aren't byte aligned.  */
8971 
8972 /* Shift left the bytes in PTR of SZ elements by AMNT bits, carrying over the
8973    bits between adjacent elements.  AMNT should be within
8974    [0, BITS_PER_UNIT).
8975    Example, AMNT = 2:
8976    00011111|11100000 << 2 = 01111111|10000000
8977    PTR[1]  | PTR[0]         PTR[1]  | PTR[0].  */
8978 
8979 void
8980 shift_bytes_in_array_left (unsigned char *ptr, unsigned int sz,
8981 			   unsigned int amnt)
8982 {
8983   if (amnt == 0)
8984     return;
8985 
8986   unsigned char carry_over = 0U;
8987   unsigned char carry_mask = (~0U) << (unsigned char) (BITS_PER_UNIT - amnt);
8988   unsigned char clear_mask = (~0U) << amnt;
8989 
8990   for (unsigned int i = 0; i < sz; i++)
8991     {
8992       unsigned prev_carry_over = carry_over;
8993       carry_over = (ptr[i] & carry_mask) >> (BITS_PER_UNIT - amnt);
8994 
8995       ptr[i] <<= amnt;
8996       if (i != 0)
8997 	{
8998 	  ptr[i] &= clear_mask;
8999 	  ptr[i] |= prev_carry_over;
9000 	}
9001     }
9002 }
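
/* Usage sketch matching the example in the comment above:

     unsigned char buf[2] = { 0xe0, 0x1f };
     shift_bytes_in_array_left (buf, 2, 2);

   leaves buf as { 0x80, 0x7f }; the two bits shifted out of buf[0]
   carry into the low bits of buf[1].  */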
9003 
9004 /* Like shift_bytes_in_array_left but for big-endian.
9005    Shift right the bytes in PTR of SZ elements by AMNT bits, carrying over the
9006    bits between adjacent elements.  AMNT should be within
9007    [0, BITS_PER_UNIT).
9008    Example, AMNT = 2:
9009    00011111|11100000 >> 2 = 00000111|11111000
9010    PTR[0]  | PTR[1]         PTR[0]  | PTR[1].  */
9011 
9012 void
9013 shift_bytes_in_array_right (unsigned char *ptr, unsigned int sz,
9014 			    unsigned int amnt)
9015 {
9016   if (amnt == 0)
9017     return;
9018 
9019   unsigned char carry_over = 0U;
9020   unsigned char carry_mask = ~(~0U << amnt);
9021 
9022   for (unsigned int i = 0; i < sz; i++)
9023     {
9024       unsigned prev_carry_over = carry_over;
9025       carry_over = ptr[i] & carry_mask;
9026 
9027       carry_over <<= (unsigned char) BITS_PER_UNIT - amnt;
9028       ptr[i] >>= amnt;
9029       ptr[i] |= prev_carry_over;
9030     }
9031 }
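
/* Usage sketch matching the example in the comment above:

     unsigned char buf[2] = { 0x1f, 0xe0 };
     shift_bytes_in_array_right (buf, 2, 2);

   leaves buf as { 0x07, 0xf8 }; the two bits shifted out of buf[0]
   carry into the high bits of buf[1].  */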
9032 
9033 /* Try to view-convert VECTOR_CST EXPR to VECTOR_TYPE TYPE by operating
9034    directly on the VECTOR_CST encoding, in a way that works for variable-
9035    length vectors.  Return the resulting VECTOR_CST on success or null
9036    on failure.  */
9037 
9038 static tree
9039 fold_view_convert_vector_encoding (tree type, tree expr)
9040 {
9041   tree expr_type = TREE_TYPE (expr);
9042   poly_uint64 type_bits, expr_bits;
9043   if (!poly_int_tree_p (TYPE_SIZE (type), &type_bits)
9044       || !poly_int_tree_p (TYPE_SIZE (expr_type), &expr_bits))
9045     return NULL_TREE;
9046 
9047   poly_uint64 type_units = TYPE_VECTOR_SUBPARTS (type);
9048   poly_uint64 expr_units = TYPE_VECTOR_SUBPARTS (expr_type);
9049   unsigned int type_elt_bits = vector_element_size (type_bits, type_units);
9050   unsigned int expr_elt_bits = vector_element_size (expr_bits, expr_units);
9051 
9052   /* We can only preserve the semantics of a stepped pattern if the new
9053      vector element is an integer of the same size.  */
9054   if (VECTOR_CST_STEPPED_P (expr)
9055       && (!INTEGRAL_TYPE_P (type) || type_elt_bits != expr_elt_bits))
9056     return NULL_TREE;
9057 
9058   /* The number of bits needed to encode one element from every pattern
9059      of the original vector.  */
9060   unsigned int expr_sequence_bits
9061     = VECTOR_CST_NPATTERNS (expr) * expr_elt_bits;
9062 
9063   /* The number of bits needed to encode one element from every pattern
9064      of the result.  */
9065   unsigned int type_sequence_bits
9066     = least_common_multiple (expr_sequence_bits, type_elt_bits);
9067 
9068   /* Don't try to read more bytes than are available, which can happen
9069      for constant-sized vectors if TYPE has larger elements than EXPR_TYPE.
9070      The general VIEW_CONVERT handling can cope with that case, so there's
9071      no point complicating things here.  */
9072   unsigned int nelts_per_pattern = VECTOR_CST_NELTS_PER_PATTERN (expr);
9073   unsigned int buffer_bytes = CEIL (nelts_per_pattern * type_sequence_bits,
9074 				    BITS_PER_UNIT);
9075   unsigned int buffer_bits = buffer_bytes * BITS_PER_UNIT;
9076   if (known_gt (buffer_bits, expr_bits))
9077     return NULL_TREE;
9078 
9079   /* Get enough bytes of EXPR to form the new encoding.  */
9080   auto_vec<unsigned char, 128> buffer (buffer_bytes);
9081   buffer.quick_grow (buffer_bytes);
9082   if (native_encode_vector_part (expr, buffer.address (), buffer_bytes, 0,
9083 				 buffer_bits / expr_elt_bits)
9084       != (int) buffer_bytes)
9085     return NULL_TREE;
9086 
9087   /* Reencode the bytes as TYPE.  */
9088   unsigned int type_npatterns = type_sequence_bits / type_elt_bits;
9089   return native_interpret_vector_part (type, &buffer[0], buffer.length (),
9090 				       type_npatterns, nelts_per_pattern);
9091 }
9092 
9093 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
9094    TYPE at compile-time.  If we're unable to perform the conversion
9095    return NULL_TREE.  */
9096 
9097 static tree
9098 fold_view_convert_expr (tree type, tree expr)
9099 {
9100   /* We support up to 512-bit values (for V8DFmode).  */
9101   unsigned char buffer[64];
9102   int len;
9103 
9104   /* Check that the host and target are sane.  */
9105   if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
9106     return NULL_TREE;
9107 
9108   if (VECTOR_TYPE_P (type) && TREE_CODE (expr) == VECTOR_CST)
9109     if (tree res = fold_view_convert_vector_encoding (type, expr))
9110       return res;
9111 
9112   len = native_encode_expr (expr, buffer, sizeof (buffer));
9113   if (len == 0)
9114     return NULL_TREE;
9115 
9116   return native_interpret_expr (type, buffer, len);
9117 }
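
/* Folding sketch: on an IEEE target, VIEW_CONVERT_EXPR<int>(1.0f) is
   folded by encoding the REAL_CST into BUFFER via native_encode_expr
   and reading the same bytes back as an int via native_interpret_expr,
   yielding the INTEGER_CST 0x3f800000 regardless of endianness, since
   both steps use target byte order.  */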
9118 
9119 /* Build an expression for the address of T.  Folds away INDIRECT_REF
9120    to avoid confusing the gimplify process.  */
9121 
9122 tree
9123 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
9124 {
9125   /* The size of the object is not relevant when talking about its address.  */
9126   if (TREE_CODE (t) == WITH_SIZE_EXPR)
9127     t = TREE_OPERAND (t, 0);
9128 
9129   if (TREE_CODE (t) == INDIRECT_REF)
9130     {
9131       t = TREE_OPERAND (t, 0);
9132 
9133       if (TREE_TYPE (t) != ptrtype)
9134 	t = build1_loc (loc, NOP_EXPR, ptrtype, t);
9135     }
9136   else if (TREE_CODE (t) == MEM_REF
9137 	   && integer_zerop (TREE_OPERAND (t, 1)))
9138     {
9139       t = TREE_OPERAND (t, 0);
9140 
9141       if (TREE_TYPE (t) != ptrtype)
9142 	t = fold_convert_loc (loc, ptrtype, t);
9143     }
9144   else if (TREE_CODE (t) == MEM_REF
9145 	   && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
9146     return fold_binary (POINTER_PLUS_EXPR, ptrtype,
9147 			TREE_OPERAND (t, 0),
9148 			convert_to_ptrofftype (TREE_OPERAND (t, 1)));
9149   else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
9150     {
9151       t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
9152 
9153       if (TREE_TYPE (t) != ptrtype)
9154 	t = fold_convert_loc (loc, ptrtype, t);
9155     }
9156   else
9157     t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
9158 
9159   return t;
9160 }
9161 
9162 /* Build an expression for the address of T.  */
9163 
9164 tree
9165 build_fold_addr_expr_loc (location_t loc, tree t)
9166 {
9167   tree ptrtype = build_pointer_type (TREE_TYPE (t));
9168 
9169   return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
9170 }
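
/* Example: build_fold_addr_expr_loc applied to the INDIRECT_REF *P
   yields P itself (converted if the pointer type differs), and the
   address of a MEM_REF of P with zero offset likewise folds back to P,
   instead of materializing an ADDR_EXPR around the dereference.  */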
9171 
9172 /* Fold a unary expression of code CODE and type TYPE with operand
9173    OP0.  Return the folded expression if folding is successful.
9174    Otherwise, return NULL_TREE.  */
9175 
9176 tree
9177 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
9178 {
9179   tree tem;
9180   tree arg0;
9181   enum tree_code_class kind = TREE_CODE_CLASS (code);
9182 
9183   gcc_assert (IS_EXPR_CODE_CLASS (kind)
9184 	      && TREE_CODE_LENGTH (code) == 1);
9185 
9186   arg0 = op0;
9187   if (arg0)
9188     {
9189       if (CONVERT_EXPR_CODE_P (code)
9190 	  || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
9191 	{
9192 	  /* Don't use STRIP_NOPS, because signedness of argument type
9193 	     matters.  */
9194 	  STRIP_SIGN_NOPS (arg0);
9195 	}
9196       else
9197 	{
9198 	  /* Strip any conversions that don't change the mode.  This
9199 	     is safe for every expression, except for a comparison
9200 	     expression because its signedness is derived from its
9201 	     operands.
9202 
9203 	     Note that this is done as an internal manipulation within
9204 	     the constant folder, in order to find the simplest
9205 	     representation of the arguments so that their form can be
9206 	     studied.  In any case, the appropriate type conversions
9207 	     should be put back in the tree that will get out of the
9208 	     constant folder.  */
9209 	  STRIP_NOPS (arg0);
9210 	}
9211 
9212       if (CONSTANT_CLASS_P (arg0))
9213 	{
9214 	  tree tem = const_unop (code, type, arg0);
9215 	  if (tem)
9216 	    {
9217 	      if (TREE_TYPE (tem) != type)
9218 		tem = fold_convert_loc (loc, type, tem);
9219 	      return tem;
9220 	    }
9221 	}
9222     }
9223 
9224   tem = generic_simplify (loc, code, type, op0);
9225   if (tem)
9226     return tem;
9227 
9228   if (TREE_CODE_CLASS (code) == tcc_unary)
9229     {
9230       if (TREE_CODE (arg0) == COMPOUND_EXPR)
9231 	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9232 		       fold_build1_loc (loc, code, type,
9233 				    fold_convert_loc (loc, TREE_TYPE (op0),
9234 						      TREE_OPERAND (arg0, 1))));
9235       else if (TREE_CODE (arg0) == COND_EXPR)
9236 	{
9237 	  tree arg01 = TREE_OPERAND (arg0, 1);
9238 	  tree arg02 = TREE_OPERAND (arg0, 2);
9239 	  if (! VOID_TYPE_P (TREE_TYPE (arg01)))
9240 	    arg01 = fold_build1_loc (loc, code, type,
9241 				 fold_convert_loc (loc,
9242 						   TREE_TYPE (op0), arg01));
9243 	  if (! VOID_TYPE_P (TREE_TYPE (arg02)))
9244 	    arg02 = fold_build1_loc (loc, code, type,
9245 				 fold_convert_loc (loc,
9246 						   TREE_TYPE (op0), arg02));
9247 	  tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
9248 			     arg01, arg02);
9249 
9250 	  /* If this was a conversion, and all we did was to move into
9251 	     inside the COND_EXPR, bring it back out.  But leave it if
9252 	     it is a conversion from integer to integer and the
9253 	     result precision is no wider than a word since such a
9254 	     conversion is cheap and may be optimized away by combine,
9255 	     while it couldn't if it were outside the COND_EXPR.  Then return
9256 	     so we don't get into an infinite recursion loop taking the
9257 	     conversion out and then back in.  */
9258 
9259 	  if ((CONVERT_EXPR_CODE_P (code)
9260 	       || code == NON_LVALUE_EXPR)
9261 	      && TREE_CODE (tem) == COND_EXPR
9262 	      && TREE_CODE (TREE_OPERAND (tem, 1)) == code
9263 	      && TREE_CODE (TREE_OPERAND (tem, 2)) == code
9264 	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
9265 	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
9266 	      && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
9267 		  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
9268 	      && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
9269 		     && (INTEGRAL_TYPE_P
9270 			 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
9271 		     && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
9272 		  || flag_syntax_only))
9273 	    tem = build1_loc (loc, code, type,
9274 			      build3 (COND_EXPR,
9275 				      TREE_TYPE (TREE_OPERAND
9276 						 (TREE_OPERAND (tem, 1), 0)),
9277 				      TREE_OPERAND (tem, 0),
9278 				      TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
9279 				      TREE_OPERAND (TREE_OPERAND (tem, 2),
9280 						    0)));
9281 	  return tem;
9282 	}
9283    }
9284 
9285   switch (code)
9286     {
9287     case NON_LVALUE_EXPR:
9288       if (!maybe_lvalue_p (op0))
9289 	return fold_convert_loc (loc, type, op0);
9290       return NULL_TREE;
9291 
9292     CASE_CONVERT:
9293     case FLOAT_EXPR:
9294     case FIX_TRUNC_EXPR:
9295       if (COMPARISON_CLASS_P (op0))
9296 	{
9297 	  /* If we have (type) (a CMP b) and type is an integral type, return
9298 	     new expression involving the new type.  Canonicalize
9299 	     (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
9300 	     non-integral type.
9301 	     Do not fold the result as that would not simplify further;
9302 	     folding again would also result in infinite recursion.  */
9303 	  if (TREE_CODE (type) == BOOLEAN_TYPE)
9304 	    return build2_loc (loc, TREE_CODE (op0), type,
9305 			       TREE_OPERAND (op0, 0),
9306 			       TREE_OPERAND (op0, 1));
9307 	  else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
9308 		   && TREE_CODE (type) != VECTOR_TYPE)
9309 	    return build3_loc (loc, COND_EXPR, type, op0,
9310 			       constant_boolean_node (true, type),
9311 			       constant_boolean_node (false, type));
9312 	}
9313 
9314       /* Handle (T *)&A.B.C for A being of type T and B and C
9315 	 living at offset zero.  This occurs frequently in
9316 	 C++ upcasting and then accessing the base.  */
9317       if (TREE_CODE (op0) == ADDR_EXPR
9318 	  && POINTER_TYPE_P (type)
9319 	  && handled_component_p (TREE_OPERAND (op0, 0)))
9320         {
9321 	  poly_int64 bitsize, bitpos;
9322 	  tree offset;
9323 	  machine_mode mode;
9324 	  int unsignedp, reversep, volatilep;
9325 	  tree base
9326 	    = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
9327 				   &offset, &mode, &unsignedp, &reversep,
9328 				   &volatilep);
9329 	  /* If the reference was to a (constant) zero offset, we can use
9330 	     the address of the base if it has the same base type
9331 	     as the result type and the pointer type is unqualified.  */
9332 	  if (!offset
9333 	      && known_eq (bitpos, 0)
9334 	      && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
9335 		  == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
9336 	      && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
9337 	    return fold_convert_loc (loc, type,
9338 				     build_fold_addr_expr_loc (loc, base));
9339         }
9340 
9341       if (TREE_CODE (op0) == MODIFY_EXPR
9342 	  && TREE_CONSTANT (TREE_OPERAND (op0, 1))
9343 	  /* Detect assigning a bitfield.  */
9344 	  && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
9345 	       && DECL_BIT_FIELD
9346 	       (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
9347 	{
9348 	  /* Don't leave an assignment inside a conversion
9349 	     unless assigning a bitfield.  */
9350 	  tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
9351 	  /* First do the assignment, then return converted constant.  */
9352 	  tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
9353 	  TREE_NO_WARNING (tem) = 1;
9354 	  TREE_USED (tem) = 1;
9355 	  return tem;
9356 	}
9357 
9358       /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
9359 	 constant (if x has signed type, the sign bit cannot be set
9360 	 in c).  This folds extension into the BIT_AND_EXPR.
9361 	 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
9362 	 very likely don't have maximal range for their precision and this
9363 	 transformation effectively doesn't preserve non-maximal ranges.  */
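      /* For example, with signed char X, (int)(X & 0x7f) becomes
	 (int)X & 0x7f: the constant leaves the sign bit of X clear, so
	 extending before or after the AND gives the same result.  */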
9364       if (TREE_CODE (type) == INTEGER_TYPE
9365 	  && TREE_CODE (op0) == BIT_AND_EXPR
9366 	  && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
9367 	{
9368 	  tree and_expr = op0;
9369 	  tree and0 = TREE_OPERAND (and_expr, 0);
9370 	  tree and1 = TREE_OPERAND (and_expr, 1);
9371 	  int change = 0;
9372 
9373 	  if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
9374 	      || (TYPE_PRECISION (type)
9375 		  <= TYPE_PRECISION (TREE_TYPE (and_expr))))
9376 	    change = 1;
9377 	  else if (TYPE_PRECISION (TREE_TYPE (and1))
9378 		   <= HOST_BITS_PER_WIDE_INT
9379 		   && tree_fits_uhwi_p (and1))
9380 	    {
9381 	      unsigned HOST_WIDE_INT cst;
9382 
9383 	      cst = tree_to_uhwi (and1);
9384 	      cst &= HOST_WIDE_INT_M1U
9385 		     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
9386 	      change = (cst == 0);
9387 	      if (change
9388 		  && !flag_syntax_only
9389 		  && (load_extend_op (TYPE_MODE (TREE_TYPE (and0)))
9390 		      == ZERO_EXTEND))
9391 		{
9392 		  tree uns = unsigned_type_for (TREE_TYPE (and0));
9393 		  and0 = fold_convert_loc (loc, uns, and0);
9394 		  and1 = fold_convert_loc (loc, uns, and1);
9395 		}
9396 	    }
9397 	  if (change)
9398 	    {
9399 	      tem = force_fit_type (type, wi::to_widest (and1), 0,
9400 				    TREE_OVERFLOW (and1));
9401 	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
9402 				      fold_convert_loc (loc, type, and0), tem);
9403 	    }
9404 	}
9405 
9406       /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
9407 	 cast (T1)X will fold away.  We assume that this happens when X itself
9408 	 is a cast.  */
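      /* For example, with char *P, (char *)((int *)P p+ N) becomes
	 (char *)(int *)P p+ N, and the inner cast then folds to P.  */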
9409       if (POINTER_TYPE_P (type)
9410 	  && TREE_CODE (arg0) == POINTER_PLUS_EXPR
9411 	  && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
9412 	{
9413 	  tree arg00 = TREE_OPERAND (arg0, 0);
9414 	  tree arg01 = TREE_OPERAND (arg0, 1);
9415 
9416 	  /* If -fsanitize=alignment, avoid this optimization in GENERIC
9417 	     when the pointed-to type needs higher alignment than
9418 	     the pointed-to type of the p+ first operand.  */
9419 	  if (!in_gimple_form
9420 	      && sanitize_flags_p (SANITIZE_ALIGNMENT)
9421 	      && (min_align_of_type (TREE_TYPE (type))
9422 		  > min_align_of_type (TREE_TYPE (TREE_TYPE (arg00)))))
9423 	    return NULL_TREE;
9424 
9425 	  arg00 = fold_convert_loc (loc, type, arg00);
9426 	  return fold_build_pointer_plus_loc (loc, arg00, arg01);
9427 	}
9428 
9429       /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
9430 	 of the same precision, and X has an integer type not narrower than
9431 	 T1 or T2, i.e. the cast (T2)X isn't an extension.  */
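      /* For example, (int)~(unsigned int)X becomes ~(int)X, as int and
	 unsigned int have the same precision.  */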
9432       if (INTEGRAL_TYPE_P (type)
9433 	  && TREE_CODE (op0) == BIT_NOT_EXPR
9434 	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
9435 	  && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
9436 	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
9437 	{
9438 	  tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
9439 	  if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
9440 	      && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
9441 	    return fold_build1_loc (loc, BIT_NOT_EXPR, type,
9442 				fold_convert_loc (loc, type, tem));
9443 	}
9444 
9445       /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
9446 	 type of X and Y (integer types only).  */
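      /* For example, (unsigned char)(X * Y) with int operands can be
	 computed as (unsigned char)X * (unsigned char)Y, since only the
	 low-order bits of the product survive the truncation.  */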
9447       if (INTEGRAL_TYPE_P (type)
9448 	  && TREE_CODE (op0) == MULT_EXPR
9449 	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
9450 	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
9451 	{
9452 	  /* Be careful not to introduce new overflows.  */
9453 	  tree mult_type;
9454           if (TYPE_OVERFLOW_WRAPS (type))
9455 	    mult_type = type;
9456 	  else
9457 	    mult_type = unsigned_type_for (type);
9458 
9459 	  if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
9460 	    {
9461 	      tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
9462 				 fold_convert_loc (loc, mult_type,
9463 						   TREE_OPERAND (op0, 0)),
9464 				 fold_convert_loc (loc, mult_type,
9465 						   TREE_OPERAND (op0, 1)));
9466 	      return fold_convert_loc (loc, type, tem);
9467 	    }
9468 	}
9469 
9470       return NULL_TREE;
9471 
9472     case VIEW_CONVERT_EXPR:
9473       if (TREE_CODE (op0) == MEM_REF)
9474         {
9475 	  if (TYPE_ALIGN (TREE_TYPE (op0)) != TYPE_ALIGN (type))
9476 	    type = build_aligned_type (type, TYPE_ALIGN (TREE_TYPE (op0)));
9477 	  tem = fold_build2_loc (loc, MEM_REF, type,
9478 				 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
9479 	  REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
9480 	  return tem;
9481 	}
9482 
9483       return NULL_TREE;
9484 
9485     case NEGATE_EXPR:
9486       tem = fold_negate_expr (loc, arg0);
9487       if (tem)
9488 	return fold_convert_loc (loc, type, tem);
9489       return NULL_TREE;
9490 
9491     case ABS_EXPR:
9492       /* Convert fabs((double)float) into (double)fabsf(float).  */
9493       if (TREE_CODE (arg0) == NOP_EXPR
9494 	  && TREE_CODE (type) == REAL_TYPE)
9495 	{
9496 	  tree targ0 = strip_float_extensions (arg0);
9497 	  if (targ0 != arg0)
9498 	    return fold_convert_loc (loc, type,
9499 				     fold_build1_loc (loc, ABS_EXPR,
9500 						  TREE_TYPE (targ0),
9501 						  targ0));
9502 	}
9503       return NULL_TREE;
9504 
9505     case BIT_NOT_EXPR:
9506       /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
9507       if (TREE_CODE (arg0) == BIT_XOR_EXPR
9508 	  && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
9509 				    fold_convert_loc (loc, type,
9510 						      TREE_OPERAND (arg0, 0)))))
9511 	return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
9512 				fold_convert_loc (loc, type,
9513 						  TREE_OPERAND (arg0, 1)));
9514       else if (TREE_CODE (arg0) == BIT_XOR_EXPR
9515 	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
9516 			       	     fold_convert_loc (loc, type,
9517 						       TREE_OPERAND (arg0, 1)))))
9518 	return fold_build2_loc (loc, BIT_XOR_EXPR, type,
9519 			    fold_convert_loc (loc, type,
9520 					      TREE_OPERAND (arg0, 0)), tem);
9521 
9522       return NULL_TREE;
9523 
9524     case TRUTH_NOT_EXPR:
9525       /* Note that the operand of this must be an int
9526 	 and its values must be 0 or 1.
9527 	 ("true" is a fixed value perhaps depending on the language,
9528 	 but we don't handle values other than 1 correctly yet.)  */
9529       tem = fold_truth_not_expr (loc, arg0);
9530       if (!tem)
9531 	return NULL_TREE;
9532       return fold_convert_loc (loc, type, tem);
9533 
9534     case INDIRECT_REF:
9535       /* Fold *&X to X if X is an lvalue.  */
9536       if (TREE_CODE (op0) == ADDR_EXPR)
9537 	{
9538 	  tree op00 = TREE_OPERAND (op0, 0);
9539 	  if ((VAR_P (op00)
9540 	       || TREE_CODE (op00) == PARM_DECL
9541 	       || TREE_CODE (op00) == RESULT_DECL)
9542 	      && !TREE_READONLY (op00))
9543 	    return op00;
9544 	}
9545       return NULL_TREE;
9546 
9547     default:
9548       return NULL_TREE;
9549     } /* switch (code) */
9550 }
9551 
9552 
9553 /* If the operation was a conversion, do _not_ mark a resulting constant
9554    with TREE_OVERFLOW if the original constant was not.  These conversions
9555    have implementation defined behavior and retaining the TREE_OVERFLOW
9556    flag here would confuse later passes such as VRP.  */
9557 tree
9558 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
9559 				tree type, tree op0)
9560 {
9561   tree res = fold_unary_loc (loc, code, type, op0);
9562   if (res
9563       && TREE_CODE (res) == INTEGER_CST
9564       && TREE_CODE (op0) == INTEGER_CST
9565       && CONVERT_EXPR_CODE_P (code))
9566     TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
9567 
9568   return res;
9569 }
9570 
9571 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
9572    operands OP0 and OP1.  LOC is the location of the resulting expression.
9573    ARG0 and ARG1 are the NOP-stripped forms of OP0 and OP1.
9574    Return the folded expression if folding is successful.  Otherwise,
9575    return NULL_TREE.  */
9576 static tree
9577 fold_truth_andor (location_t loc, enum tree_code code, tree type,
9578 		  tree arg0, tree arg1, tree op0, tree op1)
9579 {
9580   tree tem;
9581 
9582   /* We only do these simplifications if we are optimizing.  */
9583   if (!optimize)
9584     return NULL_TREE;
9585 
9586   /* Check for things like (A || B) && (A || C).  We can convert this
9587      to A || (B && C).  Note that either operator can be any of the four
9588      truth and/or operations and the transformation will still be
9589      valid.   Also note that we only care about order for the
9590      ANDIF and ORIF operators.  If B contains side effects, this
9591      might change the truth-value of A.  */
9592   if (TREE_CODE (arg0) == TREE_CODE (arg1)
9593       && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
9594 	  || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
9595 	  || TREE_CODE (arg0) == TRUTH_AND_EXPR
9596 	  || TREE_CODE (arg0) == TRUTH_OR_EXPR)
9597       && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
9598     {
9599       tree a00 = TREE_OPERAND (arg0, 0);
9600       tree a01 = TREE_OPERAND (arg0, 1);
9601       tree a10 = TREE_OPERAND (arg1, 0);
9602       tree a11 = TREE_OPERAND (arg1, 1);
9603       int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
9604 			  || TREE_CODE (arg0) == TRUTH_AND_EXPR)
9605 			 && (code == TRUTH_AND_EXPR
9606 			     || code == TRUTH_OR_EXPR));
9607 
9608       if (operand_equal_p (a00, a10, 0))
9609 	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9610 			    fold_build2_loc (loc, code, type, a01, a11));
9611       else if (commutative && operand_equal_p (a00, a11, 0))
9612 	return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
9613 			    fold_build2_loc (loc, code, type, a01, a10));
9614       else if (commutative && operand_equal_p (a01, a10, 0))
9615 	return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
9616 			    fold_build2_loc (loc, code, type, a00, a11));
9617 
9618       /* This case is tricky because we must either have commutative
9619 	 operators or else A10 must not have side-effects.  */
9620 
9621       else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
9622 	       && operand_equal_p (a01, a11, 0))
9623 	return fold_build2_loc (loc, TREE_CODE (arg0), type,
9624 			    fold_build2_loc (loc, code, type, a00, a10),
9625 			    a01);
9626     }
9627 
9628   /* See if we can build a range comparison.  */
9629   if ((tem = fold_range_test (loc, code, type, op0, op1)) != 0)
9630     return tem;
9631 
9632   if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
9633       || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
9634     {
9635       tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
9636       if (tem)
9637 	return fold_build2_loc (loc, code, type, tem, arg1);
9638     }
9639 
9640   if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
9641       || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
9642     {
9643       tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
9644       if (tem)
9645 	return fold_build2_loc (loc, code, type, arg0, tem);
9646     }
9647 
9648   /* Check for the possibility of merging component references.  If our
9649      lhs is another similar operation, try to merge its rhs with our
9650      rhs.  Then try to merge our lhs and rhs.  */
9651   if (TREE_CODE (arg0) == code
9652       && (tem = fold_truth_andor_1 (loc, code, type,
9653 				    TREE_OPERAND (arg0, 1), arg1)) != 0)
9654     return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9655 
9656   if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
9657     return tem;
9658 
9659   bool logical_op_non_short_circuit = LOGICAL_OP_NON_SHORT_CIRCUIT;
9660   if (param_logical_op_non_short_circuit != -1)
9661     logical_op_non_short_circuit
9662       = param_logical_op_non_short_circuit;
9663   if (logical_op_non_short_circuit
9664       && !flag_sanitize_coverage
9665       && (code == TRUTH_AND_EXPR
9666           || code == TRUTH_ANDIF_EXPR
9667           || code == TRUTH_OR_EXPR
9668           || code == TRUTH_ORIF_EXPR))
9669     {
9670       enum tree_code ncode, icode;
9671 
9672       ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
9673 	      ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
9674       icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
9675 
9676       /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
9677 	 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
9678 	 We don't want to pack more than two leaves into a non-IF AND/OR
9679 	 expression.
9680 	 If the tree code of the left-hand operand isn't an AND/OR-IF code
9681 	 and isn't equal to IF-CODE, then we don't want to add the
9682 	 right-hand operand.  If the inner right-hand side of the
9683 	 left-hand operand has side-effects, or isn't simple, then we
9684 	 can't add to it, as otherwise we might destroy the if-sequence.  */
9685       if (TREE_CODE (arg0) == icode
9686 	  && simple_operand_p_2 (arg1)
9687 	  /* Needed for sequence points to handle trappings, and
9688 	     side-effects.  */
9689 	  && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
9690 	{
9691 	  tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
9692 				 arg1);
9693 	  return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
9694 				  tem);
9695 	}
9696 	/* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
9697 	   or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C).  */
9698       else if (TREE_CODE (arg1) == icode
9699 	  && simple_operand_p_2 (arg0)
9700 	  /* Needed for sequence points to handle trappings, and
9701 	     side-effects.  */
9702 	  && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
9703 	{
9704 	  tem = fold_build2_loc (loc, ncode, type,
9705 				 arg0, TREE_OPERAND (arg1, 0));
9706 	  return fold_build2_loc (loc, icode, type, tem,
9707 				  TREE_OPERAND (arg1, 1));
9708 	}
9709       /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
9710 	 into (A OR B).
9711 	 For sequence point consistency, we need to check for trapping,
9712 	 and side-effects.  */
9713       else if (code == icode && simple_operand_p_2 (arg0)
9714                && simple_operand_p_2 (arg1))
9715 	return fold_build2_loc (loc, ncode, type, arg0, arg1);
9716     }
9717 
9718   return NULL_TREE;
9719 }
9720 
9721 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
9722    by changing CODE to reduce the magnitude of constants involved in
9723    ARG0 of the comparison.
9724    Returns a canonicalized comparison tree if a simplification was
9725    possible, otherwise returns NULL_TREE.
9726    Set *STRICT_OVERFLOW_P to true if the canonicalization is only
9727    valid if signed overflow is undefined.  */
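/* For example, with undefined signed overflow, A - 1 < B is
   canonicalized to A <= B: the constant shrinks from 1 to 0 and the
   zero then folds away entirely.  */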
9728 
9729 static tree
9730 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
9731 				 tree arg0, tree arg1,
9732 				 bool *strict_overflow_p)
9733 {
9734   enum tree_code code0 = TREE_CODE (arg0);
9735   tree t, cst0 = NULL_TREE;
9736   int sgn0;
9737 
9738   /* Match A +- CST code arg1.  We can change this only if overflow
9739      is undefined.  */
9740   if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9741 	 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
9742 	/* In principle pointers also have undefined overflow behavior,
9743 	   but that causes problems elsewhere.  */
9744 	&& !POINTER_TYPE_P (TREE_TYPE (arg0))
9745 	&& (code0 == MINUS_EXPR
9746 	    || code0 == PLUS_EXPR)
9747 	&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
9748     return NULL_TREE;
9749 
9750   /* Identify the constant in arg0 and its sign.  */
9751   cst0 = TREE_OPERAND (arg0, 1);
9752   sgn0 = tree_int_cst_sgn (cst0);
9753 
9754   /* Overflowed constants and zero will cause problems.  */
9755   if (integer_zerop (cst0)
9756       || TREE_OVERFLOW (cst0))
9757     return NULL_TREE;
9758 
9759   /* See if we can reduce the magnitude of the constant in
9760      arg0 by changing the comparison code.  */
9761   /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
9762   if (code == LT_EXPR
9763       && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9764     code = LE_EXPR;
9765   /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
9766   else if (code == GT_EXPR
9767 	   && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9768     code = GE_EXPR;
9769   /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
9770   else if (code == LE_EXPR
9771 	   && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9772     code = LT_EXPR;
9773   /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
9774   else if (code == GE_EXPR
9775 	   && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9776     code = GT_EXPR;
9777   else
9778     return NULL_TREE;
9779   *strict_overflow_p = true;
9780 
9781   /* Now build the constant reduced in magnitude.  But not if that
9782      would produce one outside of its type's range.  */
9783   if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
9784       && ((sgn0 == 1
9785 	   && TYPE_MIN_VALUE (TREE_TYPE (cst0))
9786 	   && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
9787 	  || (sgn0 == -1
9788 	      && TYPE_MAX_VALUE (TREE_TYPE (cst0))
9789 	      && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
9790     return NULL_TREE;
9791 
9792   t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
9793 		       cst0, build_int_cst (TREE_TYPE (cst0), 1));
9794   t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
9795   t = fold_convert (TREE_TYPE (arg1), t);
9796 
9797   return fold_build2_loc (loc, code, type, t, arg1);
9798 }
9799 
9800 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
9801    overflow further.  Try to decrease the magnitude of constants involved
9802    by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
9803    and put sole constants at the second argument position.
9804    Returns the canonicalized tree if changed, otherwise NULL_TREE.  */
9805 
9806 static tree
9807 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
9808 			       tree arg0, tree arg1)
9809 {
9810   tree t;
9811   bool strict_overflow_p;
9812   const char * const warnmsg = G_("assuming signed overflow does not occur "
9813 				  "when reducing constant in comparison");
9814 
9815   /* Try canonicalization by simplifying arg0.  */
9816   strict_overflow_p = false;
9817   t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
9818 				       &strict_overflow_p);
9819   if (t)
9820     {
9821       if (strict_overflow_p)
9822 	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9823       return t;
9824     }
9825 
9826   /* Try canonicalization by simplifying arg1 using the swapped
9827      comparison.  */
9828   code = swap_tree_comparison (code);
9829   strict_overflow_p = false;
9830   t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
9831 				       &strict_overflow_p);
9832   if (t && strict_overflow_p)
9833     fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9834   return t;
9835 }
9836 
9837 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
9838    space.  This is used to avoid issuing overflow warnings for
9839    expressions like &p->x which cannot wrap.  */
9840 
9841 static bool
9842 pointer_may_wrap_p (tree base, tree offset, poly_int64 bitpos)
9843 {
9844   if (!POINTER_TYPE_P (TREE_TYPE (base)))
9845     return true;
9846 
9847   if (maybe_lt (bitpos, 0))
9848     return true;
9849 
9850   poly_wide_int wi_offset;
9851   int precision = TYPE_PRECISION (TREE_TYPE (base));
9852   if (offset == NULL_TREE)
9853     wi_offset = wi::zero (precision);
9854   else if (!poly_int_tree_p (offset) || TREE_OVERFLOW (offset))
9855     return true;
9856   else
9857     wi_offset = wi::to_poly_wide (offset);
9858 
9859   wi::overflow_type overflow;
9860   poly_wide_int units = wi::shwi (bits_to_bytes_round_down (bitpos),
9861 				  precision);
9862   poly_wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
9863   if (overflow)
9864     return true;
9865 
9866   poly_uint64 total_hwi, size;
9867   if (!total.to_uhwi (&total_hwi)
9868       || !poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (base))),
9869 			   &size)
9870       || known_eq (size, 0U))
9871     return true;
9872 
9873   if (known_le (total_hwi, size))
9874     return false;
9875 
9876   /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
9877      array.  */
9878   if (TREE_CODE (base) == ADDR_EXPR
9879       && poly_int_tree_p (TYPE_SIZE_UNIT (TREE_TYPE (TREE_OPERAND (base, 0))),
9880 			  &size)
9881       && maybe_ne (size, 0U)
9882       && known_le (total_hwi, size))
9883     return false;
9884 
9885   return true;
9886 }
9887 
9888 /* Return a positive integer when the symbol DECL is known to have
9889    a nonzero address, zero when it's known not to (e.g., it's a weak
9890    symbol), and a negative integer when the symbol is not yet in the
9891    symbol table and so whether or not its address is zero is unknown.
9892    For function-local objects, always return a positive integer.  */
9893 static int
9894 maybe_nonzero_address (tree decl)
9895 {
9896   if (DECL_P (decl) && decl_in_symtab_p (decl))
9897     if (struct symtab_node *symbol = symtab_node::get_create (decl))
9898       return symbol->nonzero_address ();
9899 
9900   /* Function local objects are never NULL.  */
9901   if (DECL_P (decl)
9902       && (DECL_CONTEXT (decl)
9903       && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
9904       && auto_var_in_fn_p (decl, DECL_CONTEXT (decl))))
9905     return 1;
9906 
9907   return -1;
9908 }
9909 
9910 /* Subroutine of fold_binary.  This routine performs all of the
9911    transformations that are common to the equality/inequality
9912    operators (EQ_EXPR and NE_EXPR) and the ordering operators
9913    (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
9914    fold_binary should call fold_binary.  Fold a comparison with
9915    tree code CODE and type TYPE with operands OP0 and OP1.  Return
9916    the folded comparison or NULL_TREE.  */
9917 
9918 static tree
9919 fold_comparison (location_t loc, enum tree_code code, tree type,
9920 		 tree op0, tree op1)
9921 {
9922   const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
9923   tree arg0, arg1, tem;
9924 
9925   arg0 = op0;
9926   arg1 = op1;
9927 
9928   STRIP_SIGN_NOPS (arg0);
9929   STRIP_SIGN_NOPS (arg1);
9930 
9931   /* For comparisons of pointers we can decompose them into a compile
9932      time comparison of the base objects and the offsets into the object.
9933      This requires at least one operand being an ADDR_EXPR or a
9934      POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
9935   if (POINTER_TYPE_P (TREE_TYPE (arg0))
9936       && (TREE_CODE (arg0) == ADDR_EXPR
9937 	  || TREE_CODE (arg1) == ADDR_EXPR
9938 	  || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9939 	  || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9940     {
9941       tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9942       poly_int64 bitsize, bitpos0 = 0, bitpos1 = 0;
9943       machine_mode mode;
9944       int volatilep, reversep, unsignedp;
9945       bool indirect_base0 = false, indirect_base1 = false;
9946 
9947       /* Get base and offset for the access.  Strip ADDR_EXPR for
9948 	 get_inner_reference, but put it back by stripping INDIRECT_REF
9949 	 off the base object if possible.  indirect_baseN will be true
9950 	 if baseN is not an address but refers to the object itself.  */
9951       base0 = arg0;
9952       if (TREE_CODE (arg0) == ADDR_EXPR)
9953 	{
9954 	  base0
9955 	    = get_inner_reference (TREE_OPERAND (arg0, 0),
9956 				   &bitsize, &bitpos0, &offset0, &mode,
9957 				   &unsignedp, &reversep, &volatilep);
9958 	  if (TREE_CODE (base0) == INDIRECT_REF)
9959 	    base0 = TREE_OPERAND (base0, 0);
9960 	  else
9961 	    indirect_base0 = true;
9962 	}
9963       else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9964 	{
9965 	  base0 = TREE_OPERAND (arg0, 0);
9966 	  STRIP_SIGN_NOPS (base0);
9967 	  if (TREE_CODE (base0) == ADDR_EXPR)
9968 	    {
9969 	      base0
9970 		= get_inner_reference (TREE_OPERAND (base0, 0),
9971 				       &bitsize, &bitpos0, &offset0, &mode,
9972 				       &unsignedp, &reversep, &volatilep);
9973 	      if (TREE_CODE (base0) == INDIRECT_REF)
9974 		base0 = TREE_OPERAND (base0, 0);
9975 	      else
9976 		indirect_base0 = true;
9977 	    }
9978 	  if (offset0 == NULL_TREE || integer_zerop (offset0))
9979 	    offset0 = TREE_OPERAND (arg0, 1);
9980 	  else
9981 	    offset0 = size_binop (PLUS_EXPR, offset0,
9982 				  TREE_OPERAND (arg0, 1));
9983 	  if (poly_int_tree_p (offset0))
9984 	    {
9985 	      poly_offset_int tem = wi::sext (wi::to_poly_offset (offset0),
9986 					      TYPE_PRECISION (sizetype));
9987 	      tem <<= LOG2_BITS_PER_UNIT;
9988 	      tem += bitpos0;
9989 	      if (tem.to_shwi (&bitpos0))
9990 		offset0 = NULL_TREE;
9991 	    }
9992 	}
9993 
9994       base1 = arg1;
9995       if (TREE_CODE (arg1) == ADDR_EXPR)
9996 	{
9997 	  base1
9998 	    = get_inner_reference (TREE_OPERAND (arg1, 0),
9999 				   &bitsize, &bitpos1, &offset1, &mode,
10000 				   &unsignedp, &reversep, &volatilep);
10001 	  if (TREE_CODE (base1) == INDIRECT_REF)
10002 	    base1 = TREE_OPERAND (base1, 0);
10003 	  else
10004 	    indirect_base1 = true;
10005 	}
10006       else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10007 	{
10008 	  base1 = TREE_OPERAND (arg1, 0);
10009 	  STRIP_SIGN_NOPS (base1);
10010 	  if (TREE_CODE (base1) == ADDR_EXPR)
10011 	    {
10012 	      base1
10013 		= get_inner_reference (TREE_OPERAND (base1, 0),
10014 				       &bitsize, &bitpos1, &offset1, &mode,
10015 				       &unsignedp, &reversep, &volatilep);
10016 	      if (TREE_CODE (base1) == INDIRECT_REF)
10017 		base1 = TREE_OPERAND (base1, 0);
10018 	      else
10019 		indirect_base1 = true;
10020 	    }
10021 	  if (offset1 == NULL_TREE || integer_zerop (offset1))
10022 	    offset1 = TREE_OPERAND (arg1, 1);
10023 	  else
10024 	    offset1 = size_binop (PLUS_EXPR, offset1,
10025 				  TREE_OPERAND (arg1, 1));
10026 	  if (poly_int_tree_p (offset1))
10027 	    {
10028 	      poly_offset_int tem = wi::sext (wi::to_poly_offset (offset1),
10029 					      TYPE_PRECISION (sizetype));
10030 	      tem <<= LOG2_BITS_PER_UNIT;
10031 	      tem += bitpos1;
10032 	      if (tem.to_shwi (&bitpos1))
10033 		offset1 = NULL_TREE;
10034 	    }
10035 	}
10036 
10037       /* If we have equivalent bases we might be able to simplify.  */
10038       if (indirect_base0 == indirect_base1
10039 	  && operand_equal_p (base0, base1,
10040 			      indirect_base0 ? OEP_ADDRESS_OF : 0))
10041 	{
10042 	  /* We can fold this expression to a constant if the non-constant
10043 	     offset parts are equal.  */
10044 	  if ((offset0 == offset1
10045 	       || (offset0 && offset1
10046 		   && operand_equal_p (offset0, offset1, 0)))
10047 	      && (equality_code
10048 		  || (indirect_base0
10049 		      && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
10050 		  || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10051 	    {
10052 	      if (!equality_code
10053 		  && maybe_ne (bitpos0, bitpos1)
10054 		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
10055 		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
10056 		fold_overflow_warning (("assuming pointer wraparound does not "
10057 					"occur when comparing P +- C1 with "
10058 					"P +- C2"),
10059 				       WARN_STRICT_OVERFLOW_CONDITIONAL);
10060 
10061 	      switch (code)
10062 		{
10063 		case EQ_EXPR:
10064 		  if (known_eq (bitpos0, bitpos1))
10065 		    return constant_boolean_node (true, type);
10066 		  if (known_ne (bitpos0, bitpos1))
10067 		    return constant_boolean_node (false, type);
10068 		  break;
10069 		case NE_EXPR:
10070 		  if (known_ne (bitpos0, bitpos1))
10071 		    return constant_boolean_node (true, type);
10072 		  if (known_eq (bitpos0, bitpos1))
10073 		    return constant_boolean_node (false, type);
10074 		  break;
10075 		case LT_EXPR:
10076 		  if (known_lt (bitpos0, bitpos1))
10077 		    return constant_boolean_node (true, type);
10078 		  if (known_ge (bitpos0, bitpos1))
10079 		    return constant_boolean_node (false, type);
10080 		  break;
10081 		case LE_EXPR:
10082 		  if (known_le (bitpos0, bitpos1))
10083 		    return constant_boolean_node (true, type);
10084 		  if (known_gt (bitpos0, bitpos1))
10085 		    return constant_boolean_node (false, type);
10086 		  break;
10087 		case GE_EXPR:
10088 		  if (known_ge (bitpos0, bitpos1))
10089 		    return constant_boolean_node (true, type);
10090 		  if (known_lt (bitpos0, bitpos1))
10091 		    return constant_boolean_node (false, type);
10092 		  break;
10093 		case GT_EXPR:
10094 		  if (known_gt (bitpos0, bitpos1))
10095 		    return constant_boolean_node (true, type);
10096 		  if (known_le (bitpos0, bitpos1))
10097 		    return constant_boolean_node (false, type);
10098 		  break;
10099 		default:;
10100 		}
10101 	    }
10102 	  /* We can simplify the comparison to a comparison of the variable
10103 	     offset parts if the constant offset parts are equal.
10104 	     Be careful to use signed sizetype here because otherwise we
10105 	     mess with array offsets in the wrong way.  This is possible
10106 	     because pointer arithmetic is restricted to remain within an
10107 	     object and overflow on pointer differences is undefined as of
10108 	     6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
10109 	  else if (known_eq (bitpos0, bitpos1)
10110 		   && (equality_code
10111 		       || (indirect_base0
10112 			   && (DECL_P (base0) || CONSTANT_CLASS_P (base0)))
10113 		       || TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
10114 	    {
10115 	      /* By converting to signed sizetype we cover middle-end pointer
10116 	         arithmetic, which operates on unsigned pointer types of
10117 	         sizetype's size, as well as ARRAY_REF offsets, which are
10118 	         properly sign- or zero-extended from their type in case it
10119 	         is narrower than sizetype.  */
10120 	      if (offset0 == NULL_TREE)
10121 		offset0 = build_int_cst (ssizetype, 0);
10122 	      else
10123 		offset0 = fold_convert_loc (loc, ssizetype, offset0);
10124 	      if (offset1 == NULL_TREE)
10125 		offset1 = build_int_cst (ssizetype, 0);
10126 	      else
10127 		offset1 = fold_convert_loc (loc, ssizetype, offset1);
10128 
10129 	      if (!equality_code
10130 		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
10131 		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
10132 		fold_overflow_warning (("assuming pointer wraparound does not "
10133 					"occur when comparing P +- C1 with "
10134 					"P +- C2"),
10135 				       WARN_STRICT_OVERFLOW_COMPARISON);
10136 
10137 	      return fold_build2_loc (loc, code, type, offset0, offset1);
10138 	    }
10139 	}
10140       /* For equal offsets we can simplify to a comparison of the
10141 	 base addresses.  */
10142       else if (known_eq (bitpos0, bitpos1)
10143 	       && (indirect_base0
10144 		   ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
10145 	       && (indirect_base1
10146 		   ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
10147 	       && ((offset0 == offset1)
10148 		   || (offset0 && offset1
10149 		       && operand_equal_p (offset0, offset1, 0))))
10150 	{
10151 	  if (indirect_base0)
10152 	    base0 = build_fold_addr_expr_loc (loc, base0);
10153 	  if (indirect_base1)
10154 	    base1 = build_fold_addr_expr_loc (loc, base1);
10155 	  return fold_build2_loc (loc, code, type, base0, base1);
10156 	}
10157       /* Comparison between an ordinary (non-weak) symbol and a null
10158 	 pointer can be eliminated since such symbols must have a non-null
10159 	 address.  In C, relational expressions between pointers
10160 	 to objects and null pointers are undefined.  The results
10161 	 below follow the C++ rules with the additional property that
10162 	 every object pointer compares greater than a null pointer.
10163       */
10164       else if (((DECL_P (base0)
10165 		 && maybe_nonzero_address (base0) > 0
10166 		 /* Avoid folding references to struct members at offset 0 to
10167 		    prevent tests like '&ptr->firstmember == 0' from getting
10168 		    eliminated.  When ptr is null, although the -> expression
10169 		    is strictly speaking invalid, GCC retains it as a matter
10170 		    of QoI.  See PR c/44555. */
10171 		 && (offset0 == NULL_TREE && known_ne (bitpos0, 0)))
10172 		|| CONSTANT_CLASS_P (base0))
10173 	       && indirect_base0
10174 	       /* The caller guarantees that when one of the arguments is
10175 		  constant (i.e., null in this case) it is second.  */
10176 	       && integer_zerop (arg1))
10177 	{
10178 	  switch (code)
10179 	    {
10180 	    case EQ_EXPR:
10181 	    case LE_EXPR:
10182 	    case LT_EXPR:
10183 	      return constant_boolean_node (false, type);
10184 	    case GE_EXPR:
10185 	    case GT_EXPR:
10186 	    case NE_EXPR:
10187 	      return constant_boolean_node (true, type);
10188 	    default:
10189 	      gcc_unreachable ();
10190 	    }
10191 	}
10192     }
10193 
10194   /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
10195      X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
10196      the resulting offset is smaller in absolute value than the
10197      original one and has the same sign.  */
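  /* For example, X + 5 < Y + 7 becomes X < Y + 2: the combined
     constant 2 is smaller in magnitude than 7 and has the same sign,
     so no new overflow can be introduced.  */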
10198   if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10199       && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
10200       && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10201       && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10202 	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
10203       && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
10204       && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10205 	  && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
10206     {
10207       tree const1 = TREE_OPERAND (arg0, 1);
10208       tree const2 = TREE_OPERAND (arg1, 1);
10209       tree variable1 = TREE_OPERAND (arg0, 0);
10210       tree variable2 = TREE_OPERAND (arg1, 0);
10211       tree cst;
10212       const char * const warnmsg = G_("assuming signed overflow does not "
10213 				      "occur when combining constants around "
10214 				      "a comparison");
10215 
10216       /* Put the constant on the side where it doesn't overflow and is
10217 	 of lower absolute value and of the same sign as before.  */
10218       cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
10219 			     ? MINUS_EXPR : PLUS_EXPR,
10220 			     const2, const1);
10221       if (!TREE_OVERFLOW (cst)
10222 	  && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
10223 	  && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
10224 	{
10225 	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
10226 	  return fold_build2_loc (loc, code, type,
10227 				  variable1,
10228 				  fold_build2_loc (loc, TREE_CODE (arg1),
10229 						   TREE_TYPE (arg1),
10230 						   variable2, cst));
10231 	}
10232 
10233       cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
10234 			     ? MINUS_EXPR : PLUS_EXPR,
10235 			     const1, const2);
10236       if (!TREE_OVERFLOW (cst)
10237 	  && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
10238 	  && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
10239 	{
10240 	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
10241 	  return fold_build2_loc (loc, code, type,
10242 				  fold_build2_loc (loc, TREE_CODE (arg0),
10243 						   TREE_TYPE (arg0),
10244 						   variable1, cst),
10245 				  variable2);
10246 	}
10247     }
10248 
10249   tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
10250   if (tem)
10251     return tem;
10252 
10253   /* If we are comparing an expression that just has comparisons
10254      of two integer values, arithmetic expressions of those comparisons,
10255      and constants, we can simplify it.  There are only three cases
10256      to check: the two values can either be equal, the first can be
10257      greater, or the second can be greater.  Fold the expression for
10258      those three values.  Since each value must be 0 or 1, we have
10259      eight possibilities, each of which corresponds to the constant 0
10260      or 1 or one of the six possible comparisons.
10261 
10262      This handles common cases like (a > b) == 0 but also handles
10263      expressions like  ((x > y) - (y > x)) > 0, which supposedly
10264      occur in macroized code.  */
10265 
10266   if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
10267     {
10268       tree cval1 = 0, cval2 = 0;
10269 
10270       if (twoval_comparison_p (arg0, &cval1, &cval2)
10271 	  /* Don't handle degenerate cases here; they should already
10272 	     have been handled anyway.  */
10273 	  && cval1 != 0 && cval2 != 0
10274 	  && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
10275 	  && TREE_TYPE (cval1) == TREE_TYPE (cval2)
10276 	  && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
10277 	  && TYPE_MAX_VALUE (TREE_TYPE (cval1))
10278 	  && TYPE_MAX_VALUE (TREE_TYPE (cval2))
10279 	  && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
10280 				TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
10281 	{
10282 	  tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
10283 	  tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
10284 
10285 	  /* We can't just pass T to eval_subst in case cval1 or cval2
10286 	     was the same as ARG1.  */
10287 
10288 	  tree high_result
10289 		= fold_build2_loc (loc, code, type,
10290 			       eval_subst (loc, arg0, cval1, maxval,
10291 					   cval2, minval),
10292 			       arg1);
10293 	  tree equal_result
10294 		= fold_build2_loc (loc, code, type,
10295 			       eval_subst (loc, arg0, cval1, maxval,
10296 					   cval2, maxval),
10297 			       arg1);
10298 	  tree low_result
10299 		= fold_build2_loc (loc, code, type,
10300 			       eval_subst (loc, arg0, cval1, minval,
10301 					   cval2, maxval),
10302 			       arg1);
10303 
10304 	  /* All three of these results should be 0 or 1.  Confirm they are.
10305 	     Then use those values to select the proper code to use.  */
10306 
10307 	  if (TREE_CODE (high_result) == INTEGER_CST
10308 	      && TREE_CODE (equal_result) == INTEGER_CST
10309 	      && TREE_CODE (low_result) == INTEGER_CST)
10310 	    {
10311 	      /* Make a 3-bit mask with the high-order bit being the
10312 		 value for `>', the next for '=', and the low for '<'.  */
10313 	      switch ((integer_onep (high_result) * 4)
10314 		      + (integer_onep (equal_result) * 2)
10315 		      + integer_onep (low_result))
10316 		{
10317 		case 0:
10318 		  /* Always false.  */
10319 		  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10320 		case 1:
10321 		  code = LT_EXPR;
10322 		  break;
10323 		case 2:
10324 		  code = EQ_EXPR;
10325 		  break;
10326 		case 3:
10327 		  code = LE_EXPR;
10328 		  break;
10329 		case 4:
10330 		  code = GT_EXPR;
10331 		  break;
10332 		case 5:
10333 		  code = NE_EXPR;
10334 		  break;
10335 		case 6:
10336 		  code = GE_EXPR;
10337 		  break;
10338 		case 7:
10339 		  /* Always true.  */
10340 		  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10341 		}
10342 
10343 	      return fold_build2_loc (loc, code, type, cval1, cval2);
10344 	    }
10345 	}
10346     }
10347 
10348   return NULL_TREE;
10349 }
10350 
10351 
10352 /* Subroutine of fold_binary.  Optimize complex multiplications of the
10353    form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
10354    argument EXPR represents the expression "z" of type TYPE.  */
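/* For z = a + b*i, z * conj(z) = (a + b*i) * (a - b*i) = a*a + b*b;
   the imaginary cross terms cancel exactly, so the result below is
   built with a zero imaginary part.  */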
10355 
10356 static tree
10357 fold_mult_zconjz (location_t loc, tree type, tree expr)
10358 {
10359   tree itype = TREE_TYPE (type);
10360   tree rpart, ipart, tem;
10361 
10362   if (TREE_CODE (expr) == COMPLEX_EXPR)
10363     {
10364       rpart = TREE_OPERAND (expr, 0);
10365       ipart = TREE_OPERAND (expr, 1);
10366     }
10367   else if (TREE_CODE (expr) == COMPLEX_CST)
10368     {
10369       rpart = TREE_REALPART (expr);
10370       ipart = TREE_IMAGPART (expr);
10371     }
10372   else
10373     {
10374       expr = save_expr (expr);
10375       rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
10376       ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
10377     }
10378 
10379   rpart = save_expr (rpart);
10380   ipart = save_expr (ipart);
10381   tem = fold_build2_loc (loc, PLUS_EXPR, itype,
10382 		     fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
10383 		     fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
10384   return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
10385 			  build_zero_cst (itype));
10386 }
10387 
10388 
10389 /* Helper function for fold_vec_perm.  Store elements of VECTOR_CST or
10390    CONSTRUCTOR ARG into array ELTS, which has NELTS elements, and return
10391    true if successful.  */
10392 
10393 static bool
10394 vec_cst_ctor_to_array (tree arg, unsigned int nelts, tree *elts)
10395 {
10396   unsigned HOST_WIDE_INT i, nunits;
10397 
10398   if (TREE_CODE (arg) == VECTOR_CST
10399       && VECTOR_CST_NELTS (arg).is_constant (&nunits))
10400     {
10401       for (i = 0; i < nunits; ++i)
10402 	elts[i] = VECTOR_CST_ELT (arg, i);
10403     }
10404   else if (TREE_CODE (arg) == CONSTRUCTOR)
10405     {
10406       constructor_elt *elt;
10407 
10408       FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
10409 	if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
10410 	  return false;
10411 	else
10412 	  elts[i] = elt->value;
10413     }
10414   else
10415     return false;
10416   for (; i < nelts; i++)
10417     elts[i]
10418       = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
10419   return true;
10420 }
10421 
10422 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
10423    selector.  Return the folded VECTOR_CST or CONSTRUCTOR if successful,
10424    NULL_TREE otherwise.  */
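/* For example, with ARG0 = {a, b}, ARG1 = {c, d} and SEL = {0, 3},
   the selector indexes the concatenation {a, b, c, d}, so the folded
   result is {a, d}.  */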
10425 
10426 tree
10427 fold_vec_perm (tree type, tree arg0, tree arg1, const vec_perm_indices &sel)
10428 {
10429   unsigned int i;
10430   unsigned HOST_WIDE_INT nelts;
10431   bool need_ctor = false;
10432 
10433   if (!sel.length ().is_constant (&nelts))
10434     return NULL_TREE;
10435   gcc_assert (known_eq (TYPE_VECTOR_SUBPARTS (type), nelts)
10436 	      && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)), nelts)
10437 	      && known_eq (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)), nelts));
10438   if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
10439       || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
10440     return NULL_TREE;
10441 
10442   tree *in_elts = XALLOCAVEC (tree, nelts * 2);
10443   if (!vec_cst_ctor_to_array (arg0, nelts, in_elts)
10444       || !vec_cst_ctor_to_array (arg1, nelts, in_elts + nelts))
10445     return NULL_TREE;
10446 
10447   tree_vector_builder out_elts (type, nelts, 1);
10448   for (i = 0; i < nelts; i++)
10449     {
10450       HOST_WIDE_INT index;
10451       if (!sel[i].is_constant (&index))
10452 	return NULL_TREE;
10453       if (!CONSTANT_CLASS_P (in_elts[index]))
10454 	need_ctor = true;
10455       out_elts.quick_push (unshare_expr (in_elts[index]));
10456     }
10457 
10458   if (need_ctor)
10459     {
10460       vec<constructor_elt, va_gc> *v;
10461       vec_alloc (v, nelts);
10462       for (i = 0; i < nelts; i++)
10463 	CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, out_elts[i]);
10464       return build_constructor (type, v);
10465     }
10466   else
10467     return out_elts.build ();
10468 }
10469 
10470 /* Try to fold a pointer difference of type TYPE between two address
10471    expressions of array references AREF0 and AREF1 using location LOC.
10472    Return a simplified expression for the difference or NULL_TREE.  */
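/* For example, the byte difference &A[I] - &A[J] folds to
   (I - J) * sizeof (A[0]) when both references share the base A.  */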
10473 
10474 static tree
10475 fold_addr_of_array_ref_difference (location_t loc, tree type,
10476 				   tree aref0, tree aref1,
10477 				   bool use_pointer_diff)
10478 {
10479   tree base0 = TREE_OPERAND (aref0, 0);
10480   tree base1 = TREE_OPERAND (aref1, 0);
10481   tree base_offset = build_int_cst (type, 0);
10482 
10483   /* If the bases are array references as well, recurse.  If the bases
10484      are pointer indirections compute the difference of the pointers.
10485      If the bases are equal, we are set.  */
10486   if ((TREE_CODE (base0) == ARRAY_REF
10487        && TREE_CODE (base1) == ARRAY_REF
10488        && (base_offset
10489 	   = fold_addr_of_array_ref_difference (loc, type, base0, base1,
10490 						use_pointer_diff)))
10491       || (INDIRECT_REF_P (base0)
10492 	  && INDIRECT_REF_P (base1)
10493 	  && (base_offset
10494 	        = use_pointer_diff
10495 		  ? fold_binary_loc (loc, POINTER_DIFF_EXPR, type,
10496 				     TREE_OPERAND (base0, 0),
10497 				     TREE_OPERAND (base1, 0))
10498 		  : fold_binary_loc (loc, MINUS_EXPR, type,
10499 				     fold_convert (type,
10500 						   TREE_OPERAND (base0, 0)),
10501 				     fold_convert (type,
10502 						   TREE_OPERAND (base1, 0)))))
10503       || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
10504     {
10505       tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10506       tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10507       tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
10508       tree diff = fold_build2_loc (loc, MINUS_EXPR, type, op0, op1);
10509       return fold_build2_loc (loc, PLUS_EXPR, type,
10510 			      base_offset,
10511 			      fold_build2_loc (loc, MULT_EXPR, type,
10512 					       diff, esz));
10513     }
10514   return NULL_TREE;
10515 }
10516 
10517 /* If the real or vector real constant CST of type TYPE has an exact
10518    inverse, return it, else return NULL.  */
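/* For example, 4.0 has the exact inverse 0.25, while 3.0 has no exact
   inverse in binary floating point, so NULL_TREE is returned for it.  */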
10519 
10520 tree
10521 exact_inverse (tree type, tree cst)
10522 {
10523   REAL_VALUE_TYPE r;
10524   tree unit_type;
10525   machine_mode mode;
10526 
10527   switch (TREE_CODE (cst))
10528     {
10529     case REAL_CST:
10530       r = TREE_REAL_CST (cst);
10531 
10532       if (exact_real_inverse (TYPE_MODE (type), &r))
10533 	return build_real (type, r);
10534 
10535       return NULL_TREE;
10536 
10537     case VECTOR_CST:
10538       {
10539 	unit_type = TREE_TYPE (type);
10540 	mode = TYPE_MODE (unit_type);
10541 
10542 	tree_vector_builder elts;
10543 	if (!elts.new_unary_operation (type, cst, false))
10544 	  return NULL_TREE;
10545 	unsigned int count = elts.encoded_nelts ();
10546 	for (unsigned int i = 0; i < count; ++i)
10547 	  {
10548 	    r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
10549 	    if (!exact_real_inverse (mode, &r))
10550 	      return NULL_TREE;
10551 	    elts.quick_push (build_real (unit_type, r));
10552 	  }
10553 
10554 	return elts.build ();
10555       }
10556 
10557     default:
10558       return NULL_TREE;
10559     }
10560 }
10561 
10562 /*  Mask out the tz least significant bits of X of type TYPE where
10563     tz is the number of trailing zeroes in Y.  */
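/* For example, if Y is binary ...01000 (three trailing zeroes), X is
   ANDed with a mask that clears its three least significant bits.  */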
10564 static wide_int
10565 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
10566 {
10567   int tz = wi::ctz (y);
10568   if (tz > 0)
10569     return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
10570   return x;
10571 }
10572 
10573 /* Return true when T is an address and is known to be nonzero.
10574    For floating point we further ensure that T is not denormal.
10575    Similar logic is present in nonzero_address in rtlanal.h.
10576 
10577    If the return value is based on the assumption that signed overflow
10578    is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
10579    change *STRICT_OVERFLOW_P.  */
10580 
10581 static bool
10582 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
10583 {
10584   tree type = TREE_TYPE (t);
10585   enum tree_code code;
10586 
10587   /* Doing something useful for floating point would need more work.  */
10588   if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
10589     return false;
10590 
10591   code = TREE_CODE (t);
10592   switch (TREE_CODE_CLASS (code))
10593     {
10594     case tcc_unary:
10595       return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10596 					      strict_overflow_p);
10597     case tcc_binary:
10598     case tcc_comparison:
10599       return tree_binary_nonzero_warnv_p (code, type,
10600 					       TREE_OPERAND (t, 0),
10601 					       TREE_OPERAND (t, 1),
10602 					       strict_overflow_p);
10603     case tcc_constant:
10604     case tcc_declaration:
10605     case tcc_reference:
10606       return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10607 
10608     default:
10609       break;
10610     }
10611 
10612   switch (code)
10613     {
10614     case TRUTH_NOT_EXPR:
10615       return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10616 					      strict_overflow_p);
10617 
10618     case TRUTH_AND_EXPR:
10619     case TRUTH_OR_EXPR:
10620     case TRUTH_XOR_EXPR:
10621       return tree_binary_nonzero_warnv_p (code, type,
10622 					       TREE_OPERAND (t, 0),
10623 					       TREE_OPERAND (t, 1),
10624 					       strict_overflow_p);
10625 
10626     case COND_EXPR:
10627     case CONSTRUCTOR:
10628     case OBJ_TYPE_REF:
10629     case ASSERT_EXPR:
10630     case ADDR_EXPR:
10631     case WITH_SIZE_EXPR:
10632     case SSA_NAME:
10633       return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10634 
10635     case COMPOUND_EXPR:
10636     case MODIFY_EXPR:
10637     case BIND_EXPR:
10638       return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
10639 					strict_overflow_p);
10640 
10641     case SAVE_EXPR:
10642       return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
10643 					strict_overflow_p);
10644 
10645     case CALL_EXPR:
10646       {
10647 	tree fndecl = get_callee_fndecl (t);
10648 	if (!fndecl) return false;
10649 	if (flag_delete_null_pointer_checks && !flag_check_new
10650 	    && DECL_IS_OPERATOR_NEW_P (fndecl)
10651 	    && !TREE_NOTHROW (fndecl))
10652 	  return true;
10653 	if (flag_delete_null_pointer_checks
10654 	    && lookup_attribute ("returns_nonnull",
10655 		 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
10656 	  return true;
10657 	return alloca_call_p (t);
10658       }
10659 
10660     default:
10661       break;
10662     }
10663   return false;
10664 }
10665 
10666 /* Return true when T is an address and is known to be nonzero.
10667    Handle warnings about undefined signed overflow.  */
10668 
10669 bool
10670 tree_expr_nonzero_p (tree t)
10671 {
10672   bool ret, strict_overflow_p;
10673 
10674   strict_overflow_p = false;
10675   ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
10676   if (strict_overflow_p)
10677     fold_overflow_warning (("assuming signed overflow does not occur when "
10678 			    "determining that expression is always "
10679 			    "non-zero"),
10680 			   WARN_STRICT_OVERFLOW_MISC);
10681   return ret;
10682 }
10683 
10684 /* Return true if T is known not to be equal to an integer W.  */
10685 
10686 bool
10687 expr_not_equal_to (tree t, const wide_int &w)
10688 {
10689   value_range vr;
10690   switch (TREE_CODE (t))
10691     {
10692     case INTEGER_CST:
10693       return wi::to_wide (t) != w;
10694 
10695     case SSA_NAME:
10696       if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
10697 	return false;
10698       get_range_info (t, vr);
10699       if (!vr.undefined_p ()
10700 	  && !vr.contains_p (wide_int_to_tree (TREE_TYPE (t), w)))
10701 	return true;
10702       /* If T has some known zero bits and W has any of those bits set,
10703 	 then T is known not to be equal to W.  */
10704       if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
10705 			      TYPE_PRECISION (TREE_TYPE (t))), 0))
10706 	return true;
10707       return false;
10708 
10709     default:
10710       return false;
10711     }
10712 }
10713 
10714 /* Fold a binary expression of code CODE and type TYPE with operands
10715    OP0 and OP1.  LOC is the location of the resulting expression.
10716    Return the folded expression if folding is successful.  Otherwise,
10717    return NULL_TREE.  */
10718 
10719 tree
10720 fold_binary_loc (location_t loc, enum tree_code code, tree type,
10721 		 tree op0, tree op1)
10722 {
10723   enum tree_code_class kind = TREE_CODE_CLASS (code);
10724   tree arg0, arg1, tem;
10725   tree t1 = NULL_TREE;
10726   bool strict_overflow_p;
10727   unsigned int prec;
10728 
10729   gcc_assert (IS_EXPR_CODE_CLASS (kind)
10730 	      && TREE_CODE_LENGTH (code) == 2
10731 	      && op0 != NULL_TREE
10732 	      && op1 != NULL_TREE);
10733 
10734   arg0 = op0;
10735   arg1 = op1;
10736 
10737   /* Strip any conversions that don't change the mode.  This is
10738      safe for every expression, except for a comparison expression
10739      because its signedness is derived from its operands.  So, in
10740      the latter case, only strip conversions that don't change the
10741      signedness.  MIN_EXPR/MAX_EXPR also need signedness of arguments
10742      preserved.
10743 
10744      Note that this is done as an internal manipulation within the
10745      constant folder, in order to find the simplest representation
10746      of the arguments so that their form can be studied.  In any
10747      cases, the appropriate type conversions should be put back in
10748      the tree that will get out of the constant folder.  */
10749 
10750   if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
10751     {
10752       STRIP_SIGN_NOPS (arg0);
10753       STRIP_SIGN_NOPS (arg1);
10754     }
10755   else
10756     {
10757       STRIP_NOPS (arg0);
10758       STRIP_NOPS (arg1);
10759     }
10760 
10761   /* Note that TREE_CONSTANT isn't enough: static var addresses are
10762      constant but we can't do arithmetic on them.  */
10763   if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
10764     {
10765       tem = const_binop (code, type, arg0, arg1);
10766       if (tem != NULL_TREE)
10767 	{
10768 	  if (TREE_TYPE (tem) != type)
10769 	    tem = fold_convert_loc (loc, type, tem);
10770 	  return tem;
10771 	}
10772     }
10773 
10774   /* If this is a commutative operation, and ARG0 is a constant, move it
10775      to ARG1 to reduce the number of tests below.  */
10776   if (commutative_tree_code (code)
10777       && tree_swap_operands_p (arg0, arg1))
10778     return fold_build2_loc (loc, code, type, op1, op0);
10779 
10780   /* Likewise if this is a comparison, and ARG0 is a constant, move it
10781      to ARG1 to reduce the number of tests below.  */
10782   if (kind == tcc_comparison
10783       && tree_swap_operands_p (arg0, arg1))
10784     return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
10785 
10786   tem = generic_simplify (loc, code, type, op0, op1);
10787   if (tem)
10788     return tem;
10789 
10790   /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10791 
10792      First check for cases where an arithmetic operation is applied to a
10793      compound, conditional, or comparison operation.  Push the arithmetic
10794      operation inside the compound or conditional to see if any folding
10795      can then be done.  Convert comparison to conditional for this purpose.
10796      This also optimizes non-constant cases that used to be done in
10797      expand_expr.
10798 
10799      Before we do that, see if this is a BIT_AND_EXPR, BIT_IOR_EXPR,
10800      NE_EXPR or EQ_EXPR where one operand is a truth value and the
10801      other is a truth value or a BIT_AND_EXPR with the constant 1.
10802      In that case, the code below would make the expression more
10803      complex.  Change it to a TRUTH_{AND,OR}_EXPR.  Likewise, convert
10804      a similar NE_EXPR to TRUTH_XOR_EXPR and an EQ_EXPR to its inversion.  */
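
  /* For instance (illustrative): on scalar operands, (a < b) & (c < d)
     is changed to TRUTH_AND_EXPR of the two comparisons, and
     (a < b) == (c < d) to the inversion of their TRUTH_XOR_EXPR.  */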
10805 
10806   if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10807        || code == EQ_EXPR || code == NE_EXPR)
10808       && !VECTOR_TYPE_P (TREE_TYPE (arg0))
10809       && ((truth_value_p (TREE_CODE (arg0))
10810 	   && (truth_value_p (TREE_CODE (arg1))
10811 	       || (TREE_CODE (arg1) == BIT_AND_EXPR
10812 		   && integer_onep (TREE_OPERAND (arg1, 1)))))
10813 	  || (truth_value_p (TREE_CODE (arg1))
10814 	      && (truth_value_p (TREE_CODE (arg0))
10815 		  || (TREE_CODE (arg0) == BIT_AND_EXPR
10816 		      && integer_onep (TREE_OPERAND (arg0, 1)))))))
10817     {
10818       tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10819 			 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10820 			 : TRUTH_XOR_EXPR,
10821 			 boolean_type_node,
10822 			 fold_convert_loc (loc, boolean_type_node, arg0),
10823 			 fold_convert_loc (loc, boolean_type_node, arg1));
10824 
10825       if (code == EQ_EXPR)
10826 	tem = invert_truthvalue_loc (loc, tem);
10827 
10828       return fold_convert_loc (loc, type, tem);
10829     }
10830 
10831   if (TREE_CODE_CLASS (code) == tcc_binary
10832       || TREE_CODE_CLASS (code) == tcc_comparison)
10833     {
10834       if (TREE_CODE (arg0) == COMPOUND_EXPR)
10835 	{
10836 	  tem = fold_build2_loc (loc, code, type,
10837 			     fold_convert_loc (loc, TREE_TYPE (op0),
10838 					       TREE_OPERAND (arg0, 1)), op1);
10839 	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10840 			     tem);
10841 	}
10842       if (TREE_CODE (arg1) == COMPOUND_EXPR)
10843 	{
10844 	  tem = fold_build2_loc (loc, code, type, op0,
10845 			     fold_convert_loc (loc, TREE_TYPE (op1),
10846 					       TREE_OPERAND (arg1, 1)));
10847 	  return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10848 			     tem);
10849 	}
10850 
10851       if (TREE_CODE (arg0) == COND_EXPR
10852 	  || TREE_CODE (arg0) == VEC_COND_EXPR
10853 	  || COMPARISON_CLASS_P (arg0))
10854 	{
10855 	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10856 						     arg0, arg1,
10857 						     /*cond_first_p=*/1);
10858 	  if (tem != NULL_TREE)
10859 	    return tem;
10860 	}
10861 
10862       if (TREE_CODE (arg1) == COND_EXPR
10863 	  || TREE_CODE (arg1) == VEC_COND_EXPR
10864 	  || COMPARISON_CLASS_P (arg1))
10865 	{
10866 	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10867 						     arg1, arg0,
10868 					             /*cond_first_p=*/0);
10869 	  if (tem != NULL_TREE)
10870 	    return tem;
10871 	}
10872     }
10873 
10874   switch (code)
10875     {
10876     case MEM_REF:
10877       /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2].  */
10878       if (TREE_CODE (arg0) == ADDR_EXPR
10879 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10880 	{
10881 	  tree iref = TREE_OPERAND (arg0, 0);
10882 	  return fold_build2 (MEM_REF, type,
10883 			      TREE_OPERAND (iref, 0),
10884 			      int_const_binop (PLUS_EXPR, arg1,
10885 					       TREE_OPERAND (iref, 1)));
10886 	}
10887 
10888       /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2].  */
10889       if (TREE_CODE (arg0) == ADDR_EXPR
10890 	  && handled_component_p (TREE_OPERAND (arg0, 0)))
10891 	{
10892 	  tree base;
10893 	  poly_int64 coffset;
10894 	  base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10895 						&coffset);
10896 	  if (!base)
10897 	    return NULL_TREE;
10898 	  return fold_build2 (MEM_REF, type,
10899 			      build1 (ADDR_EXPR, TREE_TYPE (arg0), base),
10900 			      int_const_binop (PLUS_EXPR, arg1,
10901 					       size_int (coffset)));
10902 	}
10903 
10904       return NULL_TREE;
10905 
10906     case POINTER_PLUS_EXPR:
10907       /* INT +p INT -> (PTR)(INT + INT).  Stripping types allows for this. */
10908       if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10909 	   && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10910         return fold_convert_loc (loc, type,
10911 				 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10912 					      fold_convert_loc (loc, sizetype,
10913 								arg1),
10914 					      fold_convert_loc (loc, sizetype,
10915 								arg0)));
10916 
10917       return NULL_TREE;
10918 
10919     case PLUS_EXPR:
10920       if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10921 	{
10922 	  /* X + (X / CST) * -CST is X % CST.  */
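	  /* Worked instance (illustrative): X + (X / 16) * -16 folds to
	     X % 16, because the constant sum -16 + 16 is zero.  */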
10923 	  if (TREE_CODE (arg1) == MULT_EXPR
10924 	      && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10925 	      && operand_equal_p (arg0,
10926 				  TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10927 	    {
10928 	      tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10929 	      tree cst1 = TREE_OPERAND (arg1, 1);
10930 	      tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10931 				      cst1, cst0);
10932 	      if (sum && integer_zerop (sum))
10933 		return fold_convert_loc (loc, type,
10934 					 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10935 						      TREE_TYPE (arg0), arg0,
10936 						      cst0));
10937 	    }
10938 	}
10939 
10940       /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10941 	 one.  Make sure the type is not saturating and has the signedness of
10942 	 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10943 	 ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
10944       if ((TREE_CODE (arg0) == MULT_EXPR
10945 	   || TREE_CODE (arg1) == MULT_EXPR)
10946 	  && !TYPE_SATURATING (type)
10947 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10948 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10949 	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
10950         {
10951 	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10952 	  if (tem)
10953 	    return tem;
10954 	}
10955 
10956       if (! FLOAT_TYPE_P (type))
10957 	{
10958 	  /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10959 	     (plus (plus (mult) (mult)) (foo)) so that we can
10960 	     take advantage of the factoring cases below.  */
10961 	  if (ANY_INTEGRAL_TYPE_P (type)
10962 	      && TYPE_OVERFLOW_WRAPS (type)
10963 	      && (((TREE_CODE (arg0) == PLUS_EXPR
10964 		    || TREE_CODE (arg0) == MINUS_EXPR)
10965 		   && TREE_CODE (arg1) == MULT_EXPR)
10966 		  || ((TREE_CODE (arg1) == PLUS_EXPR
10967 		       || TREE_CODE (arg1) == MINUS_EXPR)
10968 		      && TREE_CODE (arg0) == MULT_EXPR)))
10969 	    {
10970 	      tree parg0, parg1, parg, marg;
10971 	      enum tree_code pcode;
10972 
10973 	      if (TREE_CODE (arg1) == MULT_EXPR)
10974 		parg = arg0, marg = arg1;
10975 	      else
10976 		parg = arg1, marg = arg0;
10977 	      pcode = TREE_CODE (parg);
10978 	      parg0 = TREE_OPERAND (parg, 0);
10979 	      parg1 = TREE_OPERAND (parg, 1);
10980 	      STRIP_NOPS (parg0);
10981 	      STRIP_NOPS (parg1);
10982 
10983 	      if (TREE_CODE (parg0) == MULT_EXPR
10984 		  && TREE_CODE (parg1) != MULT_EXPR)
10985 		return fold_build2_loc (loc, pcode, type,
10986 				    fold_build2_loc (loc, PLUS_EXPR, type,
10987 						 fold_convert_loc (loc, type,
10988 								   parg0),
10989 						 fold_convert_loc (loc, type,
10990 								   marg)),
10991 				    fold_convert_loc (loc, type, parg1));
10992 	      if (TREE_CODE (parg0) != MULT_EXPR
10993 		  && TREE_CODE (parg1) == MULT_EXPR)
10994 		return
10995 		  fold_build2_loc (loc, PLUS_EXPR, type,
10996 			       fold_convert_loc (loc, type, parg0),
10997 			       fold_build2_loc (loc, pcode, type,
10998 					    fold_convert_loc (loc, type, marg),
10999 					    fold_convert_loc (loc, type,
11000 							      parg1)));
11001 	    }
11002 	}
11003       else
11004 	{
11005 	  /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
11006 	     to __complex__ ( x, y ).  This is not the same for SNaNs or
11007 	     if signed zeros are involved.  */
11008 	  if (!HONOR_SNANS (element_mode (arg0))
11009               && !HONOR_SIGNED_ZEROS (element_mode (arg0))
11010 	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11011 	    {
11012 	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11013 	      tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
11014 	      tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
11015 	      bool arg0rz = false, arg0iz = false;
11016 	      if ((arg0r && (arg0rz = real_zerop (arg0r)))
11017 		  || (arg0i && (arg0iz = real_zerop (arg0i))))
11018 		{
11019 		  tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
11020 		  tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
11021 		  if (arg0rz && arg1i && real_zerop (arg1i))
11022 		    {
11023 		      tree rp = arg1r ? arg1r
11024 				  : build1 (REALPART_EXPR, rtype, arg1);
11025 		      tree ip = arg0i ? arg0i
11026 				  : build1 (IMAGPART_EXPR, rtype, arg0);
11027 		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11028 		    }
11029 		  else if (arg0iz && arg1r && real_zerop (arg1r))
11030 		    {
11031 		      tree rp = arg0r ? arg0r
11032 				  : build1 (REALPART_EXPR, rtype, arg0);
11033 		      tree ip = arg1i ? arg1i
11034 				  : build1 (IMAGPART_EXPR, rtype, arg1);
11035 		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11036 		    }
11037 		}
11038 	    }
11039 
11040           /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
11041              We associate floats only if the user has specified
11042              -fassociative-math.  */
11043           if (flag_associative_math
11044               && TREE_CODE (arg1) == PLUS_EXPR
11045               && TREE_CODE (arg0) != MULT_EXPR)
11046             {
11047               tree tree10 = TREE_OPERAND (arg1, 0);
11048               tree tree11 = TREE_OPERAND (arg1, 1);
11049               if (TREE_CODE (tree11) == MULT_EXPR
11050 		  && TREE_CODE (tree10) == MULT_EXPR)
11051                 {
11052                   tree tree0;
11053                   tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
11054                   return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
11055                 }
11056             }
11057           /* Convert (b*c + d*e) + a into b*c + (d*e + a).
11058              We associate floats only if the user has specified
11059              -fassociative-math.  */
11060           if (flag_associative_math
11061               && TREE_CODE (arg0) == PLUS_EXPR
11062               && TREE_CODE (arg1) != MULT_EXPR)
11063             {
11064               tree tree00 = TREE_OPERAND (arg0, 0);
11065               tree tree01 = TREE_OPERAND (arg0, 1);
11066               if (TREE_CODE (tree01) == MULT_EXPR
11067 		  && TREE_CODE (tree00) == MULT_EXPR)
11068                 {
11069                   tree tree0;
11070                   tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
11071                   return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
11072                 }
11073             }
11074 	}
11075 
11076      bit_rotate:
11077       /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
11078 	 is a rotate of A by C1 bits.  */
11079       /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
11080 	 is a rotate of A by B bits.
11081 	 Similarly for (A << B) | (A >> (-B & C3)) where C3 is Z-1,
11082 	 though in this case CODE must be | and not + or ^, otherwise
11083 	 it doesn't return A when B is 0.  */
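      /* Concrete sketch (assuming a 32-bit unsigned A):
	 (A << 3) + (A >> 29) is recognized as a rotate of A left by 3,
	 and (A << B) | (A >> (-B & 31)) as a rotate of A left by B.  */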
11084       {
11085 	enum tree_code code0, code1;
11086 	tree rtype;
11087 	code0 = TREE_CODE (arg0);
11088 	code1 = TREE_CODE (arg1);
11089 	if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
11090 	     || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
11091 	    && operand_equal_p (TREE_OPERAND (arg0, 0),
11092 			        TREE_OPERAND (arg1, 0), 0)
11093 	    && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
11094 	        TYPE_UNSIGNED (rtype))
11095 	    /* Only create rotates in complete modes.  Other cases are not
11096 	       expanded properly.  */
11097 	    && (element_precision (rtype)
11098 		== GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
11099 	  {
11100 	    tree tree01, tree11;
11101 	    tree orig_tree01, orig_tree11;
11102 	    enum tree_code code01, code11;
11103 
11104 	    tree01 = orig_tree01 = TREE_OPERAND (arg0, 1);
11105 	    tree11 = orig_tree11 = TREE_OPERAND (arg1, 1);
11106 	    STRIP_NOPS (tree01);
11107 	    STRIP_NOPS (tree11);
11108 	    code01 = TREE_CODE (tree01);
11109 	    code11 = TREE_CODE (tree11);
11110 	    if (code11 != MINUS_EXPR
11111 		&& (code01 == MINUS_EXPR || code01 == BIT_AND_EXPR))
11112 	      {
11113 		std::swap (code0, code1);
11114 		std::swap (code01, code11);
11115 		std::swap (tree01, tree11);
11116 		std::swap (orig_tree01, orig_tree11);
11117 	      }
11118 	    if (code01 == INTEGER_CST
11119 		&& code11 == INTEGER_CST
11120 		&& (wi::to_widest (tree01) + wi::to_widest (tree11)
11121 		    == element_precision (rtype)))
11122 	      {
11123 		tem = build2_loc (loc, LROTATE_EXPR,
11124 				  rtype, TREE_OPERAND (arg0, 0),
11125 				  code0 == LSHIFT_EXPR
11126 				  ? orig_tree01 : orig_tree11);
11127 		return fold_convert_loc (loc, type, tem);
11128 	      }
11129 	    else if (code11 == MINUS_EXPR)
11130 	      {
11131 		tree tree110, tree111;
11132 		tree110 = TREE_OPERAND (tree11, 0);
11133 		tree111 = TREE_OPERAND (tree11, 1);
11134 		STRIP_NOPS (tree110);
11135 		STRIP_NOPS (tree111);
11136 		if (TREE_CODE (tree110) == INTEGER_CST
11137 		    && compare_tree_int (tree110,
11138 					 element_precision (rtype)) == 0
11139 		    && operand_equal_p (tree01, tree111, 0))
11140 		  {
11141 		    tem = build2_loc (loc, (code0 == LSHIFT_EXPR
11142 					    ? LROTATE_EXPR : RROTATE_EXPR),
11143 				      rtype, TREE_OPERAND (arg0, 0),
11144 				      orig_tree01);
11145 		    return fold_convert_loc (loc, type, tem);
11146 		  }
11147 	      }
11148 	    else if (code == BIT_IOR_EXPR
11149 		     && code11 == BIT_AND_EXPR
11150 		     && pow2p_hwi (element_precision (rtype)))
11151 	      {
11152 		tree tree110, tree111;
11153 		tree110 = TREE_OPERAND (tree11, 0);
11154 		tree111 = TREE_OPERAND (tree11, 1);
11155 		STRIP_NOPS (tree110);
11156 		STRIP_NOPS (tree111);
11157 		if (TREE_CODE (tree110) == NEGATE_EXPR
11158 		    && TREE_CODE (tree111) == INTEGER_CST
11159 		    && compare_tree_int (tree111,
11160 					 element_precision (rtype) - 1) == 0
11161 		    && operand_equal_p (tree01, TREE_OPERAND (tree110, 0), 0))
11162 		  {
11163 		    tem = build2_loc (loc, (code0 == LSHIFT_EXPR
11164 					    ? LROTATE_EXPR : RROTATE_EXPR),
11165 				      rtype, TREE_OPERAND (arg0, 0),
11166 				      orig_tree01);
11167 		    return fold_convert_loc (loc, type, tem);
11168 		  }
11169 	      }
11170 	  }
11171       }
11172 
11173     associate:
11174       /* In most languages, we can't associate operations on floats through
11175 	 parentheses.  Rather than remember where the parentheses were, we
11176 	 don't associate floats at all, unless the user has specified
11177 	 -fassociative-math.
11178 	 And, we need to make sure type is not saturating.  */
11179 
11180       if ((! FLOAT_TYPE_P (type) || flag_associative_math)
11181 	  && !TYPE_SATURATING (type))
11182 	{
11183 	  tree var0, minus_var0, con0, minus_con0, lit0, minus_lit0;
11184 	  tree var1, minus_var1, con1, minus_con1, lit1, minus_lit1;
11185 	  tree atype = type;
11186 	  bool ok = true;
11187 
11188 	  /* Split both trees into variables, constants, and literals.  Then
11189 	     associate each group together, the constants with literals,
11190 	     then the result with variables.  This increases the chances of
11191 	     literals being recombined later and of generating relocatable
11192 	     expressions for the sum of a constant and literal.  */
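	  /* Illustratively: for (X + 4) + (Y + 8), the variables X and Y
	     form one group and the literals 4 and 8 another, so the sum
	     can be recombined as (X + Y) + 12.  */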
11193 	  var0 = split_tree (arg0, type, code,
11194 			     &minus_var0, &con0, &minus_con0,
11195 			     &lit0, &minus_lit0, 0);
11196 	  var1 = split_tree (arg1, type, code,
11197 			     &minus_var1, &con1, &minus_con1,
11198 			     &lit1, &minus_lit1, code == MINUS_EXPR);
11199 
11200 	  /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
11201 	  if (code == MINUS_EXPR)
11202 	    code = PLUS_EXPR;
11203 
11204 	  /* With undefined overflow prefer doing association in a type
11205 	     which wraps on overflow, if that is one of the operand types.  */
11206 	  if ((POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
11207 	      && !TYPE_OVERFLOW_WRAPS (type))
11208 	    {
11209 	      if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11210 		  && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11211 		atype = TREE_TYPE (arg0);
11212 	      else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
11213 		       && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
11214 		atype = TREE_TYPE (arg1);
11215 	      gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
11216 	    }
11217 
11218 	  /* With undefined overflow we can only associate constants with one
11219 	     variable, and constants whose association doesn't overflow.  */
11220 	  if ((POINTER_TYPE_P (atype) || INTEGRAL_TYPE_P (atype))
11221 	      && !TYPE_OVERFLOW_WRAPS (atype))
11222 	    {
11223 	      if ((var0 && var1) || (minus_var0 && minus_var1))
11224 		{
11225 		  /* ???  If split_tree would handle NEGATE_EXPR we could
11226 		     simply reject these cases and the allowed cases would
11227 		     be the var0/minus_var1 ones.  */
11228 		  tree tmp0 = var0 ? var0 : minus_var0;
11229 		  tree tmp1 = var1 ? var1 : minus_var1;
11230 		  bool one_neg = false;
11231 
11232 		  if (TREE_CODE (tmp0) == NEGATE_EXPR)
11233 		    {
11234 		      tmp0 = TREE_OPERAND (tmp0, 0);
11235 		      one_neg = !one_neg;
11236 		    }
11237 		  if (CONVERT_EXPR_P (tmp0)
11238 		      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
11239 		      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
11240 			  <= TYPE_PRECISION (atype)))
11241 		    tmp0 = TREE_OPERAND (tmp0, 0);
11242 		  if (TREE_CODE (tmp1) == NEGATE_EXPR)
11243 		    {
11244 		      tmp1 = TREE_OPERAND (tmp1, 0);
11245 		      one_neg = !one_neg;
11246 		    }
11247 		  if (CONVERT_EXPR_P (tmp1)
11248 		      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
11249 		      && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
11250 			  <= TYPE_PRECISION (atype)))
11251 		    tmp1 = TREE_OPERAND (tmp1, 0);
11252 		  /* The only case we can still associate with two variables
11253 		     is if they cancel out.  */
11254 		  if (!one_neg
11255 		      || !operand_equal_p (tmp0, tmp1, 0))
11256 		    ok = false;
11257 		}
11258 	      else if ((var0 && minus_var1
11259 			&& ! operand_equal_p (var0, minus_var1, 0))
11260 		       || (minus_var0 && var1
11261 			   && ! operand_equal_p (minus_var0, var1, 0)))
11262 		ok = false;
11263 	    }
11264 
11265 	  /* Only do something if we found more than two objects.  Otherwise,
11266 	     nothing has changed and we risk infinite recursion.  */
11267 	  if (ok
11268 	      && ((var0 != 0) + (var1 != 0)
11269 		  + (minus_var0 != 0) + (minus_var1 != 0)
11270 		  + (con0 != 0) + (con1 != 0)
11271 		  + (minus_con0 != 0) + (minus_con1 != 0)
11272 		  + (lit0 != 0) + (lit1 != 0)
11273 		  + (minus_lit0 != 0) + (minus_lit1 != 0)) > 2)
11274 	    {
11275 	      var0 = associate_trees (loc, var0, var1, code, atype);
11276 	      minus_var0 = associate_trees (loc, minus_var0, minus_var1,
11277 					    code, atype);
11278 	      con0 = associate_trees (loc, con0, con1, code, atype);
11279 	      minus_con0 = associate_trees (loc, minus_con0, minus_con1,
11280 					    code, atype);
11281 	      lit0 = associate_trees (loc, lit0, lit1, code, atype);
11282 	      minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
11283 					    code, atype);
11284 
11285 	      if (minus_var0 && var0)
11286 		{
11287 		  var0 = associate_trees (loc, var0, minus_var0,
11288 					  MINUS_EXPR, atype);
11289 		  minus_var0 = 0;
11290 		}
11291 	      if (minus_con0 && con0)
11292 		{
11293 		  con0 = associate_trees (loc, con0, minus_con0,
11294 					  MINUS_EXPR, atype);
11295 		  minus_con0 = 0;
11296 		}
11297 
11298 	      /* Preserve the MINUS_EXPR if the negative part of the literal is
11299 		 greater than the positive part.  Otherwise, the multiplicative
11300 		 folding code (i.e. extract_muldiv) may be fooled in case
11301 		 unsigned constants are subtracted, like in the following
11302 		 example: ((X*2 + 4) - 8U)/2.  */
11303 	      if (minus_lit0 && lit0)
11304 		{
11305 		  if (TREE_CODE (lit0) == INTEGER_CST
11306 		      && TREE_CODE (minus_lit0) == INTEGER_CST
11307 		      && tree_int_cst_lt (lit0, minus_lit0)
11308 		      /* But avoid ending up with only negated parts.  */
11309 		      && (var0 || con0))
11310 		    {
11311 		      minus_lit0 = associate_trees (loc, minus_lit0, lit0,
11312 						    MINUS_EXPR, atype);
11313 		      lit0 = 0;
11314 		    }
11315 		  else
11316 		    {
11317 		      lit0 = associate_trees (loc, lit0, minus_lit0,
11318 					      MINUS_EXPR, atype);
11319 		      minus_lit0 = 0;
11320 		    }
11321 		}
11322 
11323 	      /* Don't introduce overflows through reassociation.  */
11324 	      if ((lit0 && TREE_OVERFLOW_P (lit0))
11325 		  || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0)))
11326 		return NULL_TREE;
11327 
11328 	      /* Eliminate lit0 and minus_lit0 to con0 and minus_con0. */
11329 	      con0 = associate_trees (loc, con0, lit0, code, atype);
11330 	      lit0 = 0;
11331 	      minus_con0 = associate_trees (loc, minus_con0, minus_lit0,
11332 					    code, atype);
11333 	      minus_lit0 = 0;
11334 
11335 	      /* Eliminate minus_con0.  */
11336 	      if (minus_con0)
11337 		{
11338 		  if (con0)
11339 		    con0 = associate_trees (loc, con0, minus_con0,
11340 					    MINUS_EXPR, atype);
11341 		  else if (var0)
11342 		    var0 = associate_trees (loc, var0, minus_con0,
11343 					    MINUS_EXPR, atype);
11344 		  else
11345 		    gcc_unreachable ();
11346 		  minus_con0 = 0;
11347 		}
11348 
11349 	      /* Eliminate minus_var0.  */
11350 	      if (minus_var0)
11351 		{
11352 		  if (con0)
11353 		    con0 = associate_trees (loc, con0, minus_var0,
11354 					    MINUS_EXPR, atype);
11355 		  else
11356 		    gcc_unreachable ();
11357 		  minus_var0 = 0;
11358 		}
11359 
11360 	      return
11361 		fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
11362 							      code, atype));
11363 	    }
11364 	}
11365 
11366       return NULL_TREE;
11367 
11368     case POINTER_DIFF_EXPR:
11369     case MINUS_EXPR:
11370       /* Fold &a[i] - &a[j] to i-j.  */
11371       if (TREE_CODE (arg0) == ADDR_EXPR
11372 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
11373 	  && TREE_CODE (arg1) == ADDR_EXPR
11374 	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
11375         {
11376 	  tree tem = fold_addr_of_array_ref_difference (loc, type,
11377 							TREE_OPERAND (arg0, 0),
11378 							TREE_OPERAND (arg1, 0),
11379 							code
11380 							== POINTER_DIFF_EXPR);
11381 	  if (tem)
11382 	    return tem;
11383 	}
11384 
11385       /* Further transformations are not for pointers.  */
11386       if (code == POINTER_DIFF_EXPR)
11387 	return NULL_TREE;
11388 
11389       /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
11390       if (TREE_CODE (arg0) == NEGATE_EXPR
11391 	  && negate_expr_p (op1)
11392 	  /* If arg0 is e.g. unsigned int and type is int, then this could
11393 	     introduce UB, because if A is INT_MIN at runtime, the original
11394 	     expression can be well defined while the latter is not.
11395 	     See PR83269.  */
11396 	  && !(ANY_INTEGRAL_TYPE_P (type)
11397 	       && TYPE_OVERFLOW_UNDEFINED (type)
11398 	       && ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11399 	       && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
11400 	return fold_build2_loc (loc, MINUS_EXPR, type, negate_expr (op1),
11401 			        fold_convert_loc (loc, type,
11402 						  TREE_OPERAND (arg0, 0)));
11403 
11404       /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
11405 	 __complex__ ( x, -y ).  This is not the same for SNaNs or if
11406 	 signed zeros are involved.  */
11407       if (!HONOR_SNANS (element_mode (arg0))
11408 	  && !HONOR_SIGNED_ZEROS (element_mode (arg0))
11409 	  && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11410         {
11411 	  tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11412 	  tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
11413 	  tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
11414 	  bool arg0rz = false, arg0iz = false;
11415 	  if ((arg0r && (arg0rz = real_zerop (arg0r)))
11416 	      || (arg0i && (arg0iz = real_zerop (arg0i))))
11417 	    {
11418 	      tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
11419 	      tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
11420 	      if (arg0rz && arg1i && real_zerop (arg1i))
11421 	        {
11422 		  tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11423 					 arg1r ? arg1r
11424 					 : build1 (REALPART_EXPR, rtype, arg1));
11425 		  tree ip = arg0i ? arg0i
11426 		    : build1 (IMAGPART_EXPR, rtype, arg0);
11427 		  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11428 		}
11429 	      else if (arg0iz && arg1r && real_zerop (arg1r))
11430 	        {
11431 		  tree rp = arg0r ? arg0r
11432 		    : build1 (REALPART_EXPR, rtype, arg0);
11433 		  tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
11434 					 arg1i ? arg1i
11435 					 : build1 (IMAGPART_EXPR, rtype, arg1));
11436 		  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
11437 		}
11438 	    }
11439 	}
11440 
11441       /* A - B -> A + (-B) if B is easily negatable.  */
11442       if (negate_expr_p (op1)
11443 	  && ! TYPE_OVERFLOW_SANITIZED (type)
11444 	  && ((FLOAT_TYPE_P (type)
11445                /* Avoid this transformation if B is a positive REAL_CST.  */
11446 	       && (TREE_CODE (op1) != REAL_CST
11447 		   || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
11448 	      || INTEGRAL_TYPE_P (type)))
11449 	return fold_build2_loc (loc, PLUS_EXPR, type,
11450 				fold_convert_loc (loc, type, arg0),
11451 				negate_expr (op1));
11452 
11453       /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
11454 	 one.  Make sure the type is not saturating and has the signedness of
11455 	 the stripped operands, as fold_plusminus_mult_expr will re-associate.
11456 	 ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
11457       if ((TREE_CODE (arg0) == MULT_EXPR
11458 	   || TREE_CODE (arg1) == MULT_EXPR)
11459 	  && !TYPE_SATURATING (type)
11460 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11461 	  && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
11462 	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
11463         {
11464 	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
11465 	  if (tem)
11466 	    return tem;
11467 	}
11468 
11469       goto associate;
11470 
11471     case MULT_EXPR:
11472       if (! FLOAT_TYPE_P (type))
11473 	{
11474 	  /* Transform x * -C into -x * C if x is easily negatable.  */
11475 	  if (TREE_CODE (op1) == INTEGER_CST
11476 	      && tree_int_cst_sgn (op1) == -1
11477 	      && negate_expr_p (op0)
11478 	      && negate_expr_p (op1)
11479 	      && (tem = negate_expr (op1)) != op1
11480 	      && ! TREE_OVERFLOW (tem))
11481 	    return fold_build2_loc (loc, MULT_EXPR, type,
11482 				    fold_convert_loc (loc, type,
11483 						      negate_expr (op0)), tem);
11484 
11485 	  strict_overflow_p = false;
11486 	  if (TREE_CODE (arg1) == INTEGER_CST
11487 	      && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11488 					&strict_overflow_p)) != 0)
11489 	    {
11490 	      if (strict_overflow_p)
11491 		fold_overflow_warning (("assuming signed overflow does not "
11492 					"occur when simplifying "
11493 					"multiplication"),
11494 				       WARN_STRICT_OVERFLOW_MISC);
11495 	      return fold_convert_loc (loc, type, tem);
11496 	    }
11497 
11498 	  /* Optimize z * conj(z) for integer complex numbers.  */
11499 	  if (TREE_CODE (arg0) == CONJ_EXPR
11500 	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11501 	    return fold_mult_zconjz (loc, type, arg1);
11502 	  if (TREE_CODE (arg1) == CONJ_EXPR
11503 	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11504 	    return fold_mult_zconjz (loc, type, arg0);
11505 	}
11506       else
11507 	{
11508 	  /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11509 	     This is not the same for NaNs or if signed zeros are
11510 	     involved.  */
11511 	  if (!HONOR_NANS (arg0)
11512               && !HONOR_SIGNED_ZEROS (element_mode (arg0))
11513 	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11514 	      && TREE_CODE (arg1) == COMPLEX_CST
11515 	      && real_zerop (TREE_REALPART (arg1)))
11516 	    {
11517 	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11518 	      if (real_onep (TREE_IMAGPART (arg1)))
11519 		return
11520 		  fold_build2_loc (loc, COMPLEX_EXPR, type,
11521 			       negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11522 							     rtype, arg0)),
11523 			       fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11524 	      else if (real_minus_onep (TREE_IMAGPART (arg1)))
11525 		return
11526 		  fold_build2_loc (loc, COMPLEX_EXPR, type,
11527 			       fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11528 			       negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11529 							     rtype, arg0)));
11530 	    }
11531 
11532 	  /* Optimize z * conj(z) for floating point complex numbers.
11533 	     Guarded by flag_unsafe_math_optimizations as non-finite
11534 	     imaginary components don't produce scalar results.  */
11535 	  if (flag_unsafe_math_optimizations
11536 	      && TREE_CODE (arg0) == CONJ_EXPR
11537 	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11538 	    return fold_mult_zconjz (loc, type, arg1);
11539 	  if (flag_unsafe_math_optimizations
11540 	      && TREE_CODE (arg1) == CONJ_EXPR
11541 	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11542 	    return fold_mult_zconjz (loc, type, arg0);
11543 	}
11544       goto associate;
11545 
11546     case BIT_IOR_EXPR:
11547       /* Canonicalize (X & C1) | C2.  */
11548       if (TREE_CODE (arg0) == BIT_AND_EXPR
11549 	  && TREE_CODE (arg1) == INTEGER_CST
11550 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11551 	{
11552 	  int width = TYPE_PRECISION (type), w;
11553 	  wide_int c1 = wi::to_wide (TREE_OPERAND (arg0, 1));
11554 	  wide_int c2 = wi::to_wide (arg1);
11555 
11556 	  /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
11557 	  if ((c1 & c2) == c1)
11558 	    return omit_one_operand_loc (loc, type, arg1,
11559 					 TREE_OPERAND (arg0, 0));
11560 
11561 	  wide_int msk = wi::mask (width, false,
11562 				   TYPE_PRECISION (TREE_TYPE (arg1)));
11563 
11564 	  /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
11565 	  if (wi::bit_and_not (msk, c1 | c2) == 0)
11566 	    {
11567 	      tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11568 	      return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
11569 	    }
11570 
11571 	  /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11572 	     unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11573 	     mode which allows further optimizations.  */
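	  /* Worked instance (illustrative): for (X & 0x0f) | 0x19 we get
	     C1 == 0x0f, C2 == 0x19 and C3 == C1 & ~C2 == 0x06, so the
	     expression is canonicalized to (X & 0x06) | 0x19.  */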
11574 	  c1 &= msk;
11575 	  c2 &= msk;
11576 	  wide_int c3 = wi::bit_and_not (c1, c2);
11577 	  for (w = BITS_PER_UNIT; w <= width; w <<= 1)
11578 	    {
11579 	      wide_int mask = wi::mask (w, false,
11580 					TYPE_PRECISION (type));
11581 	      if (((c1 | c2) & mask) == mask
11582 		  && wi::bit_and_not (c1, mask) == 0)
11583 		{
11584 		  c3 = mask;
11585 		  break;
11586 		}
11587 	    }
11588 
11589 	  if (c3 != c1)
11590 	    {
11591 	      tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11592 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, type, tem,
11593 				     wide_int_to_tree (type, c3));
11594 	      return fold_build2_loc (loc, BIT_IOR_EXPR, type, tem, arg1);
11595 	    }
11596 	}
11597 
11598       /* See if this can be simplified into a rotate first.  If that
11599 	 is unsuccessful continue in the association code.  */
11600       goto bit_rotate;
11601 
11602     case BIT_XOR_EXPR:
11603       /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
11604       if (TREE_CODE (arg0) == BIT_AND_EXPR
11605 	  && INTEGRAL_TYPE_P (type)
11606 	  && integer_onep (TREE_OPERAND (arg0, 1))
11607 	  && integer_onep (arg1))
11608 	return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11609 				build_zero_cst (TREE_TYPE (arg0)));
11610 
11611       /* See if this can be simplified into a rotate first.  If that
11612 	 is unsuccessful continue in the association code.  */
11613       goto bit_rotate;
11614 
11615     case BIT_AND_EXPR:
11616       /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
11617       if (TREE_CODE (arg0) == BIT_XOR_EXPR
11618 	  && INTEGRAL_TYPE_P (type)
11619 	  && integer_onep (TREE_OPERAND (arg0, 1))
11620 	  && integer_onep (arg1))
11621 	{
11622 	  tree tem2;
11623 	  tem = TREE_OPERAND (arg0, 0);
11624 	  tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11625 	  tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11626 				  tem, tem2);
11627 	  return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11628 				  build_zero_cst (TREE_TYPE (tem)));
11629 	}
11630       /* Fold ~X & 1 as (X & 1) == 0.  */
11631       if (TREE_CODE (arg0) == BIT_NOT_EXPR
11632 	  && INTEGRAL_TYPE_P (type)
11633 	  && integer_onep (arg1))
11634 	{
11635 	  tree tem2;
11636 	  tem = TREE_OPERAND (arg0, 0);
11637 	  tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11638 	  tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11639 				  tem, tem2);
11640 	  return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11641 				  build_zero_cst (TREE_TYPE (tem)));
11642 	}
11643       /* Fold !X & 1 as X == 0.  */
11644       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11645 	  && integer_onep (arg1))
11646 	{
11647 	  tem = TREE_OPERAND (arg0, 0);
11648 	  return fold_build2_loc (loc, EQ_EXPR, type, tem,
11649 				  build_zero_cst (TREE_TYPE (tem)));
11650 	}
11651 
11652       /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11653          multiple of 1 << CST.  */
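      /* E.g. (illustrative): (X * 12) & -4 folds to X * 12, since 12 is
	 a multiple of 4 == 1 << 2 and the two low bits cleared by the
	 mask are already zero.  */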
11654       if (TREE_CODE (arg1) == INTEGER_CST)
11655 	{
11656 	  wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
11657 	  wide_int ncst1 = -cst1;
11658 	  if ((cst1 & ncst1) == ncst1
11659 	      && multiple_of_p (type, arg0,
11660 				wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11661 	    return fold_convert_loc (loc, type, arg0);
11662 	}
11663 
11664       /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11665          bits from CST2.  */
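      /* Illustratively: in (X * 8) & 5 the low three bits of X * 8 are
	 known to be zero, so no bit of the mask 5 can survive and the
	 whole expression folds to zero.  */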
11666       if (TREE_CODE (arg1) == INTEGER_CST
11667 	  && TREE_CODE (arg0) == MULT_EXPR
11668 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11669 	{
11670 	  wi::tree_to_wide_ref warg1 = wi::to_wide (arg1);
11671 	  wide_int masked
11672 	    = mask_with_tz (type, warg1, wi::to_wide (TREE_OPERAND (arg0, 1)));
11673 
11674 	  if (masked == 0)
11675 	    return omit_two_operands_loc (loc, type, build_zero_cst (type),
11676 	                                  arg0, arg1);
11677 	  else if (masked != warg1)
11678 	    {
11679 	      /* Avoid the transform if arg1 is a mask of some
11680 	         mode which allows further optimizations.  */
11681 	      int pop = wi::popcount (warg1);
11682 	      if (!(pop >= BITS_PER_UNIT
11683 		    && pow2p_hwi (pop)
11684 		    && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11685 		return fold_build2_loc (loc, code, type, op0,
11686 					wide_int_to_tree (type, masked));
11687 	    }
11688 	}
11689 
11690       /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
11691       if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11692 	  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11693 	{
11694 	  prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11695 
11696 	  wide_int mask = wide_int::from (wi::to_wide (arg1), prec, UNSIGNED);
11697 	  if (mask == -1)
11698 	    return
11699 	      fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11700 	}
11701 
11702       goto associate;
11703 
11704     case RDIV_EXPR:
11705       /* Don't touch a floating-point divide by zero unless the mode
11706 	 of the constant can represent infinity.  */
11707       if (TREE_CODE (arg1) == REAL_CST
11708 	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11709 	  && real_zerop (arg1))
11710 	return NULL_TREE;
11711 
11712       /* (-A) / (-B) -> A / B  */
11713       if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11714 	return fold_build2_loc (loc, RDIV_EXPR, type,
11715 			    TREE_OPERAND (arg0, 0),
11716 			    negate_expr (arg1));
11717       if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11718 	return fold_build2_loc (loc, RDIV_EXPR, type,
11719 			    negate_expr (arg0),
11720 			    TREE_OPERAND (arg1, 0));
11721       return NULL_TREE;
11722 
11723     case TRUNC_DIV_EXPR:
11724       /* Fall through */
11725 
11726     case FLOOR_DIV_EXPR:
11727       /* Simplify A / (B << N) where A and B are positive and B is
11728 	 a power of 2, to A >> (N + log2(B)).  */
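      /* Sketch (assuming unsigned A): A / (4 << N) is simplified to
	 A >> (N + 2), since log2 (4) == 2.  */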
11729       strict_overflow_p = false;
11730       if (TREE_CODE (arg1) == LSHIFT_EXPR
11731 	  && (TYPE_UNSIGNED (type)
11732 	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11733 	{
11734 	  tree sval = TREE_OPERAND (arg1, 0);
11735 	  if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11736 	    {
11737 	      tree sh_cnt = TREE_OPERAND (arg1, 1);
11738 	      tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
11739 					 wi::exact_log2 (wi::to_wide (sval)));
11740 
11741 	      if (strict_overflow_p)
11742 		fold_overflow_warning (("assuming signed overflow does not "
11743 					"occur when simplifying A / (B << N)"),
11744 				       WARN_STRICT_OVERFLOW_MISC);
11745 
11746 	      sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11747 					sh_cnt, pow2);
11748 	      return fold_build2_loc (loc, RSHIFT_EXPR, type,
11749 				      fold_convert_loc (loc, type, arg0), sh_cnt);
11750 	    }
11751 	}
11752 
11753       /* Fall through */
11754 
11755     case ROUND_DIV_EXPR:
11756     case CEIL_DIV_EXPR:
11757     case EXACT_DIV_EXPR:
11758       if (integer_zerop (arg1))
11759 	return NULL_TREE;
11760 
11761       /* Convert -A / -B to A / B when the type is signed and overflow is
11762 	 undefined.  */
11763       if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11764 	  && TREE_CODE (op0) == NEGATE_EXPR
11765 	  && negate_expr_p (op1))
11766 	{
11767 	  if (ANY_INTEGRAL_TYPE_P (type))
11768 	    fold_overflow_warning (("assuming signed overflow does not occur "
11769 				    "when distributing negation across "
11770 				    "division"),
11771 				   WARN_STRICT_OVERFLOW_MISC);
11772 	  return fold_build2_loc (loc, code, type,
11773 				  fold_convert_loc (loc, type,
11774 						    TREE_OPERAND (arg0, 0)),
11775 				  negate_expr (op1));
11776 	}
11777       if ((!ANY_INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11778 	  && TREE_CODE (arg1) == NEGATE_EXPR
11779 	  && negate_expr_p (op0))
11780 	{
11781 	  if (ANY_INTEGRAL_TYPE_P (type))
11782 	    fold_overflow_warning (("assuming signed overflow does not occur "
11783 				    "when distributing negation across "
11784 				    "division"),
11785 				   WARN_STRICT_OVERFLOW_MISC);
11786 	  return fold_build2_loc (loc, code, type,
11787 				  negate_expr (op0),
11788 				  fold_convert_loc (loc, type,
11789 						    TREE_OPERAND (arg1, 0)));
11790 	}
11791 
11792       /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11793 	 operation, EXACT_DIV_EXPR.
11794 
11795 	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11796 	 At one time others generated faster code, but it's not clear if they
11797 	 do after the last round of changes to the DIV code in expmed.c.  */
11798       if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11799 	  && multiple_of_p (type, arg0, arg1))
11800 	return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
11801 				fold_convert (type, arg0),
11802 				fold_convert (type, arg1));
11803 
11804       strict_overflow_p = false;
11805       if (TREE_CODE (arg1) == INTEGER_CST
11806 	  && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11807 				    &strict_overflow_p)) != 0)
11808 	{
11809 	  if (strict_overflow_p)
11810 	    fold_overflow_warning (("assuming signed overflow does not occur "
11811 				    "when simplifying division"),
11812 				   WARN_STRICT_OVERFLOW_MISC);
11813 	  return fold_convert_loc (loc, type, tem);
11814 	}
11815 
11816       return NULL_TREE;
11817 
11818     case CEIL_MOD_EXPR:
11819     case FLOOR_MOD_EXPR:
11820     case ROUND_MOD_EXPR:
11821     case TRUNC_MOD_EXPR:
11822       strict_overflow_p = false;
11823       if (TREE_CODE (arg1) == INTEGER_CST
11824 	  && (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11825 				    &strict_overflow_p)) != 0)
11826 	{
11827 	  if (strict_overflow_p)
11828 	    fold_overflow_warning (("assuming signed overflow does not occur "
11829 				    "when simplifying modulus"),
11830 				   WARN_STRICT_OVERFLOW_MISC);
11831 	  return fold_convert_loc (loc, type, tem);
11832 	}
11833 
11834       return NULL_TREE;
11835 
11836     case LROTATE_EXPR:
11837     case RROTATE_EXPR:
11838     case RSHIFT_EXPR:
11839     case LSHIFT_EXPR:
11840       /* Since negative shift count is not well-defined,
11841 	 don't try to compute it in the compiler.  */
11842       if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11843 	return NULL_TREE;
11844 
11845       prec = element_precision (type);
11846 
11847       /* If we have a rotate of a bit operation with the rotate count and
11848 	 the second operand of the bit operation both constant,
11849 	 permute the two operations.  */
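      /* Illustratively (32-bit X): (X & 0xff00ff00) ror 8 becomes
	 (X ror 8) & 0x00ff00ff, rotating the constant as well.  */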
11850       if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11851 	  && (TREE_CODE (arg0) == BIT_AND_EXPR
11852 	      || TREE_CODE (arg0) == BIT_IOR_EXPR
11853 	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
11854 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11855 	{
11856 	  tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11857 	  tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11858 	  return fold_build2_loc (loc, TREE_CODE (arg0), type,
11859 				  fold_build2_loc (loc, code, type,
11860 						   arg00, arg1),
11861 				  fold_build2_loc (loc, code, type,
11862 						   arg01, arg1));
11863 	}
11864 
11865       /* Two consecutive rotates adding up to some integer
11866 	 multiple of the precision of the type can be ignored.  */
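      /* E.g. (32-bit X, illustrative): (X ror 13) ror 19 folds back to
	 plain X, because 13 + 19 == 32.  */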
11867       if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11868 	  && TREE_CODE (arg0) == RROTATE_EXPR
11869 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11870 	  && wi::umod_trunc (wi::to_wide (arg1)
11871 			     + wi::to_wide (TREE_OPERAND (arg0, 1)),
11872 			     prec) == 0)
11873 	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11874 
11875       return NULL_TREE;
11876 
11877     case MIN_EXPR:
11878     case MAX_EXPR:
11879       goto associate;
11880 
11881     case TRUTH_ANDIF_EXPR:
11882       /* Note that the operands of this must be ints
11883 	 and their values must be 0 or 1.
11884 	 ("true" is a fixed value perhaps depending on the language.)  */
11885       /* If first arg is constant zero, return it.  */
11886       if (integer_zerop (arg0))
11887 	return fold_convert_loc (loc, type, arg0);
11888       /* FALLTHRU */
11889     case TRUTH_AND_EXPR:
11890       /* If either arg is constant true, drop it.  */
11891       if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11892 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11893       if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11894 	  /* Preserve sequence points.  */
11895 	  && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11896 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11897       /* If second arg is constant zero, result is zero, but first arg
11898 	 must be evaluated.  */
11899       if (integer_zerop (arg1))
11900 	return omit_one_operand_loc (loc, type, arg1, arg0);
11901       /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11902 	 case will be handled here.  */
11903       if (integer_zerop (arg0))
11904 	return omit_one_operand_loc (loc, type, arg0, arg1);
11905 
11906       /* !X && X is always false.  */
11907       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11908 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11909 	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11910       /* X && !X is always false.  */
11911       if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11912 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11913 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11914 
11915       /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
11916 	 means A >= Y && A != MAX, but in this case we know that
11917 	 A < X <= MAX.  */
11918 
11919       if (!TREE_SIDE_EFFECTS (arg0)
11920 	  && !TREE_SIDE_EFFECTS (arg1))
11921 	{
11922 	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
11923 	  if (tem && !operand_equal_p (tem, arg0, 0))
11924 	    return fold_build2_loc (loc, code, type, tem, arg1);
11925 
11926 	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
11927 	  if (tem && !operand_equal_p (tem, arg1, 0))
11928 	    return fold_build2_loc (loc, code, type, arg0, tem);
11929 	}
11930 
11931       if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
11932           != NULL_TREE)
11933         return tem;
11934 
11935       return NULL_TREE;
11936 
11937     case TRUTH_ORIF_EXPR:
11938       /* Note that the operands of this must be ints
11939 	 and their values must be 0 or true.
11940 	 ("true" is a fixed value perhaps depending on the language.)  */
11941       /* If first arg is constant true, return it.  */
11942       if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11943 	return fold_convert_loc (loc, type, arg0);
11944       /* FALLTHRU */
11945     case TRUTH_OR_EXPR:
11946       /* If either arg is constant zero, drop it.  */
11947       if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11948 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
11949       if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11950 	  /* Preserve sequence points.  */
11951 	  && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11952 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11953       /* If second arg is constant true, result is true, but we must
11954 	 evaluate first arg.  */
11955       if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11956 	return omit_one_operand_loc (loc, type, arg1, arg0);
11957       /* Likewise for first arg, but note this only occurs here for
11958 	 TRUTH_OR_EXPR.  */
11959       if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11960 	return omit_one_operand_loc (loc, type, arg0, arg1);
11961 
11962       /* !X || X is always true.  */
11963       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11964 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11965 	return omit_one_operand_loc (loc, type, integer_one_node, arg1);
11966       /* X || !X is always true.  */
11967       if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11968 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11969 	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
11970 
11971       /* (X && !Y) || (!X && Y) is X ^ Y */
11972       if (TREE_CODE (arg0) == TRUTH_AND_EXPR
11973 	  && TREE_CODE (arg1) == TRUTH_AND_EXPR)
11974         {
11975 	  tree a0, a1, l0, l1, n0, n1;
11976 
11977 	  a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11978 	  a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11979 
11980 	  l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11981 	  l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11982 
11983 	  n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
11984 	  n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
11985 
11986 	  if ((operand_equal_p (n0, a0, 0)
11987 	       && operand_equal_p (n1, a1, 0))
11988 	      || (operand_equal_p (n0, a1, 0)
11989 		  && operand_equal_p (n1, a0, 0)))
11990 	    return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
11991 	}
11992 
11993       if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
11994           != NULL_TREE)
11995         return tem;
11996 
11997       return NULL_TREE;
11998 
11999     case TRUTH_XOR_EXPR:
12000       /* If the second arg is constant zero, drop it.  */
12001       if (integer_zerop (arg1))
12002 	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12003       /* If the second arg is constant true, this is a logical inversion.  */
12004       if (integer_onep (arg1))
12005 	{
12006 	  tem = invert_truthvalue_loc (loc, arg0);
12007 	  return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12008 	}
12009       /* Identical arguments cancel to zero.  */
12010       if (operand_equal_p (arg0, arg1, 0))
12011 	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12012 
12013       /* !X ^ X is always true.  */
12014       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12015 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12016 	return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12017 
12018       /* X ^ !X is always true.  */
12019       if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12020 	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12021 	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12022 
12023       return NULL_TREE;
12024 
12025     case EQ_EXPR:
12026     case NE_EXPR:
12027       STRIP_NOPS (arg0);
12028       STRIP_NOPS (arg1);
12029 
12030       tem = fold_comparison (loc, code, type, op0, op1);
12031       if (tem != NULL_TREE)
12032 	return tem;
12033 
12034       /* bool_var != 1 becomes !bool_var. */
12035       if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12036           && code == NE_EXPR)
12037         return fold_convert_loc (loc, type,
12038 				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12039 						  TREE_TYPE (arg0), arg0));
12040 
12041       /* bool_var == 0 becomes !bool_var. */
12042       if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12043           && code == EQ_EXPR)
12044         return fold_convert_loc (loc, type,
12045 				 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12046 						  TREE_TYPE (arg0), arg0));
12047 
12048       /* !exp != 0 becomes !exp */
12049       if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12050 	  && code == NE_EXPR)
12051         return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12052 
12053       /* If this is an EQ or NE comparison with zero and ARG0 is
12054 	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
12055 	 two operations, but the latter can be done in one less insn
12056 	 on machines that have only two-operand insns or on which a
12057 	 constant cannot be the first operand.  */
12058       if (TREE_CODE (arg0) == BIT_AND_EXPR
12059 	  && integer_zerop (arg1))
12060 	{
12061 	  tree arg00 = TREE_OPERAND (arg0, 0);
12062 	  tree arg01 = TREE_OPERAND (arg0, 1);
12063 	  if (TREE_CODE (arg00) == LSHIFT_EXPR
12064 	      && integer_onep (TREE_OPERAND (arg00, 0)))
12065 	    {
12066 	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12067 					  arg01, TREE_OPERAND (arg00, 1));
12068 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12069 				     build_one_cst (TREE_TYPE (arg0)));
12070 	      return fold_build2_loc (loc, code, type,
12071 				      fold_convert_loc (loc, TREE_TYPE (arg1),
12072 							tem), arg1);
12073 	    }
12074 	  else if (TREE_CODE (arg01) == LSHIFT_EXPR
12075 		   && integer_onep (TREE_OPERAND (arg01, 0)))
12076 	    {
12077 	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12078 					  arg00, TREE_OPERAND (arg01, 1));
12079 	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12080 				     build_one_cst (TREE_TYPE (arg0)));
12081 	      return fold_build2_loc (loc, code, type,
12082 				      fold_convert_loc (loc, TREE_TYPE (arg1),
12083 							tem), arg1);
12084 	    }
12085 	}
12086 
12087       /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12088 	 C1 is a valid shift constant, and C2 is a power of two, i.e.
12089 	 a single bit.  */
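      /* Sketch (assuming a 32-bit signed X): ((X >> 4) & 2) != 0 is
	 rewritten as (X & (2 << 4)) != 0, while ((X >> 31) & 2) != 0,
	 where 2 << 31 would overflow, becomes X < 0.  */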
12090       if (TREE_CODE (arg0) == BIT_AND_EXPR
12091 	  && integer_pow2p (TREE_OPERAND (arg0, 1))
12092 	  && integer_zerop (arg1))
12093 	{
12094 	  tree arg00 = TREE_OPERAND (arg0, 0);
12095 	  STRIP_NOPS (arg00);
12096 	  if (TREE_CODE (arg00) == RSHIFT_EXPR
12097 	      && TREE_CODE (TREE_OPERAND (arg00, 1)) == INTEGER_CST)
12098 	    {
12099 	      tree itype = TREE_TYPE (arg00);
12100 	      tree arg001 = TREE_OPERAND (arg00, 1);
12101 	      prec = TYPE_PRECISION (itype);
12102 
12103 	      /* Check for a valid shift count.  */
12104 	      if (wi::ltu_p (wi::to_wide (arg001), prec))
12105 		{
12106 		  tree arg01 = TREE_OPERAND (arg0, 1);
12107 		  tree arg000 = TREE_OPERAND (arg00, 0);
12108 		  unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12109 		  /* If (C2 << C1) doesn't overflow, then
12110 		     ((X >> C1) & C2) != 0 can be rewritten as
12111 		     (X & (C2 << C1)) != 0.  */
12112 		  if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12113 		    {
12114 		      tem = fold_build2_loc (loc, LSHIFT_EXPR, itype,
12115 					     arg01, arg001);
12116 		      tem = fold_build2_loc (loc, BIT_AND_EXPR, itype,
12117 					     arg000, tem);
12118 		      return fold_build2_loc (loc, code, type, tem,
12119 				fold_convert_loc (loc, itype, arg1));
12120 		    }
12121 		  /* Otherwise, for signed (arithmetic) shifts,
12122 		     ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12123 		     ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
12124 		  else if (!TYPE_UNSIGNED (itype))
12125 		    return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR
12126 								 : LT_EXPR,
12127 					    type, arg000,
12128 					    build_int_cst (itype, 0));
12129 		  /* Otherwise, for unsigned (logical) shifts,
12130 		     ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12131 		     ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
12132 		  else
12133 		    return omit_one_operand_loc (loc, type,
12134 					 code == EQ_EXPR ? integer_one_node
12135 							 : integer_zero_node,
12136 					 arg000);
12137 		}
12138 	    }
12139 	}
12140 
12141       /* If this is a comparison of a field, we may be able to simplify it.  */
12142       if ((TREE_CODE (arg0) == COMPONENT_REF
12143 	   || TREE_CODE (arg0) == BIT_FIELD_REF)
12144 	  /* Handle the constant case even without -O
12145 	     to make sure the warnings are given.  */
12146 	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12147 	{
12148 	  t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12149 	  if (t1)
12150 	    return t1;
12151 	}
12152 
12153       /* Optimize comparisons of strlen vs zero to a compare of the
12154 	 first character of the string vs zero.  To wit,
12155 		strlen(ptr) == 0   =>  *ptr == 0
12156 		strlen(ptr) != 0   =>  *ptr != 0
12157 	 Other cases should reduce to one of these two (or a constant)
12158 	 due to the return value of strlen being unsigned.  */
12159       if (TREE_CODE (arg0) == CALL_EXPR && integer_zerop (arg1))
12160 	{
12161 	  tree fndecl = get_callee_fndecl (arg0);
12162 
12163 	  if (fndecl
12164 	      && fndecl_built_in_p (fndecl, BUILT_IN_STRLEN)
12165 	      && call_expr_nargs (arg0) == 1
12166 	      && (TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0)))
12167 		  == POINTER_TYPE))
12168 	    {
12169 	      tree ptrtype
12170 		= build_pointer_type (build_qualified_type (char_type_node,
12171 							    TYPE_QUAL_CONST));
12172 	      tree ptr = fold_convert_loc (loc, ptrtype,
12173 					   CALL_EXPR_ARG (arg0, 0));
12174 	      tree iref = build_fold_indirect_ref_loc (loc, ptr);
12175 	      return fold_build2_loc (loc, code, type, iref,
12176 				      build_int_cst (TREE_TYPE (iref), 0));
12177 	    }
12178 	}
12179 
12180       /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12181 	 of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
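      /* E.g. for a 32-bit X (illustrative width), (X >> 31) != 0 becomes
	 X < 0 and (X >> 31) == 0 becomes X >= 0, converting an unsigned X
	 to its signed equivalent first.  */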
12182       if (TREE_CODE (arg0) == RSHIFT_EXPR
12183 	  && integer_zerop (arg1)
12184 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12185 	{
12186 	  tree arg00 = TREE_OPERAND (arg0, 0);
12187 	  tree arg01 = TREE_OPERAND (arg0, 1);
12188 	  tree itype = TREE_TYPE (arg00);
12189 	  if (wi::to_wide (arg01) == element_precision (itype) - 1)
12190 	    {
12191 	      if (TYPE_UNSIGNED (itype))
12192 		{
12193 		  itype = signed_type_for (itype);
12194 		  arg00 = fold_convert_loc (loc, itype, arg00);
12195 		}
12196 	      return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12197 				  type, arg00, build_zero_cst (itype));
12198 	    }
12199 	}
12200 
12201       /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12202 	 (X & C) == 0 when C is a single bit.  */
12203       if (TREE_CODE (arg0) == BIT_AND_EXPR
12204 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12205 	  && integer_zerop (arg1)
12206 	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
12207 	{
12208 	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12209 				 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12210 				 TREE_OPERAND (arg0, 1));
12211 	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12212 				  type, tem,
12213 				  fold_convert_loc (loc, TREE_TYPE (arg0),
12214 						    arg1));
12215 	}
12216 
12217       /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12218 	 constant C is a power of two, i.e. a single bit.  */
12219       if (TREE_CODE (arg0) == BIT_XOR_EXPR
12220 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12221 	  && integer_zerop (arg1)
12222 	  && integer_pow2p (TREE_OPERAND (arg0, 1))
12223 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12224 			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12225 	{
12226 	  tree arg00 = TREE_OPERAND (arg0, 0);
12227 	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12228 			      arg00, build_int_cst (TREE_TYPE (arg00), 0));
12229 	}
12230 
12231       /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12232 	 when C is a power of two, i.e. a single bit.  */
12233       if (TREE_CODE (arg0) == BIT_AND_EXPR
12234 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12235 	  && integer_zerop (arg1)
12236 	  && integer_pow2p (TREE_OPERAND (arg0, 1))
12237 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12238 			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12239 	{
12240 	  tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12241 	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12242 			     arg000, TREE_OPERAND (arg0, 1));
12243 	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12244 			      tem, build_int_cst (TREE_TYPE (tem), 0));
12245 	}
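      /* For a single-bit constant, say C == 4 (illustrative), both of the
	 rewrites above reduce to a plain bit test: ((X & 4) ^ 4) == 0 and
	 ((X ^ 4) & 4) == 0 each hold exactly when (X & 4) != 0.  */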
12246 
12247       if (integer_zerop (arg1)
12248 	  && tree_expr_nonzero_p (arg0))
12249         {
12250 	  tree res = constant_boolean_node (code == NE_EXPR, type);
12251 	  return omit_one_operand_loc (loc, type, res, arg0);
12252 	}
12253 
12254       if (TREE_CODE (arg0) == BIT_XOR_EXPR
12255 	  && TREE_CODE (arg1) == BIT_XOR_EXPR)
12256 	{
12257 	  tree arg00 = TREE_OPERAND (arg0, 0);
12258 	  tree arg01 = TREE_OPERAND (arg0, 1);
12259 	  tree arg10 = TREE_OPERAND (arg1, 0);
12260 	  tree arg11 = TREE_OPERAND (arg1, 1);
12261 	  tree itype = TREE_TYPE (arg0);
12262 
12263 	  /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12264 	     operand_equal_p guarantees no side-effects so we don't need
12265 	     to use omit_one_operand on Z.  */
12266 	  if (operand_equal_p (arg01, arg11, 0))
12267 	    return fold_build2_loc (loc, code, type, arg00,
12268 				    fold_convert_loc (loc, TREE_TYPE (arg00),
12269 						      arg10));
12270 	  if (operand_equal_p (arg01, arg10, 0))
12271 	    return fold_build2_loc (loc, code, type, arg00,
12272 				    fold_convert_loc (loc, TREE_TYPE (arg00),
12273 						      arg11));
12274 	  if (operand_equal_p (arg00, arg11, 0))
12275 	    return fold_build2_loc (loc, code, type, arg01,
12276 				    fold_convert_loc (loc, TREE_TYPE (arg01),
12277 						      arg10));
12278 	  if (operand_equal_p (arg00, arg10, 0))
12279 	    return fold_build2_loc (loc, code, type, arg01,
12280 				    fold_convert_loc (loc, TREE_TYPE (arg01),
12281 						      arg11));
12282 
12283 	  /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y.  */
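	  /* E.g. (X ^ 5) == (Y ^ 3) becomes (X ^ (5 ^ 3)) == Y,
	     i.e. (X ^ 6) == Y (illustrative constants).  */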
12284 	  if (TREE_CODE (arg01) == INTEGER_CST
12285 	      && TREE_CODE (arg11) == INTEGER_CST)
12286 	    {
12287 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12288 				     fold_convert_loc (loc, itype, arg11));
12289 	      tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12290 	      return fold_build2_loc (loc, code, type, tem,
12291 				      fold_convert_loc (loc, itype, arg10));
12292 	    }
12293 	}
12294 
12295       /* Attempt to simplify equality/inequality comparisons of complex
12296 	 values.  Only lower the comparison if the result is known or
12297 	 can be simplified to a single scalar comparison.  */
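      /* E.g. COMPLEX_EXPR <X, 0> == COMPLEX_EXPR <Y, 1> lowers to false
	 outright once the imaginary comparison 0 == 1 folds to a constant,
	 while COMPLEX_EXPR <X, 2> != COMPLEX_EXPR <Y, 2> lowers to the
	 single scalar comparison X != Y (illustrative operands).  */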
12298       if ((TREE_CODE (arg0) == COMPLEX_EXPR
12299 	   || TREE_CODE (arg0) == COMPLEX_CST)
12300 	  && (TREE_CODE (arg1) == COMPLEX_EXPR
12301 	      || TREE_CODE (arg1) == COMPLEX_CST))
12302 	{
12303 	  tree real0, imag0, real1, imag1;
12304 	  tree rcond, icond;
12305 
12306 	  if (TREE_CODE (arg0) == COMPLEX_EXPR)
12307 	    {
12308 	      real0 = TREE_OPERAND (arg0, 0);
12309 	      imag0 = TREE_OPERAND (arg0, 1);
12310 	    }
12311 	  else
12312 	    {
12313 	      real0 = TREE_REALPART (arg0);
12314 	      imag0 = TREE_IMAGPART (arg0);
12315 	    }
12316 
12317 	  if (TREE_CODE (arg1) == COMPLEX_EXPR)
12318 	    {
12319 	      real1 = TREE_OPERAND (arg1, 0);
12320 	      imag1 = TREE_OPERAND (arg1, 1);
12321 	    }
12322 	  else
12323 	    {
12324 	      real1 = TREE_REALPART (arg1);
12325 	      imag1 = TREE_IMAGPART (arg1);
12326 	    }
12327 
12328 	  rcond = fold_binary_loc (loc, code, type, real0, real1);
12329 	  if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12330 	    {
12331 	      if (integer_zerop (rcond))
12332 		{
12333 		  if (code == EQ_EXPR)
12334 		    return omit_two_operands_loc (loc, type, boolean_false_node,
12335 					      imag0, imag1);
12336 		  return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12337 		}
12338 	      else
12339 		{
12340 		  if (code == NE_EXPR)
12341 		    return omit_two_operands_loc (loc, type, boolean_true_node,
12342 					      imag0, imag1);
12343 		  return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12344 		}
12345 	    }
12346 
12347 	  icond = fold_binary_loc (loc, code, type, imag0, imag1);
12348 	  if (icond && TREE_CODE (icond) == INTEGER_CST)
12349 	    {
12350 	      if (integer_zerop (icond))
12351 		{
12352 		  if (code == EQ_EXPR)
12353 		    return omit_two_operands_loc (loc, type, boolean_false_node,
12354 					      real0, real1);
12355 		  return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12356 		}
12357 	      else
12358 		{
12359 		  if (code == NE_EXPR)
12360 		    return omit_two_operands_loc (loc, type, boolean_true_node,
12361 					      real0, real1);
12362 		  return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12363 		}
12364 	    }
12365 	}
12366 
12367       return NULL_TREE;
12368 
12369     case LT_EXPR:
12370     case GT_EXPR:
12371     case LE_EXPR:
12372     case GE_EXPR:
12373       tem = fold_comparison (loc, code, type, op0, op1);
12374       if (tem != NULL_TREE)
12375 	return tem;
12376 
12377       /* Transform comparisons of the form X +- C CMP X.  */
12378       if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12379 	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12380 	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12381 	  && !HONOR_SNANS (arg0))
12382 	{
12383 	  tree arg01 = TREE_OPERAND (arg0, 1);
12384 	  enum tree_code code0 = TREE_CODE (arg0);
12385 	  int is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12386 
12387 	  /* (X - c) > X becomes false.  */
12388 	  if (code == GT_EXPR
12389 	      && ((code0 == MINUS_EXPR && is_positive >= 0)
12390 		  || (code0 == PLUS_EXPR && is_positive <= 0)))
12391 	    return constant_boolean_node (0, type);
12392 
12393 	  /* Likewise (X + c) < X becomes false.  */
12394 	  if (code == LT_EXPR
12395 	      && ((code0 == PLUS_EXPR && is_positive >= 0)
12396 		  || (code0 == MINUS_EXPR && is_positive <= 0)))
12397 	    return constant_boolean_node (0, type);
12398 
12399 	  /* Convert (X - c) <= X to true.  */
12400 	  if (!HONOR_NANS (arg1)
12401 	      && code == LE_EXPR
12402 	      && ((code0 == MINUS_EXPR && is_positive >= 0)
12403 		  || (code0 == PLUS_EXPR && is_positive <= 0)))
12404 	    return constant_boolean_node (1, type);
12405 
12406 	  /* Convert (X + c) >= X to true.  */
12407 	  if (!HONOR_NANS (arg1)
12408 	      && code == GE_EXPR
12409 	      && ((code0 == PLUS_EXPR && is_positive >= 0)
12410 		  || (code0 == MINUS_EXPR && is_positive <= 0)))
12411 	    return constant_boolean_node (1, type);
12412 	}
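      /* E.g. with C == 1.0 (illustrative): (X - 1.0) > X folds to false
	 even when quiet NaNs are honored (the whole transform is skipped
	 for signaling NaNs), while (X + 1.0) >= X folds to true only when
	 NaNs need not be honored, since both comparisons are false for a
	 NaN X.  */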
12413 
12414       /* If we are comparing an ABS_EXPR with a constant, we can
12415 	 convert all the cases into explicit comparisons, but they may
12416 	 well not be faster than doing the ABS and one comparison.
12417 	 But ABS (X) <= C is a range comparison, which becomes a subtraction
12418 	 and a comparison, and is probably faster.  */
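      /* E.g. ABS (X) <= 5 becomes X >= -5 && X <= 5, provided negating
	 the constant 5 does not overflow (illustrative constant).  */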
12419       if (code == LE_EXPR
12420 	  && TREE_CODE (arg1) == INTEGER_CST
12421 	  && TREE_CODE (arg0) == ABS_EXPR
12422 	  && ! TREE_SIDE_EFFECTS (arg0)
12423 	  && (tem = negate_expr (arg1)) != 0
12424 	  && TREE_CODE (tem) == INTEGER_CST
12425 	  && !TREE_OVERFLOW (tem))
12426 	return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
12427 			    build2 (GE_EXPR, type,
12428 				    TREE_OPERAND (arg0, 0), tem),
12429 			    build2 (LE_EXPR, type,
12430 				    TREE_OPERAND (arg0, 0), arg1));
12431 
12432       /* Convert ABS_EXPR<x> >= 0 to true.  */
12433       strict_overflow_p = false;
12434       if (code == GE_EXPR
12435 	  && (integer_zerop (arg1)
12436 	      || (! HONOR_NANS (arg0)
12437 		  && real_zerop (arg1)))
12438 	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12439 	{
12440 	  if (strict_overflow_p)
12441 	    fold_overflow_warning (("assuming signed overflow does not occur "
12442 				    "when simplifying comparison of "
12443 				    "absolute value and zero"),
12444 				   WARN_STRICT_OVERFLOW_CONDITIONAL);
12445 	  return omit_one_operand_loc (loc, type,
12446 				       constant_boolean_node (true, type),
12447 				       arg0);
12448 	}
12449 
12450       /* Convert ABS_EXPR<x> < 0 to false.  */
12451       strict_overflow_p = false;
12452       if (code == LT_EXPR
12453 	  && (integer_zerop (arg1) || real_zerop (arg1))
12454 	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12455 	{
12456 	  if (strict_overflow_p)
12457 	    fold_overflow_warning (("assuming signed overflow does not occur "
12458 				    "when simplifying comparison of "
12459 				    "absolute value and zero"),
12460 				   WARN_STRICT_OVERFLOW_CONDITIONAL);
12461 	  return omit_one_operand_loc (loc, type,
12462 				       constant_boolean_node (false, type),
12463 				       arg0);
12464 	}
12465 
12466       /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12467 	 and similarly for >= into !=.  */
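      /* E.g. for an unsigned X, X < (1 << Y) becomes (X >> Y) == 0 and
	 X >= (1 << Y) becomes (X >> Y) != 0 (illustrative forms).  */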
12468       if ((code == LT_EXPR || code == GE_EXPR)
12469 	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
12470 	  && TREE_CODE (arg1) == LSHIFT_EXPR
12471 	  && integer_onep (TREE_OPERAND (arg1, 0)))
12472 	return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12473 			   build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12474 				   TREE_OPERAND (arg1, 1)),
12475 			   build_zero_cst (TREE_TYPE (arg0)));
12476 
12477       /* Similarly for X < (cast) (1 << Y).  But the cast can't be
12478 	 narrowing, otherwise Y might be >= # of bits in X's type and thus
12479 	 e.g. (unsigned char) (1 << Y) for Y == 15 might be 0.
12480 	 If the cast is widening, then 1 << Y should have unsigned type,
12481 	 otherwise if Y is number of bits in the signed shift type minus 1,
12482 	 we can't optimize this.  E.g. (unsigned long long) (1 << Y) for
12483 	 Y == 31 might be 0xffffffff80000000.  */
12484       if ((code == LT_EXPR || code == GE_EXPR)
12485 	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
12486 	  && CONVERT_EXPR_P (arg1)
12487 	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12488 	  && (element_precision (TREE_TYPE (arg1))
12489 	      >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
12490 	  && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
12491 	      || (element_precision (TREE_TYPE (arg1))
12492 		  == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
12493 	  && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12494 	{
12495 	  tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12496 			TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
12497 	  return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12498 			     fold_convert_loc (loc, TREE_TYPE (arg0), tem),
12499 			     build_zero_cst (TREE_TYPE (arg0)));
12500 	}
12501 
12502       return NULL_TREE;
12503 
12504     case UNORDERED_EXPR:
12505     case ORDERED_EXPR:
12506     case UNLT_EXPR:
12507     case UNLE_EXPR:
12508     case UNGT_EXPR:
12509     case UNGE_EXPR:
12510     case UNEQ_EXPR:
12511     case LTGT_EXPR:
12512       /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
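      /* E.g. with float F1, F2 (illustrative), UNLT_EXPR <(double) F1,
	 (double) F2> is reduced to UNLT_EXPR <F1, F2>, since widening
	 both operands cannot change the result.  */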
12513       {
12514 	tree targ0 = strip_float_extensions (arg0);
12515 	tree targ1 = strip_float_extensions (arg1);
12516 	tree newtype = TREE_TYPE (targ0);
12517 
12518 	if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
12519 	  newtype = TREE_TYPE (targ1);
12520 
12521 	if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
12522 	  return fold_build2_loc (loc, code, type,
12523 			      fold_convert_loc (loc, newtype, targ0),
12524 			      fold_convert_loc (loc, newtype, targ1));
12525       }
12526 
12527       return NULL_TREE;
12528 
12529     case COMPOUND_EXPR:
12530       /* When pedantic, a compound expression can be neither an lvalue
12531 	 nor an integer constant expression.  */
12532       if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12533 	return NULL_TREE;
12534       /* Don't let (0, 0) be a null pointer constant.  */
12535       tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
12536 				 : fold_convert_loc (loc, type, arg1);
12537       return pedantic_non_lvalue_loc (loc, tem);
12538 
12539     case ASSERT_EXPR:
12540       /* An ASSERT_EXPR should never be passed to fold_binary.  */
12541       gcc_unreachable ();
12542 
12543     default:
12544       return NULL_TREE;
12545     } /* switch (code) */
12546 }
12547 
12548 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
12549    ((A & N) + B) & M -> (A + B) & M
12550    Similarly if (N & M) == 0,
12551    ((A | N) + B) & M -> (A + B) & M
12552    and for - instead of + (or unary - instead of +)
12553    and/or ^ instead of |.
12554    If B is constant and (B & M) == 0, fold into A & M.
12555 
12556    This function is a helper for match.pd patterns.  Return the non-NULL
12557    type in which the simplified operation should be performed if any
12558    optimization is possible, and NULL_TREE otherwise.
12559 
12560    ARG1 is M above, ARG00 is left operand of +/-, if CODE00 is BIT_*_EXPR,
12561    then ARG00{0,1} are operands of that bitop, otherwise CODE00 is ERROR_MARK.
12562    Similarly for ARG01, CODE01 and ARG01{0,1}, just for the right operand of
12563    +/-.  */
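/* A worked instance of the rules above (illustrative constants): with
   M == 7, i.e. (1LL << 3) - 1,
       ((A & 15) + B) & 7  ->  (A + B) & 7   because (15 & 7) == 7,
       ((A | 8) + B) & 7   ->  (A + B) & 7   because (8 & 7) == 0.  */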
12564 tree
12565 fold_bit_and_mask (tree type, tree arg1, enum tree_code code,
12566 		   tree arg00, enum tree_code code00, tree arg000, tree arg001,
12567 		   tree arg01, enum tree_code code01, tree arg010, tree arg011,
12568 		   tree *pmop)
12569 {
12570   gcc_assert (TREE_CODE (arg1) == INTEGER_CST);
12571   gcc_assert (code == PLUS_EXPR || code == MINUS_EXPR || code == NEGATE_EXPR);
12572   wi::tree_to_wide_ref cst1 = wi::to_wide (arg1);
12573   if (~cst1 == 0
12574       || (cst1 & (cst1 + 1)) != 0
12575       || !INTEGRAL_TYPE_P (type)
12576       || (!TYPE_OVERFLOW_WRAPS (type)
12577 	  && TREE_CODE (type) != INTEGER_TYPE)
12578       || (wi::max_value (type) & cst1) != cst1)
12579     return NULL_TREE;
12580 
12581   enum tree_code codes[2] = { code00, code01 };
12582   tree arg0xx[4] = { arg000, arg001, arg010, arg011 };
12583   int which = 0;
12584   wide_int cst0;
12585 
12586   /* Now we know that arg0 is (C + D) or (C - D) or -C and
12587      arg1 (M) is equal to (1LL << cst) - 1.
12588      Store C into PMOP[0] and D into PMOP[1].  */
12589   pmop[0] = arg00;
12590   pmop[1] = arg01;
12591   which = code != NEGATE_EXPR;
12592 
12593   for (; which >= 0; which--)
12594     switch (codes[which])
12595       {
12596       case BIT_AND_EXPR:
12597       case BIT_IOR_EXPR:
12598       case BIT_XOR_EXPR:
12599 	gcc_assert (TREE_CODE (arg0xx[2 * which + 1]) == INTEGER_CST);
12600 	cst0 = wi::to_wide (arg0xx[2 * which + 1]) & cst1;
12601 	if (codes[which] == BIT_AND_EXPR)
12602 	  {
12603 	    if (cst0 != cst1)
12604 	      break;
12605 	  }
12606 	else if (cst0 != 0)
12607 	  break;
12608 	/* If C or D is of the form (A & N) where
12609 	   (N & M) == M, or of the form (A | N) or
12610 	   (A ^ N) where (N & M) == 0, replace it with A.  */
12611 	pmop[which] = arg0xx[2 * which];
12612 	break;
12613       case ERROR_MARK:
12614 	if (TREE_CODE (pmop[which]) != INTEGER_CST)
12615 	  break;
12616 	/* If C or D is a constant N where (N & M) == 0, it can be
12617 	   omitted (replaced with 0).  */
12618 	if ((code == PLUS_EXPR
12619 	     || (code == MINUS_EXPR && which == 0))
12620 	    && (cst1 & wi::to_wide (pmop[which])) == 0)
12621 	  pmop[which] = build_int_cst (type, 0);
12622 	/* Similarly, with C - N where (-N & M) == 0.  */
12623 	if (code == MINUS_EXPR
12624 	    && which == 1
12625 	    && (cst1 & -wi::to_wide (pmop[which])) == 0)
12626 	  pmop[which] = build_int_cst (type, 0);
12627 	break;
12628       default:
12629 	gcc_unreachable ();
12630       }
12631 
12632   /* Only build anything new if we optimized one or both arguments above.  */
12633   if (pmop[0] == arg00 && pmop[1] == arg01)
12634     return NULL_TREE;
12635 
12636   if (TYPE_OVERFLOW_WRAPS (type))
12637     return type;
12638   else
12639     return unsigned_type_for (type);
12640 }
12641 
12642 /* Data shared by contains_label_1 and contains_label_p.  */
12643 
12644 struct contains_label_data
12645 {
12646   hash_set<tree> *pset;
12647   bool inside_switch_p;
12648 };
12649 
12650 /* Callback for walk_tree, looking for LABEL_EXPR.  Return *TP if it is
12651    a LABEL_EXPR or CASE_LABEL_EXPR not inside of another SWITCH_EXPR; otherwise
12652    return NULL_TREE.  Do not check the subtrees of GOTO_EXPR.  */
12653 
12654 static tree
12655 contains_label_1 (tree *tp, int *walk_subtrees, void *data)
12656 {
12657   contains_label_data *d = (contains_label_data *) data;
12658   switch (TREE_CODE (*tp))
12659     {
12660     case LABEL_EXPR:
12661       return *tp;
12662 
12663     case CASE_LABEL_EXPR:
12664       if (!d->inside_switch_p)
12665 	return *tp;
12666       return NULL_TREE;
12667 
12668     case SWITCH_EXPR:
12669       if (!d->inside_switch_p)
12670 	{
12671 	  if (walk_tree (&SWITCH_COND (*tp), contains_label_1, data, d->pset))
12672 	    return *tp;
12673 	  d->inside_switch_p = true;
12674 	  if (walk_tree (&SWITCH_BODY (*tp), contains_label_1, data, d->pset))
12675 	    return *tp;
12676 	  d->inside_switch_p = false;
12677 	  *walk_subtrees = 0;
12678 	}
12679       return NULL_TREE;
12680 
12681     case GOTO_EXPR:
12682       *walk_subtrees = 0;
12683       return NULL_TREE;
12684 
12685     default:
12686       return NULL_TREE;
12687     }
12688 }
12689 
12690 /* Return whether the sub-tree ST contains a label which is accessible from
12691    outside the sub-tree.  */
12692 
12693 static bool
12694 contains_label_p (tree st)
12695 {
12696   hash_set<tree> pset;
12697   contains_label_data data = { &pset, false };
12698   return walk_tree (&st, contains_label_1, &data, &pset) != NULL_TREE;
12699 }
12700 
12701 /* Fold a ternary expression of code CODE and type TYPE with operands
12702    OP0, OP1, and OP2.  Return the folded expression if folding is
12703    successful.  Otherwise, return NULL_TREE.  */
12704 
12705 tree
12706 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
12707 		  tree op0, tree op1, tree op2)
12708 {
12709   tree tem;
12710   tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
12711   enum tree_code_class kind = TREE_CODE_CLASS (code);
12712 
12713   gcc_assert (IS_EXPR_CODE_CLASS (kind)
12714 	      && TREE_CODE_LENGTH (code) == 3);
12715 
12716   /* If this is a commutative operation, and OP0 is a constant, move it
12717      to OP1 to reduce the number of tests below.  */
12718   if (commutative_ternary_tree_code (code)
12719       && tree_swap_operands_p (op0, op1))
12720     return fold_build3_loc (loc, code, type, op1, op0, op2);
12721 
12722   tem = generic_simplify (loc, code, type, op0, op1, op2);
12723   if (tem)
12724     return tem;
12725 
12726   /* Strip any conversions that don't change the mode.  This is safe
12727      for every expression, except for a comparison expression because
12728      its signedness is derived from its operands.  So, in the latter
12729      case, only strip conversions that don't change the signedness.
12730 
12731      Note that this is done as an internal manipulation within the
12732      constant folder, in order to find the simplest representation of
12733      the arguments so that their form can be studied.  In any case,
12734      the appropriate type conversions should be put back in the tree
12735      that will get out of the constant folder.  */
12736   if (op0)
12737     {
12738       arg0 = op0;
12739       STRIP_NOPS (arg0);
12740     }
12741 
12742   if (op1)
12743     {
12744       arg1 = op1;
12745       STRIP_NOPS (arg1);
12746     }
12747 
12748   if (op2)
12749     {
12750       arg2 = op2;
12751       STRIP_NOPS (arg2);
12752     }
12753 
12754   switch (code)
12755     {
12756     case COMPONENT_REF:
12757       if (TREE_CODE (arg0) == CONSTRUCTOR
12758 	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12759 	{
12760 	  unsigned HOST_WIDE_INT idx;
12761 	  tree field, value;
12762 	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12763 	    if (field == arg1)
12764 	      return value;
12765 	}
12766       return NULL_TREE;
12767 
12768     case COND_EXPR:
12769     case VEC_COND_EXPR:
12770       /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12771 	 so all simple results must be passed through pedantic_non_lvalue.  */
12772       if (TREE_CODE (arg0) == INTEGER_CST)
12773 	{
12774 	  tree unused_op = integer_zerop (arg0) ? op1 : op2;
12775 	  tem = integer_zerop (arg0) ? op2 : op1;
12776 	  /* Only optimize constant conditions when the selected branch
12777 	     has the same type as the COND_EXPR.  This avoids optimizing
12778              away "c ? x : throw", where the throw has a void type.
12779              Also avoid throwing away an operand that contains a label.  */
12780           if ((!TREE_SIDE_EFFECTS (unused_op)
12781                || !contains_label_p (unused_op))
12782               && (! VOID_TYPE_P (TREE_TYPE (tem))
12783                   || VOID_TYPE_P (type)))
12784 	    return pedantic_non_lvalue_loc (loc, tem);
12785 	  return NULL_TREE;
12786 	}
12787       else if (TREE_CODE (arg0) == VECTOR_CST)
12788 	{
12789 	  unsigned HOST_WIDE_INT nelts;
12790 	  if ((TREE_CODE (arg1) == VECTOR_CST
12791 	       || TREE_CODE (arg1) == CONSTRUCTOR)
12792 	      && (TREE_CODE (arg2) == VECTOR_CST
12793 		  || TREE_CODE (arg2) == CONSTRUCTOR)
12794 	      && TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
12795 	    {
12796 	      vec_perm_builder sel (nelts, nelts, 1);
12797 	      for (unsigned int i = 0; i < nelts; i++)
12798 		{
12799 		  tree val = VECTOR_CST_ELT (arg0, i);
12800 		  if (integer_all_onesp (val))
12801 		    sel.quick_push (i);
12802 		  else if (integer_zerop (val))
12803 		    sel.quick_push (nelts + i);
12804 		  else /* Currently unreachable.  */
12805 		    return NULL_TREE;
12806 		}
12807 	      vec_perm_indices indices (sel, 2, nelts);
12808 	      tree t = fold_vec_perm (type, arg1, arg2, indices);
12809 	      if (t != NULL_TREE)
12810 		return t;
12811 	    }
12812 	}
12813 
12814       /* If we have A op B ? A : C, we may be able to convert this to a
12815 	 simpler expression, depending on the operation and the values
12816 	 of B and C.  Signed zeros prevent all of these transformations,
12817 	 for reasons given above each one.
12818 
12819          Also try swapping the arguments and inverting the conditional.  */
12820       if (COMPARISON_CLASS_P (arg0)
12821 	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op1)
12822 	  && !HONOR_SIGNED_ZEROS (element_mode (op1)))
12823 	{
12824 	  tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
12825 	  if (tem)
12826 	    return tem;
12827 	}
12828 
12829       if (COMPARISON_CLASS_P (arg0)
12830 	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0), op2)
12831 	  && !HONOR_SIGNED_ZEROS (element_mode (op2)))
12832 	{
12833 	  location_t loc0 = expr_location_or (arg0, loc);
12834 	  tem = fold_invert_truthvalue (loc0, arg0);
12835 	  if (tem && COMPARISON_CLASS_P (tem))
12836 	    {
12837 	      tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
12838 	      if (tem)
12839 		return tem;
12840 	    }
12841 	}
12842 
12843       /* If the second operand is simpler than the third, swap them
12844 	 since that produces better jump optimization results.  */
12845       if (truth_value_p (TREE_CODE (arg0))
12846 	  && tree_swap_operands_p (op1, op2))
12847 	{
12848 	  location_t loc0 = expr_location_or (arg0, loc);
12849 	  /* See if this can be inverted.  If it can't, possibly because
12850 	     it was a floating-point inequality comparison, don't do
12851 	     anything.  */
12852 	  tem = fold_invert_truthvalue (loc0, arg0);
12853 	  if (tem)
12854 	    return fold_build3_loc (loc, code, type, tem, op2, op1);
12855 	}
12856 
12857       /* Convert A ? 1 : 0 to simply A.  */
12858       if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
12859 				 : (integer_onep (op1)
12860 				    && !VECTOR_TYPE_P (type)))
12861 	  && integer_zerop (op2)
12862 	  /* If we try to convert OP0 to our type, the
12863 	     call to fold will try to move the conversion inside
12864 	     a COND, which will recurse.  In that case, the COND_EXPR
12865 	     is probably the best choice, so leave it alone.  */
12866 	  && type == TREE_TYPE (arg0))
12867 	return pedantic_non_lvalue_loc (loc, arg0);
12868 
12869       /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
12870 	 over COND_EXPR in cases such as floating point comparisons.  */
12871       if (integer_zerop (op1)
12872 	  && code == COND_EXPR
12873 	  && integer_onep (op2)
12874 	  && !VECTOR_TYPE_P (type)
12875 	  && truth_value_p (TREE_CODE (arg0)))
12876 	return pedantic_non_lvalue_loc (loc,
12877 				    fold_convert_loc (loc, type,
12878 					      invert_truthvalue_loc (loc,
12879 								     arg0)));
12880 
12881       /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
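      /* E.g. for a 32-bit int A (illustrative):
	     A < 0 ? INT_MIN : 0   becomes   A & INT_MIN,
	 since INT_MIN is exactly the sign bit of A.  */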
12882       if (TREE_CODE (arg0) == LT_EXPR
12883 	  && integer_zerop (TREE_OPERAND (arg0, 1))
12884 	  && integer_zerop (op2)
12885 	  && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
12886 	{
12887 	  /* sign_bit_p looks through both zero and sign extensions,
12888 	     but for this optimization only sign extensions are
12889 	     usable.  */
12890 	  tree tem2 = TREE_OPERAND (arg0, 0);
12891 	  while (tem != tem2)
12892 	    {
12893 	      if (TREE_CODE (tem2) != NOP_EXPR
12894 		  || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
12895 		{
12896 		  tem = NULL_TREE;
12897 		  break;
12898 		}
12899 	      tem2 = TREE_OPERAND (tem2, 0);
12900 	    }
12901 	  /* sign_bit_p only checks ARG1 bits within A's precision.
12902 	     If <sign bit of A> has wider type than A, bits outside
12903 	     of A's precision in <sign bit of A> need to be checked.
12904 	     If they are all 0, this optimization needs to be done in
12905 	     A's unsigned type; if they are all 1, in A's signed type;
12906 	     otherwise it can't be done.  */
12907 	  if (tem
12908 	      && TYPE_PRECISION (TREE_TYPE (tem))
12909 		 < TYPE_PRECISION (TREE_TYPE (arg1))
12910 	      && TYPE_PRECISION (TREE_TYPE (tem))
12911 		 < TYPE_PRECISION (type))
12912 	    {
12913 	      int inner_width, outer_width;
12914 	      tree tem_type;
12915 
12916 	      inner_width = TYPE_PRECISION (TREE_TYPE (tem));
12917 	      outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
12918 	      if (outer_width > TYPE_PRECISION (type))
12919 		outer_width = TYPE_PRECISION (type);
12920 
12921 	      wide_int mask = wi::shifted_mask
12922 		(inner_width, outer_width - inner_width, false,
12923 		 TYPE_PRECISION (TREE_TYPE (arg1)));
12924 
12925 	      wide_int common = mask & wi::to_wide (arg1);
12926 	      if (common == mask)
12927 		{
12928 		  tem_type = signed_type_for (TREE_TYPE (tem));
12929 		  tem = fold_convert_loc (loc, tem_type, tem);
12930 		}
12931 	      else if (common == 0)
12932 		{
12933 		  tem_type = unsigned_type_for (TREE_TYPE (tem));
12934 		  tem = fold_convert_loc (loc, tem_type, tem);
12935 		}
12936 	      else
12937 		tem = NULL;
12938 	    }
12939 
12940 	  if (tem)
12941 	    return
12942 	      fold_convert_loc (loc, type,
12943 				fold_build2_loc (loc, BIT_AND_EXPR,
12944 					     TREE_TYPE (tem), tem,
12945 					     fold_convert_loc (loc,
12946 							       TREE_TYPE (tem),
12947 							       arg1)));
12948 	}
12949 
12950       /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
12951 	 already handled above.  */
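      /* E.g. with N == 3 (illustrative):
	     ((A >> 3) & 1) ? 8 : 0   becomes   A & 8.  */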
12952       if (TREE_CODE (arg0) == BIT_AND_EXPR
12953 	  && integer_onep (TREE_OPERAND (arg0, 1))
12954 	  && integer_zerop (op2)
12955 	  && integer_pow2p (arg1))
12956 	{
12957 	  tree tem = TREE_OPERAND (arg0, 0);
12958 	  STRIP_NOPS (tem);
12959 	  if (TREE_CODE (tem) == RSHIFT_EXPR
12960 	      && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
12961               && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
12962 		 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
12963 	    return fold_build2_loc (loc, BIT_AND_EXPR, type,
12964 				    fold_convert_loc (loc, type,
12965 						      TREE_OPERAND (tem, 0)),
12966 				    op1);
12967 	}
12968 
12969       /* A & N ? N : 0 is simply A & N if N is a power of two.  This
12970 	 is probably obsolete because the first operand should be a
12971 	 truth value (that's why we have the two cases above), but let's
12972 	 leave it in until we can confirm this for all front-ends.  */
12973       if (integer_zerop (op2)
12974 	  && TREE_CODE (arg0) == NE_EXPR
12975 	  && integer_zerop (TREE_OPERAND (arg0, 1))
12976 	  && integer_pow2p (arg1)
12977 	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12978 	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12979 			      arg1, OEP_ONLY_CONST)
12980 	  /* operand_equal_p compares just the value, not the precision, so
12981 	     e.g. arg1 could be an 8-bit -128, which is a power of two, while
12982 	     the BIT_AND_EXPR's second operand is a 32-bit -128, which is not
12983 	     a power of two (or vice versa).  */
12984 	  && integer_pow2p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)))
12985 	return pedantic_non_lvalue_loc (loc,
12986 					fold_convert_loc (loc, type,
12987 							  TREE_OPERAND (arg0,
12988 									0)));
12989 
12990       /* Disable the transformations below for vectors, since
12991 	 fold_binary_op_with_conditional_arg may undo them immediately,
12992 	 yielding an infinite loop.  */
12993       if (code == VEC_COND_EXPR)
12994 	return NULL_TREE;
12995 
12996       /* Convert A ? B : 0 into A && B if A and B are truth values.  */
12997       if (integer_zerop (op2)
12998 	  && truth_value_p (TREE_CODE (arg0))
12999 	  && truth_value_p (TREE_CODE (arg1))
13000 	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13001 	return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
13002 							   : TRUTH_ANDIF_EXPR,
13003 				type, fold_convert_loc (loc, type, arg0), op1);
13004 
13005       /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
13006       if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
13007 	  && truth_value_p (TREE_CODE (arg0))
13008 	  && truth_value_p (TREE_CODE (arg1))
13009 	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13010 	{
13011 	  location_t loc0 = expr_location_or (arg0, loc);
13012 	  /* Only perform transformation if ARG0 is easily inverted.  */
13013 	  tem = fold_invert_truthvalue (loc0, arg0);
13014 	  if (tem)
13015 	    return fold_build2_loc (loc, code == VEC_COND_EXPR
13016 					 ? BIT_IOR_EXPR
13017 					 : TRUTH_ORIF_EXPR,
13018 				    type, fold_convert_loc (loc, type, tem),
13019 				    op1);
13020 	}
13021 
13022       /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
13023       if (integer_zerop (arg1)
13024 	  && truth_value_p (TREE_CODE (arg0))
13025 	  && truth_value_p (TREE_CODE (op2))
13026 	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13027 	{
13028 	  location_t loc0 = expr_location_or (arg0, loc);
13029 	  /* Only perform transformation if ARG0 is easily inverted.  */
13030 	  tem = fold_invert_truthvalue (loc0, arg0);
13031 	  if (tem)
13032 	    return fold_build2_loc (loc, code == VEC_COND_EXPR
13033 					 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
13034 				    type, fold_convert_loc (loc, type, tem),
13035 				    op2);
13036 	}
13037 
13038       /* Convert A ? 1 : B into A || B if A and B are truth values.  */
13039       if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
13040 	  && truth_value_p (TREE_CODE (arg0))
13041 	  && truth_value_p (TREE_CODE (op2))
13042 	  && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13043 	return fold_build2_loc (loc, code == VEC_COND_EXPR
13044 				     ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
13045 				type, fold_convert_loc (loc, type, arg0), op2);
13046 
13047       return NULL_TREE;
13048 
13049     case CALL_EXPR:
13050       /* CALL_EXPRs used to be ternary exprs.  Catch any mistaken uses
13051 	 of fold_ternary on them.  */
13052       gcc_unreachable ();
13053 
13054     case BIT_FIELD_REF:
13055       if (TREE_CODE (arg0) == VECTOR_CST
13056 	  && (type == TREE_TYPE (TREE_TYPE (arg0))
13057 	      || (VECTOR_TYPE_P (type)
13058 		  && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0))))
13059 	  && tree_fits_uhwi_p (op1)
13060 	  && tree_fits_uhwi_p (op2))
13061 	{
13062 	  tree eltype = TREE_TYPE (TREE_TYPE (arg0));
13063 	  unsigned HOST_WIDE_INT width
13064 	    = (TREE_CODE (eltype) == BOOLEAN_TYPE
13065 	       ? TYPE_PRECISION (eltype) : tree_to_uhwi (TYPE_SIZE (eltype)));
13066 	  unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
13067 	  unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
13068 
13069 	  if (n != 0
13070 	      && (idx % width) == 0
13071 	      && (n % width) == 0
13072 	      && known_le ((idx + n) / width,
13073 			   TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))))
13074 	    {
13075 	      idx = idx / width;
13076 	      n = n / width;
13077 
13078 	      if (TREE_CODE (arg0) == VECTOR_CST)
13079 		{
13080 		  if (n == 1)
13081 		    {
13082 		      tem = VECTOR_CST_ELT (arg0, idx);
13083 		      if (VECTOR_TYPE_P (type))
13084 			tem = fold_build1 (VIEW_CONVERT_EXPR, type, tem);
13085 		      return tem;
13086 		    }
13087 
13088 		  tree_vector_builder vals (type, n, 1);
13089 		  for (unsigned i = 0; i < n; ++i)
13090 		    vals.quick_push (VECTOR_CST_ELT (arg0, idx + i));
13091 		  return vals.build ();
13092 		}
13093 	    }
13094 	}
13095 
13096       /* On constants we can use native encode/interpret to constant
13097          fold (nearly) all BIT_FIELD_REFs.  */
13098       if (CONSTANT_CLASS_P (arg0)
13099 	  && can_native_interpret_type_p (type)
13100 	  && BITS_PER_UNIT == 8
13101 	  && tree_fits_uhwi_p (op1)
13102 	  && tree_fits_uhwi_p (op2))
13103 	{
13104 	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13105 	  unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
13106 	  /* Limit us to a reasonable amount of work.  To relax the
13107 	     other limitations we would need bit-shifting of the buffer
13108 	     and rounding up the size.  */
13109 	  if (bitpos % BITS_PER_UNIT == 0
13110 	      && bitsize % BITS_PER_UNIT == 0
13111 	      && bitsize <= MAX_BITSIZE_MODE_ANY_MODE)
13112 	    {
13113 	      unsigned char b[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
13114 	      unsigned HOST_WIDE_INT len
13115 		= native_encode_expr (arg0, b, bitsize / BITS_PER_UNIT,
13116 				      bitpos / BITS_PER_UNIT);
13117 	      if (len > 0
13118 		  && len * BITS_PER_UNIT >= bitsize)
13119 		{
13120 		  tree v = native_interpret_expr (type, b,
13121 						  bitsize / BITS_PER_UNIT);
13122 		  if (v)
13123 		    return v;
13124 		}
13125 	    }
13126 	}
13127 
13128       return NULL_TREE;
13129 
13130     case VEC_PERM_EXPR:
13131       /* Perform constant folding of BIT_INSERT_EXPR.  */
13132       if (TREE_CODE (arg2) == VECTOR_CST
13133 	  && TREE_CODE (op0) == VECTOR_CST
13134 	  && TREE_CODE (op1) == VECTOR_CST)
13135 	{
13136 	  /* Build a vector of integers from the tree mask.  */
13137 	  vec_perm_builder builder;
13138 	  if (!tree_to_vec_perm_builder (&builder, arg2))
13139 	    return NULL_TREE;
13140 
13141 	  /* Create a vec_perm_indices for the integer vector.  */
13142 	  poly_uint64 nelts = TYPE_VECTOR_SUBPARTS (type);
13143 	  bool single_arg = (op0 == op1);
13144 	  vec_perm_indices sel (builder, single_arg ? 1 : 2, nelts);
13145 	  return fold_vec_perm (type, op0, op1, sel);
13146 	}
13147       return NULL_TREE;
13148 
13149     case BIT_INSERT_EXPR:
13150       /* Perform (partial) constant folding of BIT_INSERT_EXPR.  */
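      /* E.g. inserting the 8-bit value 0xab at bit position 8 into the
	 32-bit constant 0x12345678 yields
	     (0x12345678 & ~0xff00) | (0xab << 8) == 0x1234ab78
	 (illustrative constants).  */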
13151       if (TREE_CODE (arg0) == INTEGER_CST
13152 	  && TREE_CODE (arg1) == INTEGER_CST)
13153 	{
13154 	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13155 	  unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1));
13156 	  wide_int tem = (wi::to_wide (arg0)
13157 			  & wi::shifted_mask (bitpos, bitsize, true,
13158 					      TYPE_PRECISION (type)));
13159 	  wide_int tem2
13160 	    = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)),
13161 				    bitsize), bitpos);
13162 	  return wide_int_to_tree (type, wi::bit_or (tem, tem2));
13163 	}
13164       else if (TREE_CODE (arg0) == VECTOR_CST
13165 	       && CONSTANT_CLASS_P (arg1)
13166 	       && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)),
13167 				      TREE_TYPE (arg1)))
13168 	{
13169 	  unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13170 	  unsigned HOST_WIDE_INT elsize
13171 	    = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1)));
13172 	  if (bitpos % elsize == 0)
13173 	    {
13174 	      unsigned k = bitpos / elsize;
13175 	      unsigned HOST_WIDE_INT nelts;
13176 	      if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0))
13177 		return arg0;
13178 	      else if (VECTOR_CST_NELTS (arg0).is_constant (&nelts))
13179 		{
13180 		  tree_vector_builder elts (type, nelts, 1);
13181 		  elts.quick_grow (nelts);
13182 		  for (unsigned HOST_WIDE_INT i = 0; i < nelts; ++i)
13183 		    elts[i] = (i == k ? arg1 : VECTOR_CST_ELT (arg0, i));
13184 		  return elts.build ();
13185 		}
13186 	    }
13187 	}
13188       return NULL_TREE;
13189 
13190     default:
13191       return NULL_TREE;
13192     } /* switch (code) */
13193 }
13194 
13195 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
13196    of an array (or vector).  *CTOR_IDX, if non-NULL, is updated with the
13197    constructor element index of the value returned.  If the element is
13198    not found, NULL_TREE is returned and *CTOR_IDX is updated to
13199    the index of the element after the ACCESS_INDEX position (which
13200    may be outside of the CTOR array).  */
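/* For instance (illustrative), given the GNU C initializer
       int a[8] = { [2 ... 4] = 7, 9 };
   an ACCESS_INDEX of 3 falls inside the RANGE_EXPR and returns 7, an
   ACCESS_INDEX of 5 returns 9 (the element following the range), and
   an ACCESS_INDEX of 6 returns NULL_TREE with *CTOR_IDX pointing past
   the last element.  */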
13201 
13202 tree
13203 get_array_ctor_element_at_index (tree ctor, offset_int access_index,
13204 				 unsigned *ctor_idx)
13205 {
13206   tree index_type = NULL_TREE;
13207   signop index_sgn = UNSIGNED;
13208   offset_int low_bound = 0;
13209 
13210   if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
13211     {
13212       tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
13213       if (domain_type && TYPE_MIN_VALUE (domain_type))
13214 	{
13215 	  /* Static constructors for variably sized objects make no sense.  */
13216 	  gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
13217 	  index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
13218 	  /* ???  When it is obvious that the range is signed, treat it so.  */
13219 	  if (TYPE_UNSIGNED (index_type)
13220 	      && TYPE_MAX_VALUE (domain_type)
13221 	      && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type),
13222 				  TYPE_MIN_VALUE (domain_type)))
13223 	    {
13224 	      index_sgn = SIGNED;
13225 	      low_bound
13226 		= offset_int::from (wi::to_wide (TYPE_MIN_VALUE (domain_type)),
13227 				    SIGNED);
13228 	    }
13229 	  else
13230 	    {
13231 	      index_sgn = TYPE_SIGN (index_type);
13232 	      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
13233 	    }
13234 	}
13235     }
13236 
13237   if (index_type)
13238     access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
13239 			    index_sgn);
13240 
13241   offset_int index = low_bound;
13242   if (index_type)
13243     index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
13244 
13245   offset_int max_index = index;
13246   unsigned cnt;
13247   tree cfield, cval;
13248   bool first_p = true;
13249 
13250   FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
13251     {
13252       /* An array constructor might explicitly set the index, or specify
13253 	 a range, or leave the index NULL, meaning that it is the next index
13254 	 after the previous one.  */
13255       if (cfield)
13256 	{
13257 	  if (TREE_CODE (cfield) == INTEGER_CST)
13258 	    max_index = index
13259 	      = offset_int::from (wi::to_wide (cfield), index_sgn);
13260 	  else
13261 	    {
13262 	      gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
13263 	      index = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 0)),
13264 					index_sgn);
13265 	      max_index
13266 	        = offset_int::from (wi::to_wide (TREE_OPERAND (cfield, 1)),
13267 				    index_sgn);
13268 	      gcc_checking_assert (wi::le_p (index, max_index, index_sgn));
13269 	    }
13270 	}
13271       else if (!first_p)
13272 	{
13273 	  index = max_index + 1;
13274 	  if (index_type)
13275 	    index = wi::ext (index, TYPE_PRECISION (index_type), index_sgn);
13276 	  gcc_checking_assert (wi::gt_p (index, max_index, index_sgn));
13277 	  max_index = index;
13278 	}
13279       else
13280 	first_p = false;
13281 
13282       /* Do we have a match?  */
13283       if (wi::cmp (access_index, index, index_sgn) >= 0)
13284 	{
13285 	  if (wi::cmp (access_index, max_index, index_sgn) <= 0)
13286 	    {
13287 	      if (ctor_idx)
13288 		*ctor_idx = cnt;
13289 	      return cval;
13290 	    }
13291 	}
13292       else if (in_gimple_form)
13293 	/* We're past the element we're searching for.  Note that during parsing
13294 	   the elements might not be sorted.
13295 	   ???  We should use a binary search and a flag on the
13296 	   CONSTRUCTOR as to whether elements are sorted in declaration
13297 	   order.  */
13298 	break;
13299     }
13300   if (ctor_idx)
13301     *ctor_idx = cnt;
13302   return NULL_TREE;
13303 }
13304 
13305 /* Perform constant folding and related simplification of EXPR.
13306    The related simplifications include x*1 => x, x*0 => 0, etc.,
13307    and application of the associative law.
13308    NOP_EXPR conversions may be removed freely (as long as we
13309    are careful not to change the type of the overall expression).
13310    We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13311    but we can constant-fold them if they have constant operands.  */
13312 
13313 #ifdef ENABLE_FOLD_CHECKING
13314 # define fold(x) fold_1 (x)
13315 static tree fold_1 (tree);
13316 static
13317 #endif
13318 tree
13319 fold (tree expr)
13320 {
13321   const tree t = expr;
13322   enum tree_code code = TREE_CODE (t);
13323   enum tree_code_class kind = TREE_CODE_CLASS (code);
13324   tree tem;
13325   location_t loc = EXPR_LOCATION (expr);
13326 
13327   /* Return right away if a constant.  */
13328   if (kind == tcc_constant)
13329     return t;
13330 
13331   /* CALL_EXPR-like objects with variable numbers of operands are
13332      treated specially.  */
13333   if (kind == tcc_vl_exp)
13334     {
13335       if (code == CALL_EXPR)
13336 	{
13337 	  tem = fold_call_expr (loc, expr, false);
13338 	  return tem ? tem : expr;
13339 	}
13340       return expr;
13341     }
13342 
13343   if (IS_EXPR_CODE_CLASS (kind))
13344     {
13345       tree type = TREE_TYPE (t);
13346       tree op0, op1, op2;
13347 
13348       switch (TREE_CODE_LENGTH (code))
13349 	{
13350 	case 1:
13351 	  op0 = TREE_OPERAND (t, 0);
13352 	  tem = fold_unary_loc (loc, code, type, op0);
13353 	  return tem ? tem : expr;
13354 	case 2:
13355 	  op0 = TREE_OPERAND (t, 0);
13356 	  op1 = TREE_OPERAND (t, 1);
13357 	  tem = fold_binary_loc (loc, code, type, op0, op1);
13358 	  return tem ? tem : expr;
13359 	case 3:
13360 	  op0 = TREE_OPERAND (t, 0);
13361 	  op1 = TREE_OPERAND (t, 1);
13362 	  op2 = TREE_OPERAND (t, 2);
13363 	  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13364 	  return tem ? tem : expr;
13365 	default:
13366 	  break;
13367 	}
13368     }
13369 
13370   switch (code)
13371     {
13372     case ARRAY_REF:
13373       {
13374 	tree op0 = TREE_OPERAND (t, 0);
13375 	tree op1 = TREE_OPERAND (t, 1);
13376 
13377 	if (TREE_CODE (op1) == INTEGER_CST
13378 	    && TREE_CODE (op0) == CONSTRUCTOR
13379 	    && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13380 	  {
13381 	    tree val = get_array_ctor_element_at_index (op0,
13382 							wi::to_offset (op1));
13383 	    if (val)
13384 	      return val;
13385 	  }
13386 
13387 	return t;
13388       }
13389 
13390       /* Return a VECTOR_CST if possible.  */
13391     case CONSTRUCTOR:
13392       {
13393 	tree type = TREE_TYPE (t);
13394 	if (TREE_CODE (type) != VECTOR_TYPE)
13395 	  return t;
13396 
13397 	unsigned i;
13398 	tree val;
13399 	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
13400 	  if (! CONSTANT_CLASS_P (val))
13401 	    return t;
13402 
13403 	return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
13404       }
13405 
13406     case CONST_DECL:
13407       return fold (DECL_INITIAL (t));
13408 
13409     default:
13410       return t;
13411     } /* switch (code) */
13412 }
13413 
13414 #ifdef ENABLE_FOLD_CHECKING
13415 #undef fold
13416 
13417 static void fold_checksum_tree (const_tree, struct md5_ctx *,
13418 				hash_table<nofree_ptr_hash<const tree_node> > *);
13419 static void fold_check_failed (const_tree, const_tree);
13420 void print_fold_checksum (const_tree);
13421 
13422 /* When --enable-checking=fold, compute a digest of expr before
13423    and after the actual fold call to verify that fold did not
13424    accidentally change the original expr.  */
13425 
13426 tree
13427 fold (tree expr)
13428 {
13429   tree ret;
13430   struct md5_ctx ctx;
13431   unsigned char checksum_before[16], checksum_after[16];
13432   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13433 
13434   md5_init_ctx (&ctx);
13435   fold_checksum_tree (expr, &ctx, &ht);
13436   md5_finish_ctx (&ctx, checksum_before);
13437   ht.empty ();
13438 
13439   ret = fold_1 (expr);
13440 
13441   md5_init_ctx (&ctx);
13442   fold_checksum_tree (expr, &ctx, &ht);
13443   md5_finish_ctx (&ctx, checksum_after);
13444 
13445   if (memcmp (checksum_before, checksum_after, 16))
13446     fold_check_failed (expr, ret);
13447 
13448   return ret;
13449 }
13450 
13451 void
13452 print_fold_checksum (const_tree expr)
13453 {
13454   struct md5_ctx ctx;
13455   unsigned char checksum[16], cnt;
13456   hash_table<nofree_ptr_hash<const tree_node> > ht (32);
13457 
13458   md5_init_ctx (&ctx);
13459   fold_checksum_tree (expr, &ctx, &ht);
13460   md5_finish_ctx (&ctx, checksum);
13461   for (cnt = 0; cnt < 16; ++cnt)
13462     fprintf (stderr, "%02x", checksum[cnt]);
13463   putc ('\n', stderr);
13464 }
13465 
13466 static void
13467 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13468 {
13469   internal_error ("fold check: original tree changed by fold");
13470 }
13471 
13472 static void
13473 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
13474 		    hash_table<nofree_ptr_hash <const tree_node> > *ht)
13475 {
13476   const tree_node **slot;
13477   enum tree_code code;
13478   union tree_node *buf;
13479   int i, len;
13480 
13481  recursive_label:
13482   if (expr == NULL)
13483     return;
13484   slot = ht->find_slot (expr, INSERT);
13485   if (*slot != NULL)
13486     return;
13487   *slot = expr;
13488   code = TREE_CODE (expr);
13489   if (TREE_CODE_CLASS (code) == tcc_declaration
13490       && HAS_DECL_ASSEMBLER_NAME_P (expr))
13491     {
13492       /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified.  */
13493       size_t sz = tree_size (expr);
13494       buf = XALLOCAVAR (union tree_node, sz);
13495       memcpy ((char *) buf, expr, sz);
13496       SET_DECL_ASSEMBLER_NAME ((tree) buf, NULL);
      buf->decl_with_vis.symtab_node = NULL;
      buf->base.nowarning_flag = 0;
      expr = (tree) buf;
    }
  else if (TREE_CODE_CLASS (code) == tcc_type
	   && (TYPE_POINTER_TO (expr)
	       || TYPE_REFERENCE_TO (expr)
	       || TYPE_CACHED_VALUES_P (expr)
	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
	       || TYPE_NEXT_VARIANT (expr)
	       || TYPE_ALIAS_SET_KNOWN_P (expr)))
    {
      /* Allow these fields to be modified.  */
      tree tmp;
      size_t sz = tree_size (expr);
      buf = XALLOCAVAR (union tree_node, sz);
      memcpy ((char *) buf, expr, sz);
      expr = tmp = (tree) buf;
      TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
      TYPE_POINTER_TO (tmp) = NULL;
      TYPE_REFERENCE_TO (tmp) = NULL;
      TYPE_NEXT_VARIANT (tmp) = NULL;
      TYPE_ALIAS_SET (tmp) = -1;
      if (TYPE_CACHED_VALUES_P (tmp))
	{
	  TYPE_CACHED_VALUES_P (tmp) = 0;
	  TYPE_CACHED_VALUES (tmp) = NULL;
	}
    }
  else if (TREE_NO_WARNING (expr) && (DECL_P (expr) || EXPR_P (expr)))
    {
      /* Allow TREE_NO_WARNING to be set.  Perhaps we shouldn't allow that
	 and change builtins.c etc. instead - see PR89543.  */
      size_t sz = tree_size (expr);
      buf = XALLOCAVAR (union tree_node, sz);
      memcpy ((char *) buf, expr, sz);
      buf->base.nowarning_flag = 0;
      expr = (tree) buf;
    }
  md5_process_bytes (expr, tree_size (expr), ctx);
  if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
    fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != tcc_type
      && TREE_CODE_CLASS (code) != tcc_declaration
      && code != TREE_LIST
      && code != SSA_NAME
      && CODE_CONTAINS_STRUCT (code, TS_COMMON))
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_constant:
      switch (code)
	{
	case STRING_CST:
	  md5_process_bytes (TREE_STRING_POINTER (expr),
			     TREE_STRING_LENGTH (expr), ctx);
	  break;
	case COMPLEX_CST:
	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
	  break;
	case VECTOR_CST:
	  len = vector_cst_encoded_nelts (expr);
	  for (i = 0; i < len; ++i)
	    fold_checksum_tree (VECTOR_CST_ENCODED_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_exceptional:
      switch (code)
	{
	case TREE_LIST:
	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
	  expr = TREE_CHAIN (expr);
	  goto recursive_label;
	  break;
	case TREE_VEC:
	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_expression:
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_statement:
    case tcc_vl_exp:
      len = TREE_OPERAND_LENGTH (expr);
      for (i = 0; i < len; ++i)
	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case tcc_declaration:
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
	{
	  fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
	  fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
	  fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
	  fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
	  fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
	}

      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
	{
	  if (TREE_CODE (expr) == FUNCTION_DECL)
	    {
	      fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
	      fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
	    }
	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
	}
      break;
    case tcc_type:
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      if (INTEGRAL_TYPE_P (expr)
	  || SCALAR_FLOAT_TYPE_P (expr))
	{
	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
	}
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      if (TREE_CODE (expr) == RECORD_TYPE
	  || TREE_CODE (expr) == UNION_TYPE
	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}

/* Helper function for outputting the checksum of a tree T.  When
   debugging with gdb, you can "define mynext" to be "next" followed
   by "call debug_fold_checksum (op0)", then just trace down till the
   outputs differ.  */

DEBUG_FUNCTION void
debug_fold_checksum (const_tree t)
{
  int i;
  unsigned char checksum[16];
  struct md5_ctx ctx;
  hash_table<nofree_ptr_hash<const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (t, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum);
  ht.empty ();

  for (i = 0; i < 16; i++)
    fprintf (stderr, "%d ", checksum[i]);

  fprintf (stderr, "\n");
}

#endif

/* Fold a unary tree expression with code CODE of type TYPE with an
   operand OP0.  LOC is the location of the resulting expression.
   Return a folded expression if successful.  Otherwise, return a tree
   expression with code CODE of type TYPE with an operand OP0.  */

tree
fold_build1_loc (location_t loc,
		 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before[16], checksum_after[16];
  struct md5_ctx ctx;
  hash_table<nofree_ptr_hash<const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before);
  ht.empty ();
#endif

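  /* Try the pattern-based folder first; if it finds no simplification,
     build the requested node unchanged.  Under ENABLE_FOLD_CHECKING the
     MD5 digests taken above and below verify that folding did not
     modify OP0 in place.  */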
  tem = fold_unary_loc (loc, code, type, op0);
  if (!tem)
    tem = build1_loc (loc, code, type, op0 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after);

  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (op0, tem);
#endif
  return tem;
}

/* Fold a binary tree expression with code CODE of type TYPE with
   operands OP0 and OP1.  LOC is the location of the resulting
   expression.  Return a folded expression if successful.  Otherwise,
   return a tree expression with code CODE of type TYPE with operands
   OP0 and OP1.  */

tree
fold_build2_loc (location_t loc, enum tree_code code, tree type,
		 tree op0, tree op1 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
		checksum_before_op1[16],
		checksum_after_op0[16],
		checksum_after_op1[16];
  struct md5_ctx ctx;
  hash_table<nofree_ptr_hash<const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  ht.empty ();
#endif

  tem = fold_binary_loc (loc, code, type, op0, op1);
  if (!tem)
    tem = build2_loc (loc, code, type, op0, op1 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  ht.empty ();

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op1);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);
#endif
  return tem;
}

/* Fold a ternary tree expression with code CODE of type TYPE with
   operands OP0, OP1, and OP2.  Return a folded expression if
   successful.  Otherwise, return a tree expression with code CODE of
   type TYPE with operands OP0, OP1, and OP2.  */

tree
fold_build3_loc (location_t loc, enum tree_code code, tree type,
		 tree op0, tree op1, tree op2 MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_op0[16],
		checksum_before_op1[16],
		checksum_before_op2[16],
		checksum_after_op0[16],
		checksum_after_op1[16],
		checksum_after_op2[16];
  struct md5_ctx ctx;
  hash_table<nofree_ptr_hash<const tree_node> > ht (32);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  ht.empty ();

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_op2);
  ht.empty ();
#endif

  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
  if (!tem)
    tem = build3_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  ht.empty ();

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  ht.empty ();

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_op2);

  if (memcmp (checksum_before_op2, checksum_after_op2, 16))
    fold_check_failed (op2, tem);
#endif
  return tem;
}

/* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
   arguments in ARGARRAY, and a null static chain.
   Return a folded expression if successful.  Otherwise, return a CALL_EXPR
   of type TYPE from the given operands as constructed by build_call_array.  */

tree
fold_build_call_array_loc (location_t loc, tree type, tree fn,
			   int nargs, tree *argarray)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  unsigned char checksum_before_fn[16],
		checksum_before_arglist[16],
		checksum_after_fn[16],
		checksum_after_arglist[16];
  struct md5_ctx ctx;
  hash_table<nofree_ptr_hash<const tree_node> > ht (32);
  int i;

  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_fn);
  ht.empty ();

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_before_arglist);
  ht.empty ();
#endif

  tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
  if (!tem)
    tem = build_call_array_loc (loc, type, fn, nargs, argarray);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (fn, &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_fn);
  ht.empty ();

  if (memcmp (checksum_before_fn, checksum_after_fn, 16))
    fold_check_failed (fn, tem);

  md5_init_ctx (&ctx);
  for (i = 0; i < nargs; i++)
    fold_checksum_tree (argarray[i], &ctx, &ht);
  md5_finish_ctx (&ctx, checksum_after_arglist);

  if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
    fold_check_failed (NULL_TREE, tem);
#endif
  return tem;
}

/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */

#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;

tree
fold_build1_initializer_loc (location_t loc, enum tree_code code,
			     tree type, tree op)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build1_loc (loc, code, type, op);

  END_FOLD_INIT;
  return result;
}

tree
fold_build2_initializer_loc (location_t loc, enum tree_code code,
			     tree type, tree op0, tree op1)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build2_loc (loc, code, type, op0, op1);

  END_FOLD_INIT;
  return result;
}

tree
fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
				       int nargs, tree *argarray)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);

  END_FOLD_INIT;
  return result;
}

#undef START_FOLD_INIT
#undef END_FOLD_INIT
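
/* As an illustration (not a quote from any front end): folding the
   initializer of "static double d = 1.0 / 3.0;" would go through
   fold_build2_initializer_loc (loc, RDIV_EXPR, double_type_node, ...),
   so that -ftrapping-math and -frounding-math cannot block the
   compile-time division the way they would for ordinary fold.  */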

/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or if we cannot easily determine it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */

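/* A worked example (illustrative): with TOP `j << 3' and BOTTOM `4',
   the LSHIFT_EXPR case below reduces the query to
   multiple_of_p (type, 8, 4), which the INTEGER_CST case answers
   affirmatively; with TOP `i * 12', the MULT_EXPR case reduces it to
   checking that the constant 12 is itself a multiple of 4.  */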
int
multiple_of_p (tree type, const_tree top, const_tree bottom)
{
  gimple *stmt;
  tree t1, op1, op2;

  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
      if (!integer_pow2p (bottom))
	return 0;
      return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
	      || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));

    case MULT_EXPR:
      if (TREE_CODE (bottom) == INTEGER_CST)
	{
	  op1 = TREE_OPERAND (top, 0);
	  op2 = TREE_OPERAND (top, 1);
	  if (TREE_CODE (op1) == INTEGER_CST)
	    std::swap (op1, op2);
	  if (TREE_CODE (op2) == INTEGER_CST)
	    {
	      if (multiple_of_p (type, op2, bottom))
		return 1;
	      /* Handle multiple_of_p ((x * 2 + 2) * 4, 8).  */
	      if (multiple_of_p (type, bottom, op2))
		{
		  widest_int w = wi::sdiv_trunc (wi::to_widest (bottom),
						 wi::to_widest (op2));
		  if (wi::fits_to_tree_p (w, TREE_TYPE (bottom)))
		    {
		      op2 = wide_int_to_tree (TREE_TYPE (bottom), w);
		      return multiple_of_p (type, op1, op2);
		    }
		}
	      return multiple_of_p (type, op1, bottom);
	    }
	}
      return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
	      || multiple_of_p (type, TREE_OPERAND (top, 0), bottom));

    case MINUS_EXPR:
      /* We cannot prove precisely whether op0 - op1 is a multiple of
	 bottom, so be conservative here and check that both op0 and op1
	 are multiples of bottom.  Note we check the second operand first
	 since it's usually simpler.  */
      return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));

    case PLUS_EXPR:
      /* The same as MINUS_EXPR, but handle cases like op0 + 0xfffffffd
	 as op0 - 3 if the expression has unsigned type.  For example,
	 (X / 3) + 0xfffffffd is a multiple of 3, but 0xfffffffd is not.  */
      op1 = TREE_OPERAND (top, 1);
      if (TYPE_UNSIGNED (type)
	  && TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sign_bit (op1))
	op1 = fold_build1 (NEGATE_EXPR, type, op1);
      return (multiple_of_p (type, op1, bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 0), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
	{
	  op1 = TREE_OPERAND (top, 1);
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)),
			 wi::to_wide (op1))
	      && (t1 = fold_convert (type,
				     const_binop (LSHIFT_EXPR, size_one_node,
						  op1))) != 0
	      && !TREE_OVERFLOW (t1))
	    return multiple_of_p (type, t1, bottom);
	}
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
	  || (TYPE_PRECISION (type)
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
	return 0;

      /* fall through */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case COND_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
	  || integer_zerop (bottom)
	  || (TYPE_UNSIGNED (type)
	      && (tree_int_cst_sgn (top) < 0
		  || tree_int_cst_sgn (bottom) < 0)))
	return 0;
      return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
				SIGNED);

    case SSA_NAME:
      if (TREE_CODE (bottom) == INTEGER_CST
	  && (stmt = SSA_NAME_DEF_STMT (top)) != NULL
	  && gimple_code (stmt) == GIMPLE_ASSIGN)
	{
	  enum tree_code code = gimple_assign_rhs_code (stmt);

	  /* Check for special cases to see if top is defined as a multiple
	     of bottom:

	       top = X & ~(bottom - 1); bottom is a power of 2

	     or

	       Y = X % bottom
	       top = X - Y.  */
	  if (code == BIT_AND_EXPR
	      && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
	      && TREE_CODE (op2) == INTEGER_CST
	      && integer_pow2p (bottom)
	      && wi::multiple_of_p (wi::to_widest (op2),
				    wi::to_widest (bottom), UNSIGNED))
	    return 1;

	  op1 = gimple_assign_rhs1 (stmt);
	  if (code == MINUS_EXPR
	      && (op2 = gimple_assign_rhs2 (stmt)) != NULL_TREE
	      && TREE_CODE (op2) == SSA_NAME
	      && (stmt = SSA_NAME_DEF_STMT (op2)) != NULL
	      && gimple_code (stmt) == GIMPLE_ASSIGN
	      && (code = gimple_assign_rhs_code (stmt)) == TRUNC_MOD_EXPR
	      && operand_equal_p (op1, gimple_assign_rhs1 (stmt), 0)
	      && operand_equal_p (bottom, gimple_assign_rhs2 (stmt), 0))
	    return 1;
	}

      /* fall through */

    default:
      if (POLY_INT_CST_P (top) && poly_int_tree_p (bottom))
	return multiple_p (wi::to_poly_widest (top),
			   wi::to_poly_widest (bottom));

      return 0;
    }
}

/* Return true if expression X cannot be (or contain) a NaN or infinity.
   This function returns true for integer expressions, and returns
   false if uncertain.  */

bool
tree_expr_finite_p (const_tree x)
{
  machine_mode mode = element_mode (x);
  if (!HONOR_NANS (mode) && !HONOR_INFINITIES (mode))
    return true;
  switch (TREE_CODE (x))
    {
    case REAL_CST:
      return real_isfinite (TREE_REAL_CST_PTR (x));
    case COMPLEX_CST:
      return tree_expr_finite_p (TREE_REALPART (x))
	     && tree_expr_finite_p (TREE_IMAGPART (x));
    case FLOAT_EXPR:
      return true;
    case ABS_EXPR:
    case CONVERT_EXPR:
    case NON_LVALUE_EXPR:
    case NEGATE_EXPR:
    case SAVE_EXPR:
      return tree_expr_finite_p (TREE_OPERAND (x, 0));
    case MIN_EXPR:
    case MAX_EXPR:
      return tree_expr_finite_p (TREE_OPERAND (x, 0))
	     && tree_expr_finite_p (TREE_OPERAND (x, 1));
    case COND_EXPR:
      return tree_expr_finite_p (TREE_OPERAND (x, 1))
	     && tree_expr_finite_p (TREE_OPERAND (x, 2));
    case CALL_EXPR:
      switch (get_call_combined_fn (x))
	{
	CASE_CFN_FABS:
	  return tree_expr_finite_p (CALL_EXPR_ARG (x, 0));
	CASE_CFN_FMAX:
	CASE_CFN_FMIN:
	  return tree_expr_finite_p (CALL_EXPR_ARG (x, 0))
		 && tree_expr_finite_p (CALL_EXPR_ARG (x, 1));
	default:
	  return false;
	}

    default:
      return false;
    }
}

/* Return true if expression X evaluates to an infinity.
   This function returns false for integer expressions.  */

bool
tree_expr_infinite_p (const_tree x)
{
  if (!HONOR_INFINITIES (x))
    return false;
  switch (TREE_CODE (x))
    {
    case REAL_CST:
      return real_isinf (TREE_REAL_CST_PTR (x));
    case ABS_EXPR:
    case NEGATE_EXPR:
    case NON_LVALUE_EXPR:
    case SAVE_EXPR:
      return tree_expr_infinite_p (TREE_OPERAND (x, 0));
    case COND_EXPR:
      return tree_expr_infinite_p (TREE_OPERAND (x, 1))
	     && tree_expr_infinite_p (TREE_OPERAND (x, 2));
    default:
      return false;
    }
}

/* Return true if expression X could evaluate to an infinity.
   This function returns false for integer expressions, and returns
   true if uncertain.  */

bool
tree_expr_maybe_infinite_p (const_tree x)
{
  if (!HONOR_INFINITIES (x))
    return false;
  switch (TREE_CODE (x))
    {
    case REAL_CST:
      return real_isinf (TREE_REAL_CST_PTR (x));
    case FLOAT_EXPR:
      return false;
    case ABS_EXPR:
    case NEGATE_EXPR:
      return tree_expr_maybe_infinite_p (TREE_OPERAND (x, 0));
    case COND_EXPR:
      return tree_expr_maybe_infinite_p (TREE_OPERAND (x, 1))
	     || tree_expr_maybe_infinite_p (TREE_OPERAND (x, 2));
    default:
      return true;
    }
}

/* Return true if expression X evaluates to a signaling NaN.
   This function returns false for integer expressions.  */

bool
tree_expr_signaling_nan_p (const_tree x)
{
  if (!HONOR_SNANS (x))
    return false;
  switch (TREE_CODE (x))
    {
    case REAL_CST:
      return real_issignaling_nan (TREE_REAL_CST_PTR (x));
    case NON_LVALUE_EXPR:
    case SAVE_EXPR:
      return tree_expr_signaling_nan_p (TREE_OPERAND (x, 0));
    case COND_EXPR:
      return tree_expr_signaling_nan_p (TREE_OPERAND (x, 1))
	     && tree_expr_signaling_nan_p (TREE_OPERAND (x, 2));
    default:
      return false;
    }
}

/* Return true if expression X could evaluate to a signaling NaN.
   This function returns false for integer expressions, and returns
   true if uncertain.  */

bool
tree_expr_maybe_signaling_nan_p (const_tree x)
{
  if (!HONOR_SNANS (x))
    return false;
  switch (TREE_CODE (x))
    {
    case REAL_CST:
      return real_issignaling_nan (TREE_REAL_CST_PTR (x));
    case FLOAT_EXPR:
      return false;
    case ABS_EXPR:
    case CONVERT_EXPR:
    case NEGATE_EXPR:
    case NON_LVALUE_EXPR:
    case SAVE_EXPR:
      return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 0));
    case MIN_EXPR:
    case MAX_EXPR:
      return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 0))
	     || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 1));
    case COND_EXPR:
      return tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 1))
	     || tree_expr_maybe_signaling_nan_p (TREE_OPERAND (x, 2));
    case CALL_EXPR:
      switch (get_call_combined_fn (x))
	{
	CASE_CFN_FABS:
	  return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 0));
	CASE_CFN_FMAX:
	CASE_CFN_FMIN:
	  return tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 0))
		 || tree_expr_maybe_signaling_nan_p (CALL_EXPR_ARG (x, 1));
	default:
	  return true;
	}
    default:
      return true;
    }
}

/* Return true if expression X evaluates to a NaN.
   This function returns false for integer expressions.  */

bool
tree_expr_nan_p (const_tree x)
{
  if (!HONOR_NANS (x))
    return false;
  switch (TREE_CODE (x))
    {
    case REAL_CST:
      return real_isnan (TREE_REAL_CST_PTR (x));
    case NON_LVALUE_EXPR:
    case SAVE_EXPR:
      return tree_expr_nan_p (TREE_OPERAND (x, 0));
    case COND_EXPR:
      return tree_expr_nan_p (TREE_OPERAND (x, 1))
	     && tree_expr_nan_p (TREE_OPERAND (x, 2));
    default:
      return false;
    }
}

/* Return true if expression X could evaluate to a NaN.
   This function returns false for integer expressions, and returns
   true if uncertain.  */

bool
tree_expr_maybe_nan_p (const_tree x)
{
  if (!HONOR_NANS (x))
    return false;
  switch (TREE_CODE (x))
    {
    case REAL_CST:
      return real_isnan (TREE_REAL_CST_PTR (x));
    case FLOAT_EXPR:
      return false;
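    /* Inf - Inf, Inf + -Inf and 0 * Inf all yield NaNs from NaN-free
       operands, so for the arithmetic codes below we require both
       operands to be provably finite.  */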
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      return !tree_expr_finite_p (TREE_OPERAND (x, 0))
	     || !tree_expr_finite_p (TREE_OPERAND (x, 1));
    case ABS_EXPR:
    case CONVERT_EXPR:
    case NEGATE_EXPR:
    case NON_LVALUE_EXPR:
    case SAVE_EXPR:
      return tree_expr_maybe_nan_p (TREE_OPERAND (x, 0));
    case MIN_EXPR:
    case MAX_EXPR:
      return tree_expr_maybe_nan_p (TREE_OPERAND (x, 0))
	     || tree_expr_maybe_nan_p (TREE_OPERAND (x, 1));
    case COND_EXPR:
      return tree_expr_maybe_nan_p (TREE_OPERAND (x, 1))
	     || tree_expr_maybe_nan_p (TREE_OPERAND (x, 2));
    case CALL_EXPR:
      switch (get_call_combined_fn (x))
	{
	CASE_CFN_FABS:
	  return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 0));
	CASE_CFN_FMAX:
	CASE_CFN_FMIN:
	  return tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 0))
		 || tree_expr_maybe_nan_p (CALL_EXPR_ARG (x, 1));
	default:
	  return true;
	}
    default:
      return true;
    }
}

#define tree_expr_nonnegative_warnv_p(X, Y) \
  _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0

#define RECURSE(X) \
  ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
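
/* The #define of tree_expr_nonnegative_warnv_p above turns any direct
   recursive call in the functions below into a compile-time error, so
   recursion has to go through RECURSE, which threads STRICT_OVERFLOW_P
   through and increments DEPTH to keep the queries bounded.  The extra
   parentheses in RECURSE suppress the function-like macro expansion.  */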

/* Return true if an expression with code CODE and type TYPE is known
   to be non-negative.  */

static bool
tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
{
  if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
      && truth_value_p (code))
    /* Truth values evaluate to 0 or 1, which is nonnegative unless we
       have a signed:1 type (where the values are -1 and 0).  */
    return true;
  return false;
}

/* Return true if (CODE OP0) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

bool
tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
				bool *strict_overflow_p, int depth)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case ABS_EXPR:
      /* We can't return 1 if flag_wrapv is set because
	 ABS_EXPR<INT_MIN> = INT_MIN.  */
      if (!ANY_INTEGRAL_TYPE_P (type))
	return true;
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  *strict_overflow_p = true;
	  return true;
	}
      break;

    case NON_LVALUE_EXPR:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      return RECURSE (op0);

    CASE_CONVERT:
      {
	tree inner_type = TREE_TYPE (op0);
	tree outer_type = type;

	if (TREE_CODE (outer_type) == REAL_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return RECURSE (op0);
	    if (INTEGRAL_TYPE_P (inner_type))
	      {
		if (TYPE_UNSIGNED (inner_type))
		  return true;
		return RECURSE (op0);
	      }
	  }
	else if (INTEGRAL_TYPE_P (outer_type))
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return RECURSE (op0);
	    if (INTEGRAL_TYPE_P (inner_type))
	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
		     && TYPE_UNSIGNED (inner_type);
	  }
      }
      break;

    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know the sign of `t', so be conservative and return false.  */
  return false;
}

/* Return true if (CODE OP0 OP1) is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

bool
tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
				 tree op1, bool *strict_overflow_p,
				 int depth)
{
  if (TYPE_UNSIGNED (type))
    return true;

  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type))
	return RECURSE (op0) && RECURSE (op1);

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
	 both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TREE_CODE (op0) == NOP_EXPR
	  && TREE_CODE (op1) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
	    {
	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
				       TYPE_PRECISION (inner2)) + 1;
	      return prec < TYPE_PRECISION (type);
	    }
	}
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	{
	  /* x * x is always non-negative for floating-point x, and
	     likewise for integers when overflow is undefined.  */
	  if (operand_equal_p (op0, op1, 0)
	      || (RECURSE (op0) && RECURSE (op1)))
	    {
	      if (ANY_INTEGRAL_TYPE_P (type)
		  && TYPE_OVERFLOW_UNDEFINED (type))
		*strict_overflow_p = true;
	      return true;
	    }
	}

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
	 both unsigned and the sum of their precisions is smaller than
	 the precision of the result.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
	  && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
	{
	  tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
	    ? TREE_TYPE (TREE_OPERAND (op0, 0))
	    : TREE_TYPE (op0);
	  tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
	    ? TREE_TYPE (TREE_OPERAND (op1, 0))
	    : TREE_TYPE (op1);

	  bool unsigned0 = TYPE_UNSIGNED (inner0);
	  bool unsigned1 = TYPE_UNSIGNED (inner1);

	  if (TREE_CODE (op0) == INTEGER_CST)
	    unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;

	  if (TREE_CODE (op1) == INTEGER_CST)
	    unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;

	  if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
	      && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
	    {
	      unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
		? tree_int_cst_min_precision (op0, UNSIGNED)
		: TYPE_PRECISION (inner0);

	      unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
		? tree_int_cst_min_precision (op1, UNSIGNED)
		: TYPE_PRECISION (inner1);

	      return precision0 + precision1 < TYPE_PRECISION (type);
	    }
	}
      return false;

    case BIT_AND_EXPR:
      return RECURSE (op0) || RECURSE (op1);

    case MAX_EXPR:
      /* Usually RECURSE (op0) || RECURSE (op1) but NaNs complicate
	 things.  */
      if (tree_expr_maybe_nan_p (op0) || tree_expr_maybe_nan_p (op1))
	return RECURSE (op0) && RECURSE (op1);
      return RECURSE (op0) || RECURSE (op1);

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return RECURSE (op0) && RECURSE (op1);

    case TRUNC_MOD_EXPR:
      return RECURSE (op0);

    case FLOOR_MOD_EXPR:
      return RECURSE (op1);

    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
    default:
      return tree_simple_nonnegative_warnv_p (code, type);
    }

  /* We don't know the sign of `t', so be conservative and return false.  */
  return false;
}

/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

bool
tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
{
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case FIXED_CST:
      return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));

    case COND_EXPR:
      return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));

    case SSA_NAME:
      /* Limit the depth of recursion to avoid quadratic behavior.
	 This is expected to catch almost all occurrences in practice.
	 If this code misses important cases that unbounded recursion
	 would not, passes that need this information could be revised
	 to provide it through dataflow propagation.  */
      return (!name_registered_for_update_p (t)
	      && depth < param_max_ssa_name_query_depth
	      && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
						  strict_overflow_p, depth));

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
    }
}

/* Return true if the result of calling the function FN of type TYPE
   with arguments ARG0 and ARG1 is known to be non-negative.  If the
   return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

bool
tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
			       bool *strict_overflow_p, int depth)
{
  switch (fn)
    {
    CASE_CFN_ACOS:
    CASE_CFN_ACOSH:
    CASE_CFN_CABS:
    CASE_CFN_COSH:
    CASE_CFN_ERFC:
    CASE_CFN_EXP:
    CASE_CFN_EXP10:
    CASE_CFN_EXP2:
    CASE_CFN_FABS:
    CASE_CFN_FDIM:
    CASE_CFN_HYPOT:
    CASE_CFN_POW10:
    CASE_CFN_FFS:
    CASE_CFN_PARITY:
    CASE_CFN_POPCOUNT:
    CASE_CFN_CLZ:
    CASE_CFN_CLRSB:
    case CFN_BUILT_IN_BSWAP16:
    case CFN_BUILT_IN_BSWAP32:
    case CFN_BUILT_IN_BSWAP64:
    case CFN_BUILT_IN_BSWAP128:
      /* Always true.  */
      return true;

    CASE_CFN_SQRT:
    CASE_CFN_SQRT_FN:
      /* sqrt(-0.0) is -0.0.  */
      if (!HONOR_SIGNED_ZEROS (element_mode (type)))
	return true;
      return RECURSE (arg0);

    CASE_CFN_ASINH:
    CASE_CFN_ATAN:
    CASE_CFN_ATANH:
    CASE_CFN_CBRT:
    CASE_CFN_CEIL:
    CASE_CFN_CEIL_FN:
    CASE_CFN_ERF:
    CASE_CFN_EXPM1:
    CASE_CFN_FLOOR:
    CASE_CFN_FLOOR_FN:
    CASE_CFN_FMOD:
    CASE_CFN_FREXP:
    CASE_CFN_ICEIL:
    CASE_CFN_IFLOOR:
    CASE_CFN_IRINT:
    CASE_CFN_IROUND:
    CASE_CFN_LCEIL:
    CASE_CFN_LDEXP:
    CASE_CFN_LFLOOR:
    CASE_CFN_LLCEIL:
    CASE_CFN_LLFLOOR:
    CASE_CFN_LLRINT:
    CASE_CFN_LLROUND:
    CASE_CFN_LRINT:
    CASE_CFN_LROUND:
    CASE_CFN_MODF:
    CASE_CFN_NEARBYINT:
    CASE_CFN_NEARBYINT_FN:
    CASE_CFN_RINT:
    CASE_CFN_RINT_FN:
    CASE_CFN_ROUND:
    CASE_CFN_ROUND_FN:
    CASE_CFN_ROUNDEVEN:
    CASE_CFN_ROUNDEVEN_FN:
    CASE_CFN_SCALB:
    CASE_CFN_SCALBLN:
    CASE_CFN_SCALBN:
    CASE_CFN_SIGNBIT:
    CASE_CFN_SIGNIFICAND:
    CASE_CFN_SINH:
    CASE_CFN_TANH:
    CASE_CFN_TRUNC:
    CASE_CFN_TRUNC_FN:
      /* True if the 1st argument is nonnegative.  */
      return RECURSE (arg0);

    CASE_CFN_FMAX:
    CASE_CFN_FMAX_FN:
      /* Usually RECURSE (arg0) || RECURSE (arg1) but NaNs complicate
	 things.  In the presence of sNaNs, we're only guaranteed to be
	 non-negative if both operands are non-negative.  In the presence
	 of qNaNs, we're non-negative if either operand is non-negative
	 and can't be a qNaN, or if both operands are non-negative.  */
      if (tree_expr_maybe_signaling_nan_p (arg0)
	  || tree_expr_maybe_signaling_nan_p (arg1))
	return RECURSE (arg0) && RECURSE (arg1);
      return RECURSE (arg0) ? (!tree_expr_maybe_nan_p (arg0)
			       || RECURSE (arg1))
			    : (RECURSE (arg1)
			       && !tree_expr_maybe_nan_p (arg1));

    CASE_CFN_FMIN:
    CASE_CFN_FMIN_FN:
      /* True if the 1st AND 2nd arguments are nonnegative.  */
      return RECURSE (arg0) && RECURSE (arg1);

    CASE_CFN_COPYSIGN:
    CASE_CFN_COPYSIGN_FN:
      /* True if the 2nd argument is nonnegative.  */
      return RECURSE (arg1);

    CASE_CFN_POWI:
      /* True if the 1st argument is nonnegative or the second
	 argument is an even integer.  */
      if (TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_INT_CST_LOW (arg1) & 1) == 0)
	return true;
      return RECURSE (arg0);

    CASE_CFN_POW:
      /* True if the 1st argument is nonnegative or the second
	 argument is an even integer valued real.  */
      if (TREE_CODE (arg1) == REAL_CST)
	{
	  REAL_VALUE_TYPE c;
	  HOST_WIDE_INT n;

	  c = TREE_REAL_CST (arg1);
	  n = real_to_integer (&c);
	  if ((n & 1) == 0)
	    {
	      REAL_VALUE_TYPE cint;
	      real_from_integer (&cint, VOIDmode, n, SIGNED);
	      if (real_identical (&c, &cint))
		return true;
	    }
	}
      return RECURSE (arg0);

    default:
      break;
    }
  return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
}

/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
{
  enum tree_code code = TREE_CODE (t);
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (code)
    {
    case TARGET_EXPR:
      {
	tree temp = TARGET_EXPR_SLOT (t);
	t = TARGET_EXPR_INITIAL (t);

	/* If the initializer is non-void, then it's a normal expression
	   that will be assigned to the slot.  */
	if (!VOID_TYPE_P (t))
	  return RECURSE (t);

	/* Otherwise, the initializer sets the slot in some way.  One common
	   way is an assignment statement at the end of the initializer.  */
	while (1)
	  {
	    if (TREE_CODE (t) == BIND_EXPR)
	      t = expr_last (BIND_EXPR_BODY (t));
	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
		     || TREE_CODE (t) == TRY_CATCH_EXPR)
	      t = expr_last (TREE_OPERAND (t, 0));
	    else if (TREE_CODE (t) == STATEMENT_LIST)
	      t = expr_last (t);
	    else
	      break;
	  }
	if (TREE_CODE (t) == MODIFY_EXPR
	    && TREE_OPERAND (t, 0) == temp)
	  return RECURSE (TREE_OPERAND (t, 1));

	return false;
      }

    case CALL_EXPR:
      {
	tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
	tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;

	return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
					      get_call_combined_fn (t),
					      arg0, arg1,
					      strict_overflow_p, depth);
      }

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      return RECURSE (TREE_OPERAND (t, 1));

    case BIND_EXPR:
      return RECURSE (expr_last (TREE_OPERAND (t, 1)));

    case SAVE_EXPR:
      return RECURSE (TREE_OPERAND (t, 0));

    default:
      return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
    }
}

#undef RECURSE
#undef tree_expr_nonnegative_warnv_p

/* Return true if T is known to be non-negative.  If the return
   value is based on the assumption that signed overflow is undefined,
   set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

bool
tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
{
  enum tree_code code;
  if (t == error_mark_node)
    return false;

  code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_binary:
    case tcc_comparison:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      TREE_OPERAND (t, 1),
					      strict_overflow_p, depth);

    case tcc_unary:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
					     TREE_TYPE (t),
					     TREE_OPERAND (t, 0),
					     strict_overflow_p, depth);

    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);

    default:
      break;
    }

  switch (code)
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
					      TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      TREE_OPERAND (t, 1),
					      strict_overflow_p, depth);
    case TRUTH_NOT_EXPR:
      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
					     TREE_TYPE (t),
					     TREE_OPERAND (t, 0),
					     strict_overflow_p, depth);

    case COND_EXPR:
    case CONSTRUCTOR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    case ADDR_EXPR:
    case WITH_SIZE_EXPR:
    case SSA_NAME:
      return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);

    default:
      return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
    }
}

/* Return true if `t' is known to be non-negative.  Handle warnings
   about undefined signed overflow.  */

bool
tree_expr_nonnegative_p (tree t)
{
  bool ret, strict_overflow_p;

  strict_overflow_p = false;
  ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
  if (strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is always "
			    "non-negative"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
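
/* For example (an illustrative use, not a quote from any caller), a
   simplification that wants to rewrite X / 16 as X >> 4 could guard
   itself with

     if (tree_expr_nonnegative_p (op0))
       ... use an arithmetic shift ...

   Similar guards appear in the match.pd simplifications.  */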


/* Return true when (CODE OP0) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
			    bool *strict_overflow_p)
{
  switch (code)
    {
    case ABS_EXPR:
      return tree_expr_nonzero_warnv_p (op0, strict_overflow_p);

    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (op0);
	tree outer_type = type;

	return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
		&& tree_expr_nonzero_warnv_p (op0, strict_overflow_p));
      }
      break;

    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_warnv_p (op0, strict_overflow_p);

    default:
      break;
    }

  return false;
}

/* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_binary_nonzero_warnv_p (enum tree_code code,
			     tree type,
			     tree op0,
			     tree op1, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (code)
    {
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
      if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
	{
	  /* In the presence of negative values it is hard to say
	     anything definite.  */
	  sub_strict_overflow_p = false;
	  if (!tree_expr_nonnegative_warnv_p (op0,
					      &sub_strict_overflow_p)
	      || !tree_expr_nonnegative_warnv_p (op1,
						 &sub_strict_overflow_p))
	    return false;
	  /* At least one of the operands must be positive and the other
	     non-negative.  */
	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
	     overflows, on a twos-complement machine the sum of two
	     nonnegative numbers can never be zero.  */
	  return (tree_expr_nonzero_warnv_p (op0,
					     strict_overflow_p)
		  || tree_expr_nonzero_warnv_p (op1,
						strict_overflow_p));
	}
      break;

    case MULT_EXPR:
      if (TYPE_OVERFLOW_UNDEFINED (type))
	{
	  if (tree_expr_nonzero_warnv_p (op0,
					 strict_overflow_p)
	      && tree_expr_nonzero_warnv_p (op1,
					    strict_overflow_p))
	    {
	      *strict_overflow_p = true;
	      return true;
	    }
	}
      break;

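    /* As written, the MIN_EXPR case below only propagates the
       strict-overflow flag: it falls through and answers "false" even
       when both operands are known to be nonzero.  */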
    case MIN_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (op1,
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	}
      break;

    case MAX_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (op0,
				     &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;

	  /* When both operands are nonzero, then MAX must be too.  */
	  if (tree_expr_nonzero_warnv_p (op1,
					 strict_overflow_p))
	    return true;

	  /* MAX where operand 0 is positive is positive.  */
	  return tree_expr_nonnegative_warnv_p (op0,
						strict_overflow_p);
	}
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_warnv_p (op1,
					  &sub_strict_overflow_p)
	       && tree_expr_nonnegative_warnv_p (op1,
						 &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    case BIT_IOR_EXPR:
      return (tree_expr_nonzero_warnv_p (op1,
					 strict_overflow_p)
	      || tree_expr_nonzero_warnv_p (op0,
					    strict_overflow_p));

    default:
      break;
    }

  return false;
}

/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */

bool
tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
{
  bool sub_strict_overflow_p;
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return !integer_zerop (t);

    case ADDR_EXPR:
      {
	tree base = TREE_OPERAND (t, 0);

	if (!DECL_P (base))
	  base = get_base_address (base);

	if (base && TREE_CODE (base) == TARGET_EXPR)
	  base = TARGET_EXPR_SLOT (base);

	if (!base)
	  return false;

	/* For objects in symbol table check if we know they are non-zero.
	   Don't do anything for variables and functions before symtab is
	   built; it is quite possible that they will be declared weak
	   later.  */
	int nonzero_addr = maybe_nonzero_address (base);
	if (nonzero_addr >= 0)
	  return nonzero_addr;

	/* Constants are never weak.  */
	if (CONSTANT_CLASS_P (base))
	  return true;

	return false;
      }

    case COND_EXPR:
      sub_strict_overflow_p = false;
      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
				     &sub_strict_overflow_p)
	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
					&sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  return true;
	}
      break;

    case SSA_NAME:
      if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
	break;
      return expr_not_equal_to (t, wi::zero (TYPE_PRECISION (TREE_TYPE (t))));

    default:
      break;
    }
  return false;
}

#define integer_valued_real_p(X) \
  _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0

#define RECURSE(X) \
  ((integer_valued_real_p) (X, depth + 1))
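
/* As with the non-negativity queries above, direct recursive calls to
   integer_valued_real_p are poisoned; recursion must go through RECURSE,
   which increments DEPTH.  */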

/* Return true if the floating point result of (CODE OP0) has an
   integer value.  We also allow +Inf, -Inf and NaN to be considered
   integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

bool
integer_valued_real_unary_p (tree_code code, tree op0, int depth)
{
  switch (code)
    {
    case FLOAT_EXPR:
      return true;

    case ABS_EXPR:
      return RECURSE (op0);

    CASE_CONVERT:
      {
	tree type = TREE_TYPE (op0);
	if (TREE_CODE (type) == INTEGER_TYPE)
	  return true;
	if (TREE_CODE (type) == REAL_TYPE)
	  return RECURSE (op0);
	break;
      }

    default:
      break;
    }
  return false;
}

/* Return true if the floating point result of (CODE OP0 OP1) has an
   integer value.  We also allow +Inf, -Inf and NaN to be considered
   integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

bool
integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
{
  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      return RECURSE (op0) && RECURSE (op1);

    default:
      break;
    }
  return false;
}

/* Return true if the floating point result of calling FN with arguments
   ARG0 and ARG1 has an integer value.  We also allow +Inf, -Inf and NaN
   to be considered integer values.  Return false for signaling NaN.
   If FN takes fewer than 2 arguments, the remaining ARGn are null.

   DEPTH is the current nesting depth of the query.  */

bool
integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
{
  switch (fn)
    {
    CASE_CFN_CEIL:
    CASE_CFN_CEIL_FN:
    CASE_CFN_FLOOR:
    CASE_CFN_FLOOR_FN:
    CASE_CFN_NEARBYINT:
    CASE_CFN_NEARBYINT_FN:
    CASE_CFN_RINT:
    CASE_CFN_RINT_FN:
    CASE_CFN_ROUND:
    CASE_CFN_ROUND_FN:
    CASE_CFN_ROUNDEVEN:
    CASE_CFN_ROUNDEVEN_FN:
    CASE_CFN_TRUNC:
    CASE_CFN_TRUNC_FN:
      return true;

    CASE_CFN_FMIN:
    CASE_CFN_FMIN_FN:
    CASE_CFN_FMAX:
    CASE_CFN_FMAX_FN:
      return RECURSE (arg0) && RECURSE (arg1);

    default:
      break;
    }
  return false;
}

/* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
   has an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

bool
integer_valued_real_single_p (tree t, int depth)
{
  switch (TREE_CODE (t))
    {
    case REAL_CST:
      return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));

    case COND_EXPR:
      return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));

    case SSA_NAME:
      /* Limit the depth of recursion to avoid quadratic behavior.
	 This is expected to catch almost all occurrences in practice.
	 If this code misses important cases that unbounded recursion
	 would not, passes that need this information could be revised
	 to provide it through dataflow propagation.  */
      return (!name_registered_for_update_p (t)
	      && depth < param_max_ssa_name_query_depth
	      && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
						    depth));

    default:
      break;
    }
  return false;
}

/* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
   has an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
integer_valued_real_invalid_p (tree t, int depth)
{
  switch (TREE_CODE (t))
    {
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return RECURSE (TREE_OPERAND (t, 1));

    case SAVE_EXPR:
      return RECURSE (TREE_OPERAND (t, 0));

    default:
      break;
    }
  return false;
}

#undef RECURSE
#undef integer_valued_real_p

/* Return true if the floating point expression T has an integer value.
   We also allow +Inf, -Inf and NaN to be considered integer values.
   Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

bool
integer_valued_real_p (tree t, int depth)
{
  if (t == error_mark_node)
    return false;

  STRIP_ANY_LOCATION_WRAPPER (t);

  tree_code code = TREE_CODE (t);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_binary:
    case tcc_comparison:
      return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
					   TREE_OPERAND (t, 1), depth);

    case tcc_unary:
      return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);

    case tcc_constant:
    case tcc_declaration:
    case tcc_reference:
      return integer_valued_real_single_p (t, depth);

    default:
      break;
    }

  switch (code)
    {
    case COND_EXPR:
    case SSA_NAME:
      return integer_valued_real_single_p (t, depth);

    case CALL_EXPR:
      {
	tree arg0 = (call_expr_nargs (t) > 0
		     ? CALL_EXPR_ARG (t, 0)
		     : NULL_TREE);
	tree arg1 = (call_expr_nargs (t) > 1
		     ? CALL_EXPR_ARG (t, 1)
		     : NULL_TREE);
	return integer_valued_real_call_p (get_call_combined_fn (t),
					   arg0, arg1, depth);
      }

    default:
      return integer_valued_real_invalid_p (t, depth);
    }
}

/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
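
/* Example (illustrative only): folding 2 + 3 in int:

     tree two = build_int_cst (integer_type_node, 2);
     tree three = build_int_cst (integer_type_node, 3);
     tree five = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
					  two, three);

   FIVE is an INTEGER_CST with value 5; had either operand been
   non-constant, the function would have returned NULL_TREE instead.  */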

/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}

/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;
      location_t loc = EXPR_LOCATION (exp);

      if (TREE_CODE (exp) == INDIRECT_REF)
	string = string_constant (exp1, &index, NULL, NULL);
      else
	{
	  tree low_bound = array_ref_low_bound (exp);
	  index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));

	  /* Optimize the special case of a zero lower bound.

	     We convert the low_bound to sizetype to avoid some problems
	     with constant folding.  E.g. suppose the lower bound is 1
	     and its mode is QI.  Without the conversion,
	     (ARRAY + (INDEX - (unsigned char) 1)) becomes
	     ((ARRAY + (-(unsigned char) 1)) + INDEX), which becomes
	     (ARRAY + 255 + INDEX).  Oops!  */
	  if (! integer_zerop (low_bound))
	    index = size_diffop_loc (loc, index,
				 fold_convert_loc (loc, sizetype, low_bound));

	  string = exp1;
	}

      scalar_int_mode char_mode;
      if (string
	  && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
	  && TREE_CODE (string) == STRING_CST
	  && tree_fits_uhwi_p (index)
	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
	  && is_int_mode (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))),
			  &char_mode)
	  && GET_MODE_SIZE (char_mode) == 1)
	return build_int_cst_type (TREE_TYPE (exp),
				   (TREE_STRING_POINTER (string)
				    [TREE_INT_CST_LOW (index)]));
    }
  return NULL;
}
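
/* Example (illustrative only): for the ARRAY_REF built from "abc"[1],
   the STRING_CST base and constant index satisfy the checks above and
   the result is an INTEGER_CST holding 'b'; a variable index or a
   non-constant base yields NULL.  */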

/* Folds a read from vector element at IDX of vector ARG.  */

tree
fold_read_from_vector (tree arg, poly_uint64 idx)
{
  unsigned HOST_WIDE_INT i;
  if (known_lt (idx, TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)))
      && known_ge (idx, 0u)
      && idx.is_constant (&i))
    {
      if (TREE_CODE (arg) == VECTOR_CST)
	return VECTOR_CST_ELT (arg, i);
      else if (TREE_CODE (arg) == CONSTRUCTOR)
	{
	  if (CONSTRUCTOR_NELTS (arg)
	      && VECTOR_TYPE_P (TREE_TYPE (CONSTRUCTOR_ELT (arg, 0)->value)))
	    return NULL_TREE;
	  if (i >= CONSTRUCTOR_NELTS (arg))
	    return build_zero_cst (TREE_TYPE (TREE_TYPE (arg)));
	  return CONSTRUCTOR_ELT (arg, i)->value;
	}
    }
  return NULL_TREE;
}
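
/* Example (illustrative only): reading element 2 of the VECTOR_CST
   { 0, 1, 2, 3 } returns the INTEGER_CST 2.  For a CONSTRUCTOR whose
   trailing elements are implicitly zero, an index past
   CONSTRUCTOR_NELTS folds to build_zero_cst of the element type.  */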

/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant, real, or fixed-point constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case REAL_CST:
      t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      break;

    case FIXED_CST:
      {
	FIXED_VALUE_TYPE f;
	bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
					    &(TREE_FIXED_CST (arg0)), NULL,
					    TYPE_SATURATING (type));
	t = build_fixed (type, f);
	/* Propagate overflow flags.  */
	if (overflow_p | TREE_OVERFLOW (arg0))
	  TREE_OVERFLOW (t) = 1;
	break;
      }

    default:
      if (poly_int_tree_p (arg0))
	{
	  wi::overflow_type overflow;
	  poly_wide_int res = wi::neg (wi::to_poly_wide (arg0), &overflow);
	  t = force_fit_type (type, res, 1,
			      (overflow && ! TYPE_UNSIGNED (type))
			      || TREE_OVERFLOW (arg0));
	  break;
	}

      gcc_unreachable ();
    }

  return t;
}

/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
	/* If the value is unsigned or non-negative, then the absolute value
	   is the same as the ordinary value.  */
	wide_int val = wi::to_wide (arg0);
	wi::overflow_type overflow = wi::OVF_NONE;
	if (!wi::neg_p (val, TYPE_SIGN (TREE_TYPE (arg0))))
	  ;

	/* If the value is negative, then the absolute value is
	   its negation.  */
	else
	  val = wi::neg (val, &overflow);

	/* Force to the destination type, set TREE_OVERFLOW for signed
	   TYPE only.  */
	t = force_fit_type (type, val, 1, overflow | TREE_OVERFLOW (arg0));
      }
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
	t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
      else
	t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
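
/* Example (illustrative only): for 32-bit int, fold_abs_const on
   INT_MIN negates the most negative value, which does not fit, so
   wi::neg reports overflow and force_fit_type returns an INTEGER_CST
   that still encodes INT_MIN but has TREE_OVERFLOW set.  */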

/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (const_tree arg0, tree type)
{
  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  return force_fit_type (type, ~wi::to_wide (arg0), 0, TREE_OVERFLOW (arg0));
}

/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	    case ORDERED_EXPR:
	      result = 0;
	      break;

	    case NE_EXPR:
	    case UNORDERED_EXPR:
	    case UNLT_EXPR:
	    case UNLE_EXPR:
	    case UNGT_EXPR:
	    case UNGE_EXPR:
	    case UNEQ_EXPR:
	      result = 1;
	      break;

	    case LT_EXPR:
	    case LE_EXPR:
	    case GT_EXPR:
	    case GE_EXPR:
	    case LTGT_EXPR:
	      if (flag_trapping_math)
		return NULL_TREE;
	      result = 0;
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  return constant_boolean_node (result, type);
	}

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
    {
      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
      return constant_boolean_node (fixed_compare (code, c0, c1), type);
    }

  /* Handle equality/inequality of complex constants.  */
  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
    {
      tree rcond = fold_relational_const (code, type,
					  TREE_REALPART (op0),
					  TREE_REALPART (op1));
      tree icond = fold_relational_const (code, type,
					  TREE_IMAGPART (op0),
					  TREE_IMAGPART (op1));
      if (code == EQ_EXPR)
	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
      else if (code == NE_EXPR)
	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
      else
	return NULL_TREE;
    }

  if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
    {
      if (!VECTOR_TYPE_P (type))
	{
	  /* Have vector comparison with scalar boolean result.  */
	  gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
		      && known_eq (VECTOR_CST_NELTS (op0),
				   VECTOR_CST_NELTS (op1)));
	  unsigned HOST_WIDE_INT nunits;
	  if (!VECTOR_CST_NELTS (op0).is_constant (&nunits))
	    return NULL_TREE;
	  for (unsigned i = 0; i < nunits; i++)
	    {
	      tree elem0 = VECTOR_CST_ELT (op0, i);
	      tree elem1 = VECTOR_CST_ELT (op1, i);
	      tree tmp = fold_relational_const (EQ_EXPR, type, elem0, elem1);
	      if (tmp == NULL_TREE)
		return NULL_TREE;
	      if (integer_zerop (tmp))
		return constant_boolean_node (code == NE_EXPR, type);
	    }
	  return constant_boolean_node (code == EQ_EXPR, type);
	}
      tree_vector_builder elts;
      if (!elts.new_binary_operation (type, op0, op1, false))
	return NULL_TREE;
      unsigned int count = elts.encoded_nelts ();
      for (unsigned i = 0; i < count; i++)
	{
	  tree elem_type = TREE_TYPE (type);
	  tree elem0 = VECTOR_CST_ELT (op0, i);
	  tree elem1 = VECTOR_CST_ELT (op1, i);

	  tree tem = fold_relational_const (code, elem_type,
					    elem0, elem1);

	  if (tem == NULL_TREE)
	    return NULL_TREE;

	  elts.quick_push (build_int_cst (elem_type,
					  integer_zerop (tem) ? 0 : -1));
	}

      return elts.build ();
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */
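
  /* For instance, 2 <= 3 is evaluated by swapping to 3 >= 2, which the
     inversion step turns into ! (3 < 2): tree_int_cst_lt (3, 2) is
     false, and flipping the result yields true.  */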

  if (code == LE_EXPR || code == GT_EXPR)
    {
      std::swap (op0, op1);
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case where it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
	result = tree_int_cst_equal (op0, op1);
      else
	result = tree_int_cst_lt (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}

/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check whether the expression inside
     the return has side effects, and likewise for the right-hand side
     of the MODIFY_EXPR inside the return.  If either has no side
     effects, we don't need to wrap the expression in a cleanup point.
     Note we don't check the left-hand side of the modify because it
     should always be a return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
	return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
	return expr;
    }

  return build1_loc (EXPR_LOCATION (expr), CLEANUP_POINT_EXPR, type, expr);
}

/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

tree
fold_indirect_ref_1 (location_t loc, tree type, tree op0)
{
  tree sub = op0;
  tree subtype;
  poly_uint64 const_op01;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype)
      || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (op0)))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);

      /* *&CONST_DECL -> the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
	return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
	{
	  tree fop = fold_read_from_constant_string (op);
	  if (fop)
	    return fop;
	  else
	    return op;
	}
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
	       && type == TREE_TYPE (optype)
	       && (!in_gimple_form
		   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (in_gimple_form
	      && TREE_CODE (min_val) != INTEGER_CST)
	    return NULL_TREE;
	  return build4_loc (loc, ARRAY_REF, type, op, min_val,
			     NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && type == TREE_TYPE (optype))
	return fold_build1_loc (loc, REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (VECTOR_TYPE_P (optype)
	       && type == TREE_TYPE (optype))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width,
				  index);
	}
    }

  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && poly_int_tree_p (TREE_OPERAND (sub, 1), &const_op01))
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);

      STRIP_NOPS (op00);
      if (TREE_CODE (op00) == ADDR_EXPR)
	{
	  tree op00type;
	  op00 = TREE_OPERAND (op00, 0);
	  op00type = TREE_TYPE (op00);

	  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
	  if (VECTOR_TYPE_P (op00type)
	      && type == TREE_TYPE (op00type)
	      /* POINTER_PLUS_EXPR second operand is sizetype, unsigned,
		 but we want to treat offsets with MSB set as negative.
		 For the code below negative offsets are invalid and
		 TYPE_SIZE of the element is something unsigned, so
		 check whether op01 fits into poly_int64, which implies
		 it is from 0 to INTTYPE_MAXIMUM (HOST_WIDE_INT), and
		 then just use poly_uint64 because we want to treat the
		 value as unsigned.  */
	      && tree_fits_poly_int64_p (op01))
	    {
	      tree part_width = TYPE_SIZE (type);
	      poly_uint64 max_offset
		= (tree_to_uhwi (part_width) / BITS_PER_UNIT
		   * TYPE_VECTOR_SUBPARTS (op00type));
	      if (known_lt (const_op01, max_offset))
		{
		  tree index = bitsize_int (const_op01 * BITS_PER_UNIT);
		  return fold_build3_loc (loc,
					  BIT_FIELD_REF, type, op00,
					  part_width, index);
		}
	    }
	  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
	  else if (TREE_CODE (op00type) == COMPLEX_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      if (known_eq (wi::to_poly_offset (TYPE_SIZE_UNIT (type)),
			    const_op01))
		return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
	    }
	  /* ((foo *)&fooarray)[1] => fooarray[1] */
	  else if (TREE_CODE (op00type) == ARRAY_TYPE
		   && type == TREE_TYPE (op00type))
	    {
	      tree type_domain = TYPE_DOMAIN (op00type);
	      tree min_val = size_zero_node;
	      if (type_domain && TYPE_MIN_VALUE (type_domain))
		min_val = TYPE_MIN_VALUE (type_domain);
	      poly_uint64 type_size, index;
	      if (poly_int_tree_p (min_val)
		  && poly_int_tree_p (TYPE_SIZE_UNIT (type), &type_size)
		  && multiple_p (const_op01, type_size, &index))
		{
		  poly_offset_int off = index + wi::to_poly_offset (min_val);
		  op01 = wide_int_to_tree (sizetype, off);
		  return build4_loc (loc, ARRAY_REF, type, op00, op01,
				     NULL_TREE, NULL_TREE);
		}
	    }
	}
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype))
      && (!in_gimple_form
	  || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref_loc (loc, sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (in_gimple_form
	  && TREE_CODE (min_val) != INTEGER_CST)
	return NULL_TREE;
      return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
			 NULL_TREE);
    }

  return NULL_TREE;
}
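
/* Example (illustrative only): given

     int a[4];
     ... *(int *)&a ...

   the ADDR_EXPR case above rewrites the indirection as a[0], i.e. an
   ARRAY_REF of A at the array's lower bound, matching the
   "*(foo *)&fooarray => fooarray[0]" pattern.  */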

/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  return build1_loc (loc, INDIRECT_REF, type, t);
}

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}

/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
	t = TREE_OPERAND (t, 0);
	break;

      case tcc_binary:
      case tcc_comparison:
	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	  t = TREE_OPERAND (t, 0);
	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
	  t = TREE_OPERAND (t, 1);
	else
	  return t;
	break;

      case tcc_expression:
	switch (TREE_CODE (t))
	  {
	  case COMPOUND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  case COND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  default:
	    return t;
	  }
	break;

      default:
	return t;
      }
}

/* Return the value of VALUE, rounded up to a multiple of DIVISOR.  */

tree
round_up_loc (location_t loc, tree value, unsigned int divisor)
{
  tree div = NULL_TREE;

  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (pow2_or_zerop (divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
	{
	  wide_int val = wi::to_wide (value);
	  bool overflow_p;

	  if ((val & (divisor - 1)) == 0)
	    return value;

	  overflow_p = TREE_OVERFLOW (value);
	  val += divisor - 1;
	  val &= (int) -divisor;
	  if (val == 0)
	    overflow_p = true;

	  return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
	}
      else
	{
	  tree t;

	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
	  value = size_binop_loc (loc, PLUS_EXPR, value, t);
	  t = build_int_cst (TREE_TYPE (value), - (int) divisor);
	  value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
	}
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
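
/* A worked instance of the power-of-two path above: rounding 21 up to
   a multiple of 8 computes (21 + 7) & -8, i.e. 28 & ~7, which is 24;
   values already at a multiple, such as 16, are returned unchanged by
   the (val & (divisor - 1)) == 0 check.  */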

/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (pow2_or_zerop (divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}

/* Returns a pointer to the base of the object addressed by EXP and
   extracts the offset of the access, storing it in *PBITPOS and
   *POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
				  poly_int64_pod *pbitpos, tree *poffset)
{
  tree core;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  poly_int64 bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &reversep,
				  &volatilep);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      core = TREE_OPERAND (exp, 0);
      STRIP_NOPS (core);
      *pbitpos = 0;
      *poffset = TREE_OPERAND (exp, 1);
      if (poly_int_tree_p (*poffset))
	{
	  poly_offset_int tem
	    = wi::sext (wi::to_poly_offset (*poffset),
			TYPE_PRECISION (TREE_TYPE (*poffset)));
	  tem <<= LOG2_BITS_PER_UNIT;
	  if (tem.to_shwi (pbitpos))
	    *poffset = NULL_TREE;
	}
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}

/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, poly_int64_pod *diff)
{
  tree core1, core2;
  poly_int64 bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  poly_int64 bytepos1, bytepos2;
  if (!multiple_p (bitpos1, BITS_PER_UNIT, &bytepos1)
      || !multiple_p (bitpos2, BITS_PER_UNIT, &bytepos2)
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
	return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += bytepos1 - bytepos2;
  return true;
}
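
/* Example (illustrative only): for

     char buf[16];

   E1 = &buf[10] and E2 = &buf[2] share the core &buf, so the function
   stores 8 in *DIFF and returns true; if either index were a variable,
   the cores would still match but the offset difference would not be
   constant, and the function would return false.  */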

/* Return OFF converted to a pointer offset type suitable as offset for
   POINTER_PLUS_EXPR.  Use location LOC for this conversion.  */
tree
convert_to_ptrofftype_loc (location_t loc, tree off)
{
  if (ptrofftype_p (TREE_TYPE (off)))
    return off;
  return fold_convert_loc (loc, sizetype, off);
}

/* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */
tree
fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
{
  return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
			  ptr, convert_to_ptrofftype_loc (loc, off));
}

/* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF.  */
tree
fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
{
  return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
			  ptr, size_int (off));
}
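
/* Example (illustrative only), assuming LOC and PTR are a location and
   a pointer-typed tree in scope: advancing PTR by four bytes:

     tree p4 = fold_build_pointer_plus_hwi_loc (loc, ptr, 4);

   This is shorthand for building the POINTER_PLUS_EXPR by hand with
   the offset already converted to the pointer offset type via
   size_int.  */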

/* Return a pointer to a NUL-terminated string containing the sequence
   of bytes corresponding to the representation of the object referred to
   by SRC (or a subsequence of such bytes within it if SRC is a reference
   to an initialized constant array plus some constant offset).
   Set *STRSIZE to the number of bytes in the constant sequence including
   the terminating NUL byte.  *STRSIZE is equal to sizeof (A) - OFFSET
   where A is the array that stores the constant sequence that SRC points
   to and OFFSET is the byte offset of SRC from the beginning of A.  SRC
   need not point to a string or even an array of characters but may point
   to an object of any type.  */

const char *
getbyterep (tree src, unsigned HOST_WIDE_INT *strsize)
{
  /* The offset into the array A storing the string, and A's byte size.  */
  tree offset_node;
  tree mem_size;

  if (strsize)
    *strsize = 0;

  if (strsize)
    src = byte_representation (src, &offset_node, &mem_size, NULL);
  else
    src = string_constant (src, &offset_node, &mem_size, NULL);
  if (!src)
    return NULL;

  unsigned HOST_WIDE_INT offset = 0;
  if (offset_node != NULL_TREE)
    {
      if (!tree_fits_uhwi_p (offset_node))
	return NULL;
      else
	offset = tree_to_uhwi (offset_node);
    }

  if (!tree_fits_uhwi_p (mem_size))
    return NULL;

  /* ARRAY_SIZE is the byte size of the array the constant sequence
     is stored in and equal to sizeof A.  INIT_BYTES is the number
     of bytes in the constant sequence used to initialize the array,
     including any embedded NULs as well as the terminating NUL (for
     strings), but not including any trailing zeros/NULs past
     the terminating one appended implicitly to a string literal to
     zero out the remainder of the array it's stored in.  For example,
     given:
       const char a[7] = "abc\0d";
       n = strlen (a + 1);
     ARRAY_SIZE is 7, INIT_BYTES is 6, and OFFSET is 1.  For a valid
     (i.e., nul-terminated) string with no embedded nuls, INIT_BYTES
     is equal to strlen (A) + 1.  */
  const unsigned HOST_WIDE_INT array_size = tree_to_uhwi (mem_size);
  unsigned HOST_WIDE_INT init_bytes = TREE_STRING_LENGTH (src);
  const char *string = TREE_STRING_POINTER (src);

  /* Ideally this would turn into a gcc_checking_assert over time.  */
  if (init_bytes > array_size)
    init_bytes = array_size;

  if (init_bytes == 0 || offset >= array_size)
    return NULL;

  if (strsize)
    {
      /* Compute and store the number of characters from the beginning
	 of the substring at OFFSET to the end, including the terminating
	 nul.  Offsets past the initial length refer to null strings.  */
      if (offset < init_bytes)
	*strsize = init_bytes - offset;
      else
	*strsize = 1;
    }
  else
    {
      tree eltype = TREE_TYPE (TREE_TYPE (src));
      /* Support only properly NUL-terminated single byte strings.  */
      if (tree_to_uhwi (TYPE_SIZE_UNIT (eltype)) != 1)
	return NULL;
      if (string[init_bytes - 1] != '\0')
	return NULL;
    }

  return offset < init_bytes ? string + offset : "";
}

/* Return a pointer to a NUL-terminated string corresponding to
   the expression STR referencing a constant string, possibly
   involving a constant offset.  Return null if STR either doesn't
   reference a constant string or if it involves a nonconstant
   offset.  */

const char *
c_getstr (tree str)
{
  return getbyterep (str, NULL);
}

/* Given a tree T, compute which bits in T may be nonzero.  */

wide_int
tree_nonzero_bits (const_tree t)
{
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      return wi::to_wide (t);
    case SSA_NAME:
      return get_nonzero_bits (t);
    case NON_LVALUE_EXPR:
    case SAVE_EXPR:
      return tree_nonzero_bits (TREE_OPERAND (t, 0));
    case BIT_AND_EXPR:
      return wi::bit_and (tree_nonzero_bits (TREE_OPERAND (t, 0)),
			  tree_nonzero_bits (TREE_OPERAND (t, 1)));
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 0)),
			 tree_nonzero_bits (TREE_OPERAND (t, 1)));
    case COND_EXPR:
      return wi::bit_or (tree_nonzero_bits (TREE_OPERAND (t, 1)),
			 tree_nonzero_bits (TREE_OPERAND (t, 2)));
    CASE_CONVERT:
      return wide_int::from (tree_nonzero_bits (TREE_OPERAND (t, 0)),
			     TYPE_PRECISION (TREE_TYPE (t)),
			     TYPE_SIGN (TREE_TYPE (TREE_OPERAND (t, 0))));
    case PLUS_EXPR:
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
	{
	  wide_int nzbits1 = tree_nonzero_bits (TREE_OPERAND (t, 0));
	  wide_int nzbits2 = tree_nonzero_bits (TREE_OPERAND (t, 1));
	  if (wi::bit_and (nzbits1, nzbits2) == 0)
	    return wi::bit_or (nzbits1, nzbits2);
	}
      break;
    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree type = TREE_TYPE (t);
	  wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
	  wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
				       TYPE_PRECISION (type));
	  return wi::neg_p (arg1)
		 ? wi::rshift (nzbits, -arg1, TYPE_SIGN (type))
		 : wi::lshift (nzbits, arg1);
	}
      break;
    case RSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree type = TREE_TYPE (t);
	  wide_int nzbits = tree_nonzero_bits (TREE_OPERAND (t, 0));
	  wide_int arg1 = wi::to_wide (TREE_OPERAND (t, 1),
				       TYPE_PRECISION (type));
	  return wi::neg_p (arg1)
		 ? wi::lshift (nzbits, -arg1)
		 : wi::rshift (nzbits, arg1, TYPE_SIGN (type));
	}
      break;
    default:
      break;
    }

  return wi::shwi (-1, TYPE_PRECISION (TREE_TYPE (t)));
}
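
/* Example (illustrative only): for T = (x & 0xf0) | (y & 0x0f), the
   BIT_AND_EXPR cases yield nonzero-bit masks 0xf0 and 0x0f, and the
   BIT_IOR_EXPR case combines them into 0xff.  The PLUS_EXPR case
   accepts the same operands because the masks do not overlap, so the
   addition cannot carry and has the same nonzero bits as the ior.  */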

#if CHECKING_P

namespace selftest {

/* Helper functions for writing tests of folding trees.  */

/* Verify that the binary op (LHS CODE RHS) folds to CONSTANT.  */

static void
assert_binop_folds_to_const (tree lhs, enum tree_code code, tree rhs,
			     tree constant)
{
  ASSERT_EQ (constant, fold_build2 (code, TREE_TYPE (lhs), lhs, rhs));
}

/* Verify that the binary op (LHS CODE RHS) folds to a NON_LVALUE_EXPR
   wrapping WRAPPED_EXPR.  */

static void
assert_binop_folds_to_nonlvalue (tree lhs, enum tree_code code, tree rhs,
				 tree wrapped_expr)
{
  tree result = fold_build2 (code, TREE_TYPE (lhs), lhs, rhs);
  ASSERT_NE (wrapped_expr, result);
  ASSERT_EQ (NON_LVALUE_EXPR, TREE_CODE (result));
  ASSERT_EQ (wrapped_expr, TREE_OPERAND (result, 0));
}

/* Verify that various arithmetic binary operations are folded
   correctly.  */

static void
test_arithmetic_folding ()
{
  tree type = integer_type_node;
  tree x = create_tmp_var_raw (type, "x");
  tree zero = build_zero_cst (type);
  tree one = build_int_cst (type, 1);

  /* Addition.  */
  /* 1 <-- (0 + 1) */
  assert_binop_folds_to_const (zero, PLUS_EXPR, one,
			       one);
  assert_binop_folds_to_const (one, PLUS_EXPR, zero,
			       one);

  /* (nonlvalue)x <-- (x + 0) */
  assert_binop_folds_to_nonlvalue (x, PLUS_EXPR, zero,
				   x);

  /* Subtraction.  */
  /* 0 <-- (x - x) */
  assert_binop_folds_to_const (x, MINUS_EXPR, x,
			       zero);
  assert_binop_folds_to_nonlvalue (x, MINUS_EXPR, zero,
				   x);

  /* Multiplication.  */
  /* 0 <-- (x * 0) */
  assert_binop_folds_to_const (x, MULT_EXPR, zero,
			       zero);

  /* (nonlvalue)x <-- (x * 1) */
  assert_binop_folds_to_nonlvalue (x, MULT_EXPR, one,
				   x);
}

/* Verify that various binary operations on vectors are folded
   correctly.  */

static void
test_vector_folding ()
{
  tree inner_type = integer_type_node;
  tree type = build_vector_type (inner_type, 4);
  tree zero = build_zero_cst (type);
  tree one = build_one_cst (type);
  tree index = build_index_vector (type, 0, 1);

  /* Verify equality tests that return a scalar boolean result.  */
  tree res_type = boolean_type_node;
  ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, one)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type, zero, zero)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, zero, one)));
  ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, one, one)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (NE_EXPR, res_type, index, one)));
  ASSERT_FALSE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
					       index, one)));
  ASSERT_FALSE (integer_nonzerop (fold_build2 (NE_EXPR, res_type,
					       index, index)));
  ASSERT_TRUE (integer_nonzerop (fold_build2 (EQ_EXPR, res_type,
					      index, index)));
}

/* Verify folding of VEC_DUPLICATE_EXPRs.  */

static void
test_vec_duplicate_folding ()
{
  scalar_int_mode int_mode = SCALAR_INT_TYPE_MODE (ssizetype);
  machine_mode vec_mode = targetm.vectorize.preferred_simd_mode (int_mode);
  /* This will be 1 if VEC_MODE isn't a vector mode.  */
  poly_uint64 nunits = GET_MODE_NUNITS (vec_mode);

  tree type = build_vector_type (ssizetype, nunits);
  tree dup5_expr = fold_unary (VEC_DUPLICATE_EXPR, type, ssize_int (5));
  tree dup5_cst = build_vector_from_val (type, ssize_int (5));
  ASSERT_TRUE (operand_equal_p (dup5_expr, dup5_cst, 0));
}

/* Run all of the selftests within this file.  */

void
fold_const_c_tests ()
{
  test_arithmetic_folding ();
  test_vector_folding ();
  test_vec_duplicate_folding ();
}

} // namespace selftest

#endif /* CHECKING_P */