1 /* Utility routines for data type conversion for GCC.
2    Copyright (C) 1987-2019 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 
21 /* These routines are somewhat language-independent utility functions
22    intended to be called by the language-specific convert () functions.  */
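/* Illustrative note (an assumption about typical front ends, not something
   this file requires): a front end's convert (type, expr) usually switches on
   TREE_CODE (type) and dispatches to convert_to_integer, convert_to_real,
   convert_to_pointer, convert_to_complex, etc., defined below.  */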
23 
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "target.h"
28 #include "tree.h"
29 #include "diagnostic-core.h"
30 #include "fold-const.h"
31 #include "stor-layout.h"
32 #include "convert.h"
33 #include "langhooks.h"
34 #include "builtins.h"
35 #include "ubsan.h"
36 #include "stringpool.h"
37 #include "attribs.h"
38 #include "asan.h"
39 #include "selftest.h"
40 
41 #define maybe_fold_build1_loc(FOLD_P, LOC, CODE, TYPE, EXPR) \
42   ((FOLD_P) ? fold_build1_loc (LOC, CODE, TYPE, EXPR)	     \
43    : build1_loc (LOC, CODE, TYPE, EXPR))
44 #define maybe_fold_build2_loc(FOLD_P, LOC, CODE, TYPE, EXPR1, EXPR2) \
45   ((FOLD_P) ? fold_build2_loc (LOC, CODE, TYPE, EXPR1, EXPR2)	     \
46    : build2_loc (LOC, CODE, TYPE, EXPR1, EXPR2))
47 
48 /* Convert EXPR to some pointer or reference type TYPE.
49    EXPR must be pointer, reference, integer, enumeral, or literal zero;
50    in other cases error is called.  If FOLD_P is true, try to fold the
51    expression.  */
52 
53 static tree
54 convert_to_pointer_1 (tree type, tree expr, bool fold_p)
55 {
56   location_t loc = EXPR_LOCATION (expr);
57   if (TREE_TYPE (expr) == type)
58     return expr;
59 
60   switch (TREE_CODE (TREE_TYPE (expr)))
61     {
62     case POINTER_TYPE:
63     case REFERENCE_TYPE:
64       {
65         /* If the pointers point to different address spaces, conversion needs
66 	   to be done via an ADDR_SPACE_CONVERT_EXPR instead of a NOP_EXPR.  */
67 	addr_space_t to_as = TYPE_ADDR_SPACE (TREE_TYPE (type));
68 	addr_space_t from_as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (expr)));
69 
70 	if (to_as == from_as)
71 	  return maybe_fold_build1_loc (fold_p, loc, NOP_EXPR, type, expr);
72 	else
73 	  return maybe_fold_build1_loc (fold_p, loc, ADDR_SPACE_CONVERT_EXPR,
74 					type, expr);
75       }
76 
77     case INTEGER_TYPE:
78     case ENUMERAL_TYPE:
79     case BOOLEAN_TYPE:
80       {
81 	/* If the input precision differs from the target pointer type
82 	   precision, first convert the input expression to an integer type of
83 	   the target precision.  Some targets, e.g. VMS, need several pointer
84 	   sizes to coexist so the latter isn't necessarily POINTER_SIZE.  */
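	/* Illustrative example (hypothetical LP64 target): converting a
	   32-bit int to a 64-bit pointer type first widens the value to a
	   64-bit integer type here; the CONVERT_EXPR built below then turns
	   that integer into the pointer value.  */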
85 	unsigned int pprec = TYPE_PRECISION (type);
86 	unsigned int eprec = TYPE_PRECISION (TREE_TYPE (expr));
87 
88 	if (eprec != pprec)
89 	  expr
90 	    = maybe_fold_build1_loc (fold_p, loc, NOP_EXPR,
91 				     lang_hooks.types.type_for_size (pprec, 0),
92 				     expr);
93       }
94       return maybe_fold_build1_loc (fold_p, loc, CONVERT_EXPR, type, expr);
95 
96     default:
97       error ("cannot convert to a pointer type");
98       return convert_to_pointer_1 (type, integer_zero_node, fold_p);
99     }
100 }
101 
102 /* Subroutine of the various convert_to_*_maybe_fold routines.
103 
104    If a location wrapper has been folded to a constant (presumably of
105    a different type), re-wrap the new constant with a location wrapper.  */
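/* For example, if folding (int) wrapped_char_cst yields a new INTEGER_CST of
   type int, the new constant is re-wrapped at the original expression's
   location so that later diagnostics still point at the source token.
   (Illustrative only.)  */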
106 
107 tree
108 preserve_any_location_wrapper (tree result, tree orig_expr)
109 {
110   if (CONSTANT_CLASS_P (result) && location_wrapper_p (orig_expr))
111     {
112       if (result == TREE_OPERAND (orig_expr, 0))
113 	return orig_expr;
114       else
115 	return maybe_wrap_with_location (result, EXPR_LOCATION (orig_expr));
116     }
117 
118   return result;
119 }
120 
121 /* A wrapper around convert_to_pointer_1 that always folds the
122    expression.  */
123 
124 tree
125 convert_to_pointer (tree type, tree expr)
126 {
127   return convert_to_pointer_1 (type, expr, true);
128 }
129 
130 /* A wrapper around convert_to_pointer_1 that only folds the
131    expression if DOFOLD, or if it is CONSTANT_CLASS_OR_WRAPPER_P.  */
132 
133 tree
134 convert_to_pointer_maybe_fold (tree type, tree expr, bool dofold)
135 {
136   tree result
137     = convert_to_pointer_1 (type, expr,
138 			    dofold || CONSTANT_CLASS_OR_WRAPPER_P (expr));
139   return preserve_any_location_wrapper (result, expr);
140 }
141 
142 /* Convert EXPR to some floating-point type TYPE.
143 
144    EXPR must be float, fixed-point, integer, or enumeral;
145    in other cases error is called.  If FOLD_P is true, try to fold
146    the expression.  */
147 
148 static tree
149 convert_to_real_1 (tree type, tree expr, bool fold_p)
150 {
151   enum built_in_function fcode = builtin_mathfn_code (expr);
152   tree itype = TREE_TYPE (expr);
153   location_t loc = EXPR_LOCATION (expr);
154 
155   if (TREE_CODE (expr) == COMPOUND_EXPR)
156     {
157       tree t = convert_to_real_1 (type, TREE_OPERAND (expr, 1), fold_p);
158       if (t == TREE_OPERAND (expr, 1))
159 	return expr;
160       return build2_loc (EXPR_LOCATION (expr), COMPOUND_EXPR, TREE_TYPE (t),
161 			 TREE_OPERAND (expr, 0), t);
162     }
163 
164   /* Disable until we figure out how to decide whether the functions are
165      present at runtime.  */
166   /* Convert (float)sqrt((double)x) where x is float into sqrtf(x) */
167   if (optimize
168       && (TYPE_MODE (type) == TYPE_MODE (double_type_node)
169           || TYPE_MODE (type) == TYPE_MODE (float_type_node)))
170     {
171       switch (fcode)
172         {
173 #define CASE_MATHFN(FN) case BUILT_IN_##FN: case BUILT_IN_##FN##L:
174 	  CASE_MATHFN (COSH)
175 	  CASE_MATHFN (EXP)
176 	  CASE_MATHFN (EXP10)
177 	  CASE_MATHFN (EXP2)
178  	  CASE_MATHFN (EXPM1)
179 	  CASE_MATHFN (GAMMA)
180 	  CASE_MATHFN (J0)
181 	  CASE_MATHFN (J1)
182 	  CASE_MATHFN (LGAMMA)
183 	  CASE_MATHFN (POW10)
184 	  CASE_MATHFN (SINH)
185 	  CASE_MATHFN (TGAMMA)
186 	  CASE_MATHFN (Y0)
187 	  CASE_MATHFN (Y1)
188 	    /* The above functions may set errno differently with float
189 	       input or output so this transformation is not safe with
190 	       -fmath-errno.  */
191 	    if (flag_errno_math)
192 	      break;
193 	    gcc_fallthrough ();
194 	  CASE_MATHFN (ACOS)
195 	  CASE_MATHFN (ACOSH)
196 	  CASE_MATHFN (ASIN)
197  	  CASE_MATHFN (ASINH)
198  	  CASE_MATHFN (ATAN)
199 	  CASE_MATHFN (ATANH)
200  	  CASE_MATHFN (CBRT)
201  	  CASE_MATHFN (COS)
202  	  CASE_MATHFN (ERF)
203  	  CASE_MATHFN (ERFC)
204 	  CASE_MATHFN (LOG)
205 	  CASE_MATHFN (LOG10)
206 	  CASE_MATHFN (LOG2)
207  	  CASE_MATHFN (LOG1P)
208  	  CASE_MATHFN (SIN)
209  	  CASE_MATHFN (TAN)
210  	  CASE_MATHFN (TANH)
211 	    /* It is not safe to do this conversion for the above functions.  */
212 	    if (!flag_unsafe_math_optimizations)
213 	      break;
214 	    gcc_fallthrough ();
215 	  CASE_MATHFN (SQRT)
216 	  CASE_MATHFN (FABS)
217 	  CASE_MATHFN (LOGB)
218 #undef CASE_MATHFN
219 	    if (call_expr_nargs (expr) != 1
220 		|| !SCALAR_FLOAT_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (expr, 0))))
221 	      break;
222 	    {
223 	      tree arg0 = strip_float_extensions (CALL_EXPR_ARG (expr, 0));
224 	      tree newtype = type;
225 
226 	      /* We have (outertype)sqrt((innertype)x).  Choose the wider mode
227 		 of the two as the safe type for the operation.  */
228 	      if (TYPE_PRECISION (TREE_TYPE (arg0)) > TYPE_PRECISION (type))
229 		newtype = TREE_TYPE (arg0);
230 
231 	      /* We consider converting
232 
233 		     (T1) sqrtT2 ((T2) exprT3)
234 		 to
235 		     (T1) sqrtT4 ((T4) exprT3)
236 
237 		  , where T1 is TYPE, T2 is ITYPE, T3 is TREE_TYPE (ARG0),
238 		 and T4 is NEWTYPE.  All of those types are floating-point types.
239 		 T4 (NEWTYPE) should be narrower than T2 (ITYPE). This conversion
240 		 is safe only if P1 >= P2*2+2, where P1 and P2 are precisions of
241 		 T2 and T4.  See the following URL for a reference:
242 		 http://stackoverflow.com/questions/9235456/determining-
243                  floating-point-square-root
244 		 */
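	      /* Worked example, assuming IEEE binary formats: for T2 = double
		 (P1 = 53) and T4 = float (P2 = 24), 53 >= 2*24 + 2 holds, so
		 (float) sqrt ((double) float_val) may become sqrtf (float_val).
		 For T2 = x86 extended long double (P1 = 64) and T4 = double
		 (P2 = 53), 64 < 2*53 + 2, so that narrowing is rejected.  */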
245 	      if ((fcode == BUILT_IN_SQRT || fcode == BUILT_IN_SQRTL)
246 		  && !flag_unsafe_math_optimizations)
247 		{
248 		  /* The following conversion is unsafe even if the precision condition
249 		     below is satisfied:
250 
251 		     (float) sqrtl ((long double) double_val) -> (float) sqrt (double_val)
252 		    */
253 		  if (TYPE_MODE (type) != TYPE_MODE (newtype))
254 		    break;
255 
256 		  int p1 = REAL_MODE_FORMAT (TYPE_MODE (itype))->p;
257 		  int p2 = REAL_MODE_FORMAT (TYPE_MODE (newtype))->p;
258 		  if (p1 < p2 * 2 + 2)
259 		    break;
260 		}
261 
262 	      /* Be careful about integer to fp conversions.
263 		 These may overflow still.  */
264 	      if (FLOAT_TYPE_P (TREE_TYPE (arg0))
265 		  && TYPE_PRECISION (newtype) < TYPE_PRECISION (itype)
266 		  && (TYPE_MODE (newtype) == TYPE_MODE (double_type_node)
267 		      || TYPE_MODE (newtype) == TYPE_MODE (float_type_node)))
268 		{
269 		  tree fn = mathfn_built_in (newtype, fcode);
270 		  if (fn)
271 		    {
272 		      tree arg = convert_to_real_1 (newtype, arg0, fold_p);
273 		      expr = build_call_expr (fn, 1, arg);
274 		      if (newtype == type)
275 			return expr;
276 		    }
277 		}
278 	    }
279 	default:
280 	  break;
281 	}
282     }
283 
284   /* Propagate the cast into the operation.  */
285   if (itype != type && FLOAT_TYPE_P (type))
286     switch (TREE_CODE (expr))
287       {
288 	/* Convert (float)-x into -(float)x.  This is safe for
289 	   round-to-nearest rounding mode when the inner type is float.  */
290 	case ABS_EXPR:
291 	case NEGATE_EXPR:
292 	  if (!flag_rounding_math
293 	      && FLOAT_TYPE_P (itype)
294 	      && TYPE_PRECISION (type) < TYPE_PRECISION (itype))
295 	    {
296 	      tree arg = convert_to_real_1 (type, TREE_OPERAND (expr, 0),
297 					    fold_p);
298 	      return build1 (TREE_CODE (expr), type, arg);
299 	    }
300 	  break;
301 	/* Convert (outertype)((innertype0)a+(innertype1)b)
302 	   into ((newtype)a+(newtype)b) where newtype
303 	   is the widest mode from all of these.  */
304 	case PLUS_EXPR:
305 	case MINUS_EXPR:
306 	case MULT_EXPR:
307 	case RDIV_EXPR:
308 	   {
309 	     tree arg0 = strip_float_extensions (TREE_OPERAND (expr, 0));
310 	     tree arg1 = strip_float_extensions (TREE_OPERAND (expr, 1));
311 
312 	     if (FLOAT_TYPE_P (TREE_TYPE (arg0))
313 		 && FLOAT_TYPE_P (TREE_TYPE (arg1))
314 		 && DECIMAL_FLOAT_TYPE_P (itype) == DECIMAL_FLOAT_TYPE_P (type))
315 	       {
316 		  tree newtype = type;
317 
318 		  if (TYPE_MODE (TREE_TYPE (arg0)) == SDmode
319 		      || TYPE_MODE (TREE_TYPE (arg1)) == SDmode
320 		      || TYPE_MODE (type) == SDmode)
321 		    newtype = dfloat32_type_node;
322 		  if (TYPE_MODE (TREE_TYPE (arg0)) == DDmode
323 		      || TYPE_MODE (TREE_TYPE (arg1)) == DDmode
324 		      || TYPE_MODE (type) == DDmode)
325 		    newtype = dfloat64_type_node;
326 		  if (TYPE_MODE (TREE_TYPE (arg0)) == TDmode
327 		      || TYPE_MODE (TREE_TYPE (arg1)) == TDmode
328 		      || TYPE_MODE (type) == TDmode)
329                     newtype = dfloat128_type_node;
330 		  if (newtype == dfloat32_type_node
331 		      || newtype == dfloat64_type_node
332 		      || newtype == dfloat128_type_node)
333 		    {
334 		      expr = build2 (TREE_CODE (expr), newtype,
335 				     convert_to_real_1 (newtype, arg0,
336 							fold_p),
337 				     convert_to_real_1 (newtype, arg1,
338 							fold_p));
339 		      if (newtype == type)
340 			return expr;
341 		      break;
342 		    }
343 
344 		  if (TYPE_PRECISION (TREE_TYPE (arg0)) > TYPE_PRECISION (newtype))
345 		    newtype = TREE_TYPE (arg0);
346 		  if (TYPE_PRECISION (TREE_TYPE (arg1)) > TYPE_PRECISION (newtype))
347 		    newtype = TREE_TYPE (arg1);
348 		  /* Sometimes this transformation is safe (cannot
349 		     change results through affecting double rounding
350 		     cases) and sometimes it is not.  If NEWTYPE is
351 		     wider than TYPE, e.g. (float)((long double)double
352 		     + (long double)double) converted to
353 		     (float)(double + double), the transformation is
354 		     unsafe regardless of the details of the types
355 		     involved; double rounding can arise if the result
356 		     of NEWTYPE arithmetic is a NEWTYPE value half way
357 		     between two representable TYPE values but the
358 		     exact value is sufficiently different (in the
359 		     right direction) for this difference to be
360 		     visible in ITYPE arithmetic.  If NEWTYPE is the
361 		     same as TYPE, however, the transformation may be
362 		     safe depending on the types involved: it is safe
363 		     if the ITYPE has strictly more than twice as many
364 		     mantissa bits as TYPE, can represent infinities
365 		     and NaNs if the TYPE can, and has sufficient
366 		     exponent range for the product or ratio of two
367 		     values representable in the TYPE to be within the
368 		     range of normal values of ITYPE.  */
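		  /* Worked example, assuming IEEE binary formats: for
		     (float)((double)f1 * (double)f2) with float F1/F2,
		     NEWTYPE == TYPE == float and ITYPE == double, and double
		     has 53 > 2*24 mantissa bits, so the multiplication may be
		     shortened to float.  Shortening
		     (double)((long double)d1 + (long double)d2) with a 64-bit
		     long double mantissa, by contrast, risks double rounding
		     and is only done with -funsafe-math-optimizations.  */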
369 		  if (TYPE_PRECISION (newtype) < TYPE_PRECISION (itype)
370 		      && (flag_unsafe_math_optimizations
371 			  || (TYPE_PRECISION (newtype) == TYPE_PRECISION (type)
372 			      && real_can_shorten_arithmetic (TYPE_MODE (itype),
373 							      TYPE_MODE (type))
374 			      && !excess_precision_type (newtype))))
375 		    {
376 		      expr = build2 (TREE_CODE (expr), newtype,
377 				     convert_to_real_1 (newtype, arg0,
378 							fold_p),
379 				     convert_to_real_1 (newtype, arg1,
380 							fold_p));
381 		      if (newtype == type)
382 			return expr;
383 		    }
384 	       }
385 	   }
386 	  break;
387 	default:
388 	  break;
389       }
390 
391   switch (TREE_CODE (TREE_TYPE (expr)))
392     {
393     case REAL_TYPE:
394       /* Ignore the conversion if we don't need to store intermediate
395 	 results and neither type is a decimal float.  */
396       return build1_loc (loc,
397 			 (flag_float_store
398 			  || DECIMAL_FLOAT_TYPE_P (type)
399 			  || DECIMAL_FLOAT_TYPE_P (itype))
400 			 ? CONVERT_EXPR : NOP_EXPR, type, expr);
401 
402     case INTEGER_TYPE:
403     case ENUMERAL_TYPE:
404     case BOOLEAN_TYPE:
405       return build1 (FLOAT_EXPR, type, expr);
406 
407     case FIXED_POINT_TYPE:
408       return build1 (FIXED_CONVERT_EXPR, type, expr);
409 
410     case COMPLEX_TYPE:
411       return convert (type,
412 		      maybe_fold_build1_loc (fold_p, loc, REALPART_EXPR,
413 					     TREE_TYPE (TREE_TYPE (expr)),
414 					     expr));
415 
416     case POINTER_TYPE:
417     case REFERENCE_TYPE:
418       error ("pointer value used where a floating point value was expected");
419       return convert_to_real_1 (type, integer_zero_node, fold_p);
420 
421     default:
422       error ("aggregate value used where a float was expected");
423       return convert_to_real_1 (type, integer_zero_node, fold_p);
424     }
425 }
426 
427 /* A wrapper around convert_to_real_1 that always folds the
428    expression.  */
429 
430 tree
431 convert_to_real (tree type, tree expr)
432 {
433   return convert_to_real_1 (type, expr, true);
434 }
435 
436 /* A wrapper around convert_to_real_1 that only folds the
437    expression if DOFOLD, or if it is CONSTANT_CLASS_OR_WRAPPER_P.  */
438 
439 tree
440 convert_to_real_maybe_fold (tree type, tree expr, bool dofold)
441 {
442   tree result
443     = convert_to_real_1 (type, expr,
444 			 dofold || CONSTANT_CLASS_OR_WRAPPER_P (expr));
445   return preserve_any_location_wrapper (result, expr);
446 }
447 
448 /* Try to perform the arithmetic EX_FORM (ARG0, ARG1) in narrowed argument
449    types, producing a result in TYPE.  */
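/* For instance (illustrative only, assuming 16-bit short and 32-bit int),
   (short) ((int) s1 + (int) s2) with short S1/S2 can be carried out directly
   in a 16-bit type; the signedness adjustments below may force that type to
   be unsigned so that no new signed overflow is introduced.  */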
450 
451 static tree
452 do_narrow (location_t loc,
453 	   enum tree_code ex_form, tree type, tree arg0, tree arg1,
454 	   tree expr, unsigned inprec, unsigned outprec, bool dofold)
455 {
456   /* Do the arithmetic in type TYPEX,
457      then convert result to TYPE.  */
458   tree typex = type;
459 
460   /* Can't do arithmetic in enumeral types
461      so use an integer type that will hold the values.  */
462   if (TREE_CODE (typex) == ENUMERAL_TYPE)
463     typex = lang_hooks.types.type_for_size (TYPE_PRECISION (typex),
464 					    TYPE_UNSIGNED (typex));
465 
466   /* The type demotion below might cause doing unsigned arithmetic
467      instead of signed, and thus hide overflow bugs.  */
468   if ((ex_form == PLUS_EXPR || ex_form == MINUS_EXPR)
469       && !TYPE_UNSIGNED (typex)
470       && sanitize_flags_p (SANITIZE_SI_OVERFLOW))
471     return NULL_TREE;
472 
473   /* But now perhaps TYPEX is as wide as INPREC.
474      In that case, do nothing special here.
475      (Otherwise we would recurse infinitely in convert.)  */
476   if (TYPE_PRECISION (typex) != inprec)
477     {
478       /* Don't do unsigned arithmetic where signed was wanted,
479 	 or vice versa.
480 	 Exception: if both of the original operands were
481 	 unsigned then we can safely do the work as unsigned.
482 	 Exception: shift operations take their type solely
483 	 from the first argument.
484 	 Exception: the LSHIFT_EXPR case above requires that
485 	 we perform this operation unsigned lest we produce
486 	 signed-overflow undefinedness.
487 	 And we may need to do it as unsigned
488 	 if we truncate to the original size.  */
489       if (TYPE_UNSIGNED (TREE_TYPE (expr))
490 	  || (TYPE_UNSIGNED (TREE_TYPE (arg0))
491 	      && (TYPE_UNSIGNED (TREE_TYPE (arg1))
492 		  || ex_form == LSHIFT_EXPR
493 		  || ex_form == RSHIFT_EXPR
494 		  || ex_form == LROTATE_EXPR
495 		  || ex_form == RROTATE_EXPR))
496 	  || ex_form == LSHIFT_EXPR
497 	  /* If we have !flag_wrapv, and either ARG0 or
498 	     ARG1 is of a signed type, we have to do
499 	     PLUS_EXPR, MINUS_EXPR or MULT_EXPR in an unsigned
500 	     type in case the operation in outprec precision
501 	     could overflow.  Otherwise, we would introduce
502 	     signed-overflow undefinedness.  */
503 	  || ((!(INTEGRAL_TYPE_P (TREE_TYPE (arg0))
504 		 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
505 	       || !(INTEGRAL_TYPE_P (TREE_TYPE (arg1))
506 		    && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1))))
507 	      && ((TYPE_PRECISION (TREE_TYPE (arg0)) * 2u
508 		   > outprec)
509 		  || (TYPE_PRECISION (TREE_TYPE (arg1)) * 2u
510 		      > outprec))
511 	      && (ex_form == PLUS_EXPR
512 		  || ex_form == MINUS_EXPR
513 		  || ex_form == MULT_EXPR)))
514 	{
515 	  if (!TYPE_UNSIGNED (typex))
516 	    typex = unsigned_type_for (typex);
517 	}
518       else
519 	{
520 	  if (TYPE_UNSIGNED (typex))
521 	    typex = signed_type_for (typex);
522 	}
523       /* We should do away with all this once we have a proper
524 	 type promotion/demotion pass, see PR45397.  */
525       expr = maybe_fold_build2_loc (dofold, loc, ex_form, typex,
526 				    convert (typex, arg0),
527 				    convert (typex, arg1));
528       return convert (type, expr);
529     }
530 
531   return NULL_TREE;
532 }
533 
534 /* Convert EXPR to some integer (or enum) type TYPE.
535 
536    EXPR must be pointer, integer, discrete (enum, char, or bool), float,
537    fixed-point or vector; in other cases error is called.
538 
539    If DOFOLD is TRUE, we try to simplify newly-created patterns by folding.
540 
541    The result of this is always supposed to be a newly created tree node
542    not in use in any existing structure.  */
543 
544 static tree
545 convert_to_integer_1 (tree type, tree expr, bool dofold)
546 {
547   enum tree_code ex_form = TREE_CODE (expr);
548   tree intype = TREE_TYPE (expr);
549   unsigned int inprec = element_precision (intype);
550   unsigned int outprec = element_precision (type);
551   location_t loc = EXPR_LOCATION (expr);
552 
553   /* An INTEGER_TYPE cannot be incomplete, but an ENUMERAL_TYPE can
554      be.  Consider `enum E { a, b = (enum E) 3 };'.  */
555   if (!COMPLETE_TYPE_P (type))
556     {
557       error ("conversion to incomplete type");
558       return error_mark_node;
559     }
560 
561   if (ex_form == COMPOUND_EXPR)
562     {
563       tree t = convert_to_integer_1 (type, TREE_OPERAND (expr, 1), dofold);
564       if (t == TREE_OPERAND (expr, 1))
565 	return expr;
566       return build2_loc (EXPR_LOCATION (expr), COMPOUND_EXPR, TREE_TYPE (t),
567 			 TREE_OPERAND (expr, 0), t);
568     }
569 
570   /* Convert e.g. (long)round(d) -> lround(d).  */
571   /* If we're converting to char, we may encounter differing behavior
572      between converting from double->char vs double->long->char.
573      We're in "undefined" territory but we prefer to be conservative,
574      so only proceed in "unsafe" math mode.  */
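  /* For example, at -O2 with a C99-capable libc and -fno-math-errno,
     (long) round (d) becomes lround (d) and (long long) rint (d) becomes
     llrint (d) below.  (Illustrative; the exact built-in chosen depends on
     the precision and signedness of the target integer type.)  */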
575   if (optimize
576       && (flag_unsafe_math_optimizations
577 	  || (long_integer_type_node
578 	      && outprec >= TYPE_PRECISION (long_integer_type_node))))
579     {
580       tree s_expr = strip_float_extensions (expr);
581       tree s_intype = TREE_TYPE (s_expr);
582       const enum built_in_function fcode = builtin_mathfn_code (s_expr);
583       tree fn = 0;
584 
585       switch (fcode)
586         {
587 	CASE_FLT_FN (BUILT_IN_CEIL):
588 	CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL):
589 	  /* Only convert in ISO C99 mode.  */
590 	  if (!targetm.libc_has_function (function_c99_misc))
591 	    break;
592 	  if (outprec < TYPE_PRECISION (integer_type_node)
593 	      || (outprec == TYPE_PRECISION (integer_type_node)
594 		  && !TYPE_UNSIGNED (type)))
595 	    fn = mathfn_built_in (s_intype, BUILT_IN_ICEIL);
596 	  else if (outprec == TYPE_PRECISION (long_integer_type_node)
597 		   && !TYPE_UNSIGNED (type))
598 	    fn = mathfn_built_in (s_intype, BUILT_IN_LCEIL);
599 	  else if (outprec == TYPE_PRECISION (long_long_integer_type_node)
600 		   && !TYPE_UNSIGNED (type))
601 	    fn = mathfn_built_in (s_intype, BUILT_IN_LLCEIL);
602 	  break;
603 
604 	CASE_FLT_FN (BUILT_IN_FLOOR):
605 	CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR):
606 	  /* Only convert in ISO C99 mode.  */
607 	  if (!targetm.libc_has_function (function_c99_misc))
608 	    break;
609 	  if (outprec < TYPE_PRECISION (integer_type_node)
610 	      || (outprec == TYPE_PRECISION (integer_type_node)
611 		  && !TYPE_UNSIGNED (type)))
612 	    fn = mathfn_built_in (s_intype, BUILT_IN_IFLOOR);
613 	  else if (outprec == TYPE_PRECISION (long_integer_type_node)
614 		   && !TYPE_UNSIGNED (type))
615 	    fn = mathfn_built_in (s_intype, BUILT_IN_LFLOOR);
616 	  else if (outprec == TYPE_PRECISION (long_long_integer_type_node)
617 		   && !TYPE_UNSIGNED (type))
618 	    fn = mathfn_built_in (s_intype, BUILT_IN_LLFLOOR);
619 	  break;
620 
621 	CASE_FLT_FN (BUILT_IN_ROUND):
622 	CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND):
623 	  /* Only convert in ISO C99 mode and with -fno-math-errno.  */
624 	  if (!targetm.libc_has_function (function_c99_misc)
625 	      || flag_errno_math)
626 	    break;
627 	  if (outprec < TYPE_PRECISION (integer_type_node)
628 	      || (outprec == TYPE_PRECISION (integer_type_node)
629 		  && !TYPE_UNSIGNED (type)))
630 	    fn = mathfn_built_in (s_intype, BUILT_IN_IROUND);
631 	  else if (outprec == TYPE_PRECISION (long_integer_type_node)
632 		   && !TYPE_UNSIGNED (type))
633 	    fn = mathfn_built_in (s_intype, BUILT_IN_LROUND);
634 	  else if (outprec == TYPE_PRECISION (long_long_integer_type_node)
635 		   && !TYPE_UNSIGNED (type))
636 	    fn = mathfn_built_in (s_intype, BUILT_IN_LLROUND);
637 	  break;
638 
639 	CASE_FLT_FN (BUILT_IN_NEARBYINT):
640 	CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT):
641 	  /* Only convert nearbyint* if we can ignore math exceptions.  */
642 	  if (flag_trapping_math)
643 	    break;
644 	  gcc_fallthrough ();
645 	CASE_FLT_FN (BUILT_IN_RINT):
646 	CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT):
647 	  /* Only convert in ISO C99 mode and with -fno-math-errno.  */
648 	  if (!targetm.libc_has_function (function_c99_misc)
649 	      || flag_errno_math)
650 	    break;
651 	  if (outprec < TYPE_PRECISION (integer_type_node)
652 	      || (outprec == TYPE_PRECISION (integer_type_node)
653 		  && !TYPE_UNSIGNED (type)))
654 	    fn = mathfn_built_in (s_intype, BUILT_IN_IRINT);
655 	  else if (outprec == TYPE_PRECISION (long_integer_type_node)
656 		   && !TYPE_UNSIGNED (type))
657 	    fn = mathfn_built_in (s_intype, BUILT_IN_LRINT);
658 	  else if (outprec == TYPE_PRECISION (long_long_integer_type_node)
659 		   && !TYPE_UNSIGNED (type))
660 	    fn = mathfn_built_in (s_intype, BUILT_IN_LLRINT);
661 	  break;
662 
663 	CASE_FLT_FN (BUILT_IN_TRUNC):
664 	CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC):
665 	  if (call_expr_nargs (s_expr) != 1
666 	      || !SCALAR_FLOAT_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (s_expr, 0))))
667 	    break;
668 	  return convert_to_integer_1 (type, CALL_EXPR_ARG (s_expr, 0),
669 				       dofold);
670 
671 	default:
672 	  break;
673 	}
674 
675       if (fn
676 	  && call_expr_nargs (s_expr) == 1
677 	  && SCALAR_FLOAT_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (s_expr, 0))))
678 	{
679 	  tree newexpr = build_call_expr (fn, 1, CALL_EXPR_ARG (s_expr, 0));
680 	  return convert_to_integer_1 (type, newexpr, dofold);
681 	}
682     }
683 
684   /* Convert (int)logb(d) -> ilogb(d).  */
685   if (optimize
686       && flag_unsafe_math_optimizations
687       && !flag_trapping_math && !flag_errno_math && flag_finite_math_only
688       && integer_type_node
689       && (outprec > TYPE_PRECISION (integer_type_node)
690 	  || (outprec == TYPE_PRECISION (integer_type_node)
691 	      && !TYPE_UNSIGNED (type))))
692     {
693       tree s_expr = strip_float_extensions (expr);
694       tree s_intype = TREE_TYPE (s_expr);
695       const enum built_in_function fcode = builtin_mathfn_code (s_expr);
696       tree fn = 0;
697 
698       switch (fcode)
699 	{
700 	CASE_FLT_FN (BUILT_IN_LOGB):
701 	  fn = mathfn_built_in (s_intype, BUILT_IN_ILOGB);
702 	  break;
703 
704 	default:
705 	  break;
706 	}
707 
708       if (fn
709 	  && call_expr_nargs (s_expr) == 1
710 	  && SCALAR_FLOAT_TYPE_P (TREE_TYPE (CALL_EXPR_ARG (s_expr, 0))))
711         {
712 	  tree newexpr = build_call_expr (fn, 1, CALL_EXPR_ARG (s_expr, 0));
713 	  return convert_to_integer_1 (type, newexpr, dofold);
714 	}
715     }
716 
717   switch (TREE_CODE (intype))
718     {
719     case POINTER_TYPE:
720     case REFERENCE_TYPE:
721       if (integer_zerop (expr)
722 	  && !TREE_OVERFLOW (tree_strip_any_location_wrapper (expr)))
723 	return build_int_cst (type, 0);
724 
725       /* Convert to an unsigned integer of the correct width first, and from
726 	 there widen/truncate to the required type.  Some targets support the
727 	 coexistence of multiple valid pointer sizes, so fetch the one we need
728 	 from the type.  */
729       if (!dofold)
730 	return build1 (CONVERT_EXPR, type, expr);
731       expr = fold_build1 (CONVERT_EXPR,
732 			  lang_hooks.types.type_for_size
733 			    (TYPE_PRECISION (intype), 0),
734 			  expr);
735       return fold_convert (type, expr);
736 
737     case INTEGER_TYPE:
738     case ENUMERAL_TYPE:
739     case BOOLEAN_TYPE:
740     case OFFSET_TYPE:
741       /* If this is a logical operation, which just returns 0 or 1, we can
742 	 change the type of the expression.  */
743 
744       if (TREE_CODE_CLASS (ex_form) == tcc_comparison)
745 	{
746 	  expr = copy_node (expr);
747 	  TREE_TYPE (expr) = type;
748 	  return expr;
749 	}
750 
751       /* If we are widening the type, put in an explicit conversion.
752 	 Similarly if we are not changing the width.  After this, we know
753 	 we are truncating EXPR.  */
754 
755       else if (outprec >= inprec)
756 	{
757 	  enum tree_code code;
758 
759 	  /* If the precision of the EXPR's type is K bits and the
760 	     destination mode has more bits, and the sign is changing,
761 	     it is not safe to use a NOP_EXPR.  For example, suppose
762 	     that EXPR's type is a 3-bit unsigned integer type, the
763 	     TYPE is a 3-bit signed integer type, and the machine mode
764 	     for the types is 8-bit QImode.  In that case, the
765 	     conversion necessitates an explicit sign-extension.  In
766 	     the signed-to-unsigned case the high-order bits have to
767 	     be cleared.  */
768 	  if (TYPE_UNSIGNED (type) != TYPE_UNSIGNED (TREE_TYPE (expr))
769 	      && !type_has_mode_precision_p (TREE_TYPE (expr)))
770 	    code = CONVERT_EXPR;
771 	  else
772 	    code = NOP_EXPR;
773 
774 	  return maybe_fold_build1_loc (dofold, loc, code, type, expr);
775 	}
776 
777       /* If TYPE is an enumeral type or a type with a precision less
778 	 than the number of bits in its mode, do the conversion to the
779 	 type corresponding to its mode, then do a nop conversion
780 	 to TYPE.  */
781       else if (TREE_CODE (type) == ENUMERAL_TYPE
782 	       || maybe_ne (outprec, GET_MODE_PRECISION (TYPE_MODE (type))))
783 	{
784 	  expr
785 	    = convert_to_integer_1 (lang_hooks.types.type_for_mode
786 				    (TYPE_MODE (type), TYPE_UNSIGNED (type)),
787 				    expr, dofold);
788 	  return maybe_fold_build1_loc (dofold, loc, NOP_EXPR, type, expr);
789 	}
790 
791       /* Here detect when we can distribute the truncation down past some
792 	 arithmetic.  For example, if adding two longs and converting to an
793 	 int, we can equally well convert both to ints and then add.
794 	 For the operations handled here, such truncation distribution
795 	 is always safe.
796 	 It is desirable in these cases:
797 	 1) when truncating down to full-word from a larger size;
798 	 2) when truncating takes no work;
799 	 3) when at least one operand of the arithmetic has been extended
800 	 (as by C's default conversions).  In this case we need two conversions
801 	 if we do the arithmetic as already requested, so we might as well
802 	 truncate both and then combine.  Perhaps that way we need only one.
803 
804 	 Note that in general we cannot do the arithmetic in a type
805 	 shorter than the desired result of conversion, even if the operands
806 	 are both extended from a shorter type, because they might overflow
807 	 if combined in that type.  The exceptions to this--the times when
808 	 two narrow values can be combined in their narrow type even to
809 	 make a wider result--are handled by "shorten" in build_binary_op.  */
810 
811       if (dofold)
812 	switch (ex_form)
813 	  {
814 	  case RSHIFT_EXPR:
815 	    /* We can pass truncation down through right shifting
816 	       when the shift count is a nonpositive constant.  */
817 	    if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
818 		&& tree_int_cst_sgn (TREE_OPERAND (expr, 1)) <= 0)
819 	      goto trunc1;
820 	    break;
821 
822 	  case LSHIFT_EXPR:
823 	    /* We can pass truncation down through left shifting
824 	       when the shift count is a nonnegative constant and
825 	       the target type is unsigned.  */
826 	    if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
827 		&& tree_int_cst_sgn (TREE_OPERAND (expr, 1)) >= 0
828 		&& TYPE_UNSIGNED (type)
829 		&& TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
830 	      {
831 		/* If shift count is less than the width of the truncated type,
832 		   really shift.  */
833 		if (tree_int_cst_lt (TREE_OPERAND (expr, 1), TYPE_SIZE (type)))
834 		  /* In this case, shifting is like multiplication.  */
835 		  goto trunc1;
836 		else
837 		  {
838 		    /* If it is >= that width, result is zero.
839 		       Handling this with trunc1 would give the wrong result:
840 		       (int) ((long long) a << 32) is well defined (as 0)
841 		       but (int) a << 32 is undefined and would get a
842 		       warning.  */
843 
844 		    tree t = build_int_cst (type, 0);
845 
846 		    /* If the original expression had side-effects, we must
847 		       preserve it.  */
848 		    if (TREE_SIDE_EFFECTS (expr))
849 		      return build2 (COMPOUND_EXPR, type, expr, t);
850 		    else
851 		      return t;
852 		  }
853 	      }
854 	    break;
855 
856 	  case TRUNC_DIV_EXPR:
857 	    {
858 	      tree arg0 = get_unwidened (TREE_OPERAND (expr, 0), NULL_TREE);
859 	      tree arg1 = get_unwidened (TREE_OPERAND (expr, 1), NULL_TREE);
860 
861 	      /* Don't distribute unless the output precision is at least as
862 		 big as the actual inputs and it has the same signedness.  */
863 	      if (outprec >= TYPE_PRECISION (TREE_TYPE (arg0))
864 		  && outprec >= TYPE_PRECISION (TREE_TYPE (arg1))
865 		  /* If signedness of arg0 and arg1 don't match,
866 		     we can't necessarily find a type to compare them in.  */
867 		  && (TYPE_UNSIGNED (TREE_TYPE (arg0))
868 		      == TYPE_UNSIGNED (TREE_TYPE (arg1)))
869 		  /* Do not change the sign of the division.  */
870 		  && (TYPE_UNSIGNED (TREE_TYPE (expr))
871 		      == TYPE_UNSIGNED (TREE_TYPE (arg0)))
872 		  /* Either require unsigned division or a division by
873 		     a constant that is not -1.  */
874 		  && (TYPE_UNSIGNED (TREE_TYPE (arg0))
875 		      || (TREE_CODE (arg1) == INTEGER_CST
876 			  && !integer_all_onesp (arg1))))
877 		{
878 		  tree tem = do_narrow (loc, ex_form, type, arg0, arg1,
879 					expr, inprec, outprec, dofold);
880 		  if (tem)
881 		    return tem;
882 		}
883 	      break;
884 	    }
885 
886 	  case MAX_EXPR:
887 	  case MIN_EXPR:
888 	  case MULT_EXPR:
889 	    {
890 	      tree arg0 = get_unwidened (TREE_OPERAND (expr, 0), type);
891 	      tree arg1 = get_unwidened (TREE_OPERAND (expr, 1), type);
892 
893 	      /* Don't distribute unless the output precision is at least as
894 		 big as the actual inputs.  Otherwise, the comparison of the
895 		 truncated values will be wrong.  */
896 	      if (outprec >= TYPE_PRECISION (TREE_TYPE (arg0))
897 		  && outprec >= TYPE_PRECISION (TREE_TYPE (arg1))
898 		  /* If signedness of arg0 and arg1 don't match,
899 		     we can't necessarily find a type to compare them in.  */
900 		  && (TYPE_UNSIGNED (TREE_TYPE (arg0))
901 		      == TYPE_UNSIGNED (TREE_TYPE (arg1))))
902 		goto trunc1;
903 	      break;
904 	    }
905 
906 	  case PLUS_EXPR:
907 	  case MINUS_EXPR:
908 	  case BIT_AND_EXPR:
909 	  case BIT_IOR_EXPR:
910 	  case BIT_XOR_EXPR:
911 	  trunc1:
912 	    {
913 	      tree arg0 = get_unwidened (TREE_OPERAND (expr, 0), type);
914 	      tree arg1 = get_unwidened (TREE_OPERAND (expr, 1), type);
915 
916 	      /* Do not try to narrow operands of pointer subtraction;
917 		 that will interfere with other folding.  */
918 	      if (ex_form == MINUS_EXPR
919 		  && CONVERT_EXPR_P (arg0)
920 		  && CONVERT_EXPR_P (arg1)
921 		  && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0)))
922 		  && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
923 		break;
924 
925 	      if (outprec >= BITS_PER_WORD
926 		  || targetm.truly_noop_truncation (outprec, inprec)
927 		  || inprec > TYPE_PRECISION (TREE_TYPE (arg0))
928 		  || inprec > TYPE_PRECISION (TREE_TYPE (arg1)))
929 		{
930 		  tree tem = do_narrow (loc, ex_form, type, arg0, arg1,
931 					expr, inprec, outprec, dofold);
932 		  if (tem)
933 		    return tem;
934 		}
935 	    }
936 	    break;
937 
938 	  case NEGATE_EXPR:
939 	    /* Using unsigned arithmetic for signed types may hide overflow
940 	       bugs.  */
941 	    if (!TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (expr, 0)))
942 		&& sanitize_flags_p (SANITIZE_SI_OVERFLOW))
943 	      break;
944 	    /* Fall through.  */
945 	  case BIT_NOT_EXPR:
946 	    /* This is not correct for ABS_EXPR,
947 	       since we must test the sign before truncation.  */
948 	    {
949 	      /* Do the arithmetic in type TYPEX,
950 		 then convert result to TYPE.  */
951 	      tree typex = type;
952 
953 	      /* Can't do arithmetic in enumeral types
954 		 so use an integer type that will hold the values.  */
955 	      if (TREE_CODE (typex) == ENUMERAL_TYPE)
956 		typex
957 		  = lang_hooks.types.type_for_size (TYPE_PRECISION (typex),
958 						    TYPE_UNSIGNED (typex));
959 
960 	      if (!TYPE_UNSIGNED (typex))
961 		typex = unsigned_type_for (typex);
962 	      return convert (type,
963 			      fold_build1 (ex_form, typex,
964 					   convert (typex,
965 						    TREE_OPERAND (expr, 0))));
966 	    }
967 
968 	  CASE_CONVERT:
969 	    {
970 	      tree argtype = TREE_TYPE (TREE_OPERAND (expr, 0));
971 	      /* Don't introduce a "can't convert between vector values
972 		 of different size" error.  */
973 	      if (TREE_CODE (argtype) == VECTOR_TYPE
974 		  && maybe_ne (GET_MODE_SIZE (TYPE_MODE (argtype)),
975 			       GET_MODE_SIZE (TYPE_MODE (type))))
976 		break;
977 	    }
978 	    /* If truncating after truncating, might as well do all at once.
979 	       If truncating after extending, we may get rid of wasted work.  */
980 	    return convert (type, get_unwidened (TREE_OPERAND (expr, 0), type));
981 
982 	  case COND_EXPR:
983 	    /* It is sometimes worthwhile to push the narrowing down through
984 	       the conditional, and it never loses.  A COND_EXPR may have a throw
985 	       as one operand, which then has void type.  Just leave void
986 	       operands as they are.  */
987 	    return
988 	      fold_build3 (COND_EXPR, type, TREE_OPERAND (expr, 0),
989 			   VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1)))
990 			   ? TREE_OPERAND (expr, 1)
991 			   : convert (type, TREE_OPERAND (expr, 1)),
992 			   VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 2)))
993 			   ? TREE_OPERAND (expr, 2)
994 			   : convert (type, TREE_OPERAND (expr, 2)));
995 
996 	  default:
997 	    break;
998 	  }
999 
1000       /* When parsing long initializers, we might end up with a lot of casts.
1001 	 Shortcut this.  */
1002       if (TREE_CODE (tree_strip_any_location_wrapper (expr)) == INTEGER_CST)
1003 	return fold_convert (type, expr);
1004       return build1 (CONVERT_EXPR, type, expr);
1005 
1006     case REAL_TYPE:
1007       if (sanitize_flags_p (SANITIZE_FLOAT_CAST)
1008 	  && current_function_decl != NULL_TREE)
1009 	{
1010 	  expr = save_expr (expr);
1011 	  tree check = ubsan_instrument_float_cast (loc, type, expr);
1012 	  expr = build1 (FIX_TRUNC_EXPR, type, expr);
1013 	  if (check == NULL_TREE)
1014 	    return expr;
1015 	  return maybe_fold_build2_loc (dofold, loc, COMPOUND_EXPR,
1016 					TREE_TYPE (expr), check, expr);
1017 	}
1018       else
1019 	return build1 (FIX_TRUNC_EXPR, type, expr);
1020 
1021     case FIXED_POINT_TYPE:
1022       return build1 (FIXED_CONVERT_EXPR, type, expr);
1023 
1024     case COMPLEX_TYPE:
1025       expr = maybe_fold_build1_loc (dofold, loc, REALPART_EXPR,
1026 				    TREE_TYPE (TREE_TYPE (expr)), expr);
1027       return convert (type, expr);
1028 
1029     case VECTOR_TYPE:
1030       if (!tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (TREE_TYPE (expr))))
1031 	{
1032 	  error ("can%'t convert a vector of type %qT"
1033 		 " to type %qT which has different size",
1034 		 TREE_TYPE (expr), type);
1035 	  return error_mark_node;
1036 	}
1037       return build1 (VIEW_CONVERT_EXPR, type, expr);
1038 
1039     default:
1040       error ("aggregate value used where an integer was expected");
1041       return convert (type, integer_zero_node);
1042     }
1043 }
1044 
1045 /* Convert EXPR to some integer (or enum) type TYPE.
1046 
1047    EXPR must be pointer, integer, discrete (enum, char, or bool), float,
1048    fixed-point or vector; in other cases error is called.
1049 
1050    The result of this is always supposed to be a newly created tree node
1051    not in use in any existing structure.  */
1052 
1053 tree
1054 convert_to_integer (tree type, tree expr)
1055 {
1056   return convert_to_integer_1 (type, expr, true);
1057 }
1058 
1059 /* A wrapper around convert_to_integer_1 that only folds the
1060    expression if DOFOLD, or if it is CONSTANT_CLASS_OR_WRAPPER_P.  */
1061 
1062 tree
1063 convert_to_integer_maybe_fold (tree type, tree expr, bool dofold)
1064 {
1065   tree result
1066     = convert_to_integer_1 (type, expr,
1067 			    dofold || CONSTANT_CLASS_OR_WRAPPER_P (expr));
1068   return preserve_any_location_wrapper (result, expr);
1069 }
1070 
1071 /* Convert EXPR to the complex type TYPE in the usual ways.  If FOLD_P is
1072    true, try to fold the expression.  */
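/* For example, a REAL_TYPE or INTEGER_TYPE source R becomes
   COMPLEX_EXPR <(subtype) R, 0>, while a COMPLEX_TYPE source is split with
   REALPART_EXPR/IMAGPART_EXPR and each part is converted to the subtype.  */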
1073 
1074 static tree
1075 convert_to_complex_1 (tree type, tree expr, bool fold_p)
1076 {
1077   location_t loc = EXPR_LOCATION (expr);
1078   tree subtype = TREE_TYPE (type);
1079 
1080   switch (TREE_CODE (TREE_TYPE (expr)))
1081     {
1082     case REAL_TYPE:
1083     case FIXED_POINT_TYPE:
1084     case INTEGER_TYPE:
1085     case ENUMERAL_TYPE:
1086     case BOOLEAN_TYPE:
1087       return build2 (COMPLEX_EXPR, type, convert (subtype, expr),
1088 		     convert (subtype, integer_zero_node));
1089 
1090     case COMPLEX_TYPE:
1091       {
1092 	tree elt_type = TREE_TYPE (TREE_TYPE (expr));
1093 
1094 	if (TYPE_MAIN_VARIANT (elt_type) == TYPE_MAIN_VARIANT (subtype))
1095 	  return expr;
1096 	else if (TREE_CODE (expr) == COMPOUND_EXPR)
1097 	  {
1098 	    tree t = convert_to_complex_1 (type, TREE_OPERAND (expr, 1),
1099 					   fold_p);
1100 	    if (t == TREE_OPERAND (expr, 1))
1101 	      return expr;
1102 	    return build2_loc (EXPR_LOCATION (expr), COMPOUND_EXPR,
1103 			       TREE_TYPE (t), TREE_OPERAND (expr, 0), t);
1104 	  }
1105 	else if (TREE_CODE (expr) == COMPLEX_EXPR)
1106 	  return maybe_fold_build2_loc (fold_p, loc, COMPLEX_EXPR, type,
1107 					convert (subtype,
1108 						 TREE_OPERAND (expr, 0)),
1109 					convert (subtype,
1110 						 TREE_OPERAND (expr, 1)));
1111 	else
1112 	  {
1113 	    expr = save_expr (expr);
1114 	    tree realp = maybe_fold_build1_loc (fold_p, loc, REALPART_EXPR,
1115 						TREE_TYPE (TREE_TYPE (expr)),
1116 						expr);
1117 	    tree imagp = maybe_fold_build1_loc (fold_p, loc, IMAGPART_EXPR,
1118 						TREE_TYPE (TREE_TYPE (expr)),
1119 						expr);
1120 	    return maybe_fold_build2_loc (fold_p, loc, COMPLEX_EXPR, type,
1121 					  convert (subtype, realp),
1122 					  convert (subtype, imagp));
1123 	  }
1124       }
1125 
1126     case POINTER_TYPE:
1127     case REFERENCE_TYPE:
1128       error ("pointer value used where a complex was expected");
1129       return convert_to_complex_1 (type, integer_zero_node, fold_p);
1130 
1131     default:
1132       error ("aggregate value used where a complex was expected");
1133       return convert_to_complex_1 (type, integer_zero_node, fold_p);
1134     }
1135 }
1136 
1137 /* A wrapper around convert_to_complex_1 that always folds the
1138    expression.  */
1139 
1140 tree
1141 convert_to_complex (tree type, tree expr)
1142 {
1143   return convert_to_complex_1 (type, expr, true);
1144 }
1145 
1146 /* A wrapper around convert_to_complex_1 that only folds the
1147    expression if DOFOLD, or if it is CONSTANT_CLASS_OR_WRAPPER_P.  */
1148 
1149 tree
1150 convert_to_complex_maybe_fold (tree type, tree expr, bool dofold)
1151 {
1152   tree result
1153     = convert_to_complex_1 (type, expr,
1154 			    dofold || CONSTANT_CLASS_OR_WRAPPER_P (expr));
1155   return preserve_any_location_wrapper (result, expr);
1156 }
1157 
1158 /* Convert EXPR to the vector type TYPE in the usual ways.  */
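/* Note that this is a bit-for-bit reinterpretation via VIEW_CONVERT_EXPR
   (e.g. a 64-bit integer viewed as a vector of eight 8-bit elements), so the
   total sizes of the two types must match exactly.  */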
1159 
1160 tree
1161 convert_to_vector (tree type, tree expr)
1162 {
1163   switch (TREE_CODE (TREE_TYPE (expr)))
1164     {
1165     case INTEGER_TYPE:
1166     case VECTOR_TYPE:
1167       if (!tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (TREE_TYPE (expr))))
1168 	{
1169 	  error ("can%'t convert a value of type %qT"
1170 		 " to vector type %qT which has different size",
1171 		 TREE_TYPE (expr), type);
1172 	  return error_mark_node;
1173 	}
1174       return build1 (VIEW_CONVERT_EXPR, type, expr);
1175 
1176     default:
1177       error ("can%'t convert value to a vector");
1178       return error_mark_node;
1179     }
1180 }
1181 
1182 /* Convert EXPR to some fixed-point type TYPE.
1183 
1184    EXPR must be fixed-point, float, integer, or enumeral;
1185    in other cases error is called.  */
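/* An integer constant 0 is converted directly to the fixed-point zero; the
   constant 1 gets a direct FCONST1 only for _Accum modes, because plain
   _Fract types cannot represent the value 1.  */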
1186 
1187 tree
1188 convert_to_fixed (tree type, tree expr)
1189 {
1190   if (integer_zerop (expr))
1191     {
1192       tree fixed_zero_node = build_fixed (type, FCONST0 (TYPE_MODE (type)));
1193       return fixed_zero_node;
1194     }
1195   else if (integer_onep (expr) && ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)))
1196     {
1197       tree fixed_one_node = build_fixed (type, FCONST1 (TYPE_MODE (type)));
1198       return fixed_one_node;
1199     }
1200 
1201   switch (TREE_CODE (TREE_TYPE (expr)))
1202     {
1203     case FIXED_POINT_TYPE:
1204     case INTEGER_TYPE:
1205     case ENUMERAL_TYPE:
1206     case BOOLEAN_TYPE:
1207     case REAL_TYPE:
1208       return build1 (FIXED_CONVERT_EXPR, type, expr);
1209 
1210     case COMPLEX_TYPE:
1211       return convert (type,
1212 		      fold_build1 (REALPART_EXPR,
1213 				   TREE_TYPE (TREE_TYPE (expr)), expr));
1214 
1215     default:
1216       error ("aggregate value used where a fixed-point was expected");
1217       return error_mark_node;
1218     }
1219 }
1220 
1221 #if CHECKING_P
1222 
1223 namespace selftest {
1224 
1225 /* Selftests for conversions.  */
1226 
1227 static void
1228 test_convert_to_integer_maybe_fold (tree orig_type, tree new_type)
1229 {
1230   /* Calling convert_to_integer_maybe_fold on an INTEGER_CST.  */
1231 
1232   tree orig_cst = build_int_cst (orig_type, 42);
1233 
1234   /* Verify that convert_to_integer_maybe_fold on a constant returns a new
1235      constant of the new type, unless the types are the same, in which
1236      case verify it's a no-op.  */
1237   {
1238     tree result = convert_to_integer_maybe_fold (new_type,
1239 						 orig_cst, false);
1240     if (orig_type != new_type)
1241       {
1242 	ASSERT_EQ (TREE_TYPE (result), new_type);
1243 	ASSERT_EQ (TREE_CODE (result), INTEGER_CST);
1244       }
1245     else
1246       ASSERT_EQ (result, orig_cst);
1247   }
1248 
1249   /* Calling convert_to_integer_maybe_fold on a location wrapper around
1250      an INTEGER_CST.
1251 
1252      Verify that convert_to_integer_maybe_fold on a location wrapper
1253      around a constant returns a new location wrapper around an equivalent
1254      constant, both of the new type, unless the types are the same,
1255      in which case the original wrapper should be returned.   */
1256   {
1257     const location_t loc = BUILTINS_LOCATION;
1258     tree wrapped_orig_cst = maybe_wrap_with_location (orig_cst, loc);
1259     tree result
1260       = convert_to_integer_maybe_fold (new_type, wrapped_orig_cst, false);
1261     ASSERT_EQ (TREE_TYPE (result), new_type);
1262     ASSERT_EQ (EXPR_LOCATION (result), loc);
1263     ASSERT_TRUE (location_wrapper_p (result));
1264     ASSERT_EQ (TREE_TYPE (TREE_OPERAND (result, 0)), new_type);
1265     ASSERT_EQ (TREE_CODE (TREE_OPERAND (result, 0)), INTEGER_CST);
1266 
1267     if (orig_type == new_type)
1268       ASSERT_EQ (result, wrapped_orig_cst);
1269   }
1270 }
1271 
1272 /* Verify that convert_to_integer_maybe_fold preserves locations.  */
1273 
1274 static void
1275 test_convert_to_integer_maybe_fold ()
1276 {
1277   /* char -> long.  */
1278   test_convert_to_integer_maybe_fold (char_type_node, long_integer_type_node);
1279 
1280   /* char -> char.  */
1281   test_convert_to_integer_maybe_fold (char_type_node, char_type_node);
1282 
1283   /* long -> char.  */
1284   test_convert_to_integer_maybe_fold (long_integer_type_node, char_type_node);
1285 
1286   /* long -> long.  */
1287   test_convert_to_integer_maybe_fold (long_integer_type_node,
1288 				      long_integer_type_node);
1289 }
1290 
1291 /* Run all of the selftests within this file.  */
1292 
1293 void
1294 convert_c_tests ()
1295 {
1296   test_convert_to_integer_maybe_fold ();
1297 }
1298 
1299 } // namespace selftest
1300 
1301 #endif /* CHECKING_P */
1302