1 /* Utility routines for data type conversion for GCC.
2 Copyright (C) 1987-2016 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20
21 /* These routines are somewhat language-independent utility function
22 intended to be called by the language-specific convert () functions. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "target.h"
28 #include "tree.h"
29 #include "diagnostic-core.h"
30 #include "fold-const.h"
31 #include "stor-layout.h"
32 #include "convert.h"
33 #include "langhooks.h"
34 #include "builtins.h"
35 #include "ubsan.h"
36
/* Build a unary CODE expression of TYPE at location LOC: fold it when
   FOLD_P is true, otherwise just construct the raw tree node.  */
#define maybe_fold_build1_loc(FOLD_P, LOC, CODE, TYPE, EXPR) \
  ((FOLD_P) ? fold_build1_loc (LOC, CODE, TYPE, EXPR) \
   : build1_loc (LOC, CODE, TYPE, EXPR))
/* Likewise for a binary CODE expression with operands EXPR1 and EXPR2.  */
#define maybe_fold_build2_loc(FOLD_P, LOC, CODE, TYPE, EXPR1, EXPR2) \
  ((FOLD_P) ? fold_build2_loc (LOC, CODE, TYPE, EXPR1, EXPR2) \
   : build2_loc (LOC, CODE, TYPE, EXPR1, EXPR2))
43
44 /* Convert EXPR to some pointer or reference type TYPE.
45 EXPR must be pointer, reference, integer, enumeral, or literal zero;
46 in other cases error is called. If FOLD_P is true, try to fold the
47 expression. */
48
49 static tree
convert_to_pointer_1(tree type,tree expr,bool fold_p)50 convert_to_pointer_1 (tree type, tree expr, bool fold_p)
51 {
52 location_t loc = EXPR_LOCATION (expr);
53 if (TREE_TYPE (expr) == type)
54 return expr;
55
56 switch (TREE_CODE (TREE_TYPE (expr)))
57 {
58 case POINTER_TYPE:
59 case REFERENCE_TYPE:
60 {
61 /* If the pointers point to different address spaces, conversion needs
62 to be done via a ADDR_SPACE_CONVERT_EXPR instead of a NOP_EXPR. */
63 addr_space_t to_as = TYPE_ADDR_SPACE (TREE_TYPE (type));
64 addr_space_t from_as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (expr)));
65
66 if (to_as == from_as)
67 return maybe_fold_build1_loc (fold_p, loc, NOP_EXPR, type, expr);
68 else
69 return maybe_fold_build1_loc (fold_p, loc, ADDR_SPACE_CONVERT_EXPR,
70 type, expr);
71 }
72
73 case INTEGER_TYPE:
74 case ENUMERAL_TYPE:
75 case BOOLEAN_TYPE:
76 {
77 /* If the input precision differs from the target pointer type
78 precision, first convert the input expression to an integer type of
79 the target precision. Some targets, e.g. VMS, need several pointer
80 sizes to coexist so the latter isn't necessarily POINTER_SIZE. */
81 unsigned int pprec = TYPE_PRECISION (type);
82 unsigned int eprec = TYPE_PRECISION (TREE_TYPE (expr));
83
84 if (eprec != pprec)
85 expr
86 = maybe_fold_build1_loc (fold_p, loc, NOP_EXPR,
87 lang_hooks.types.type_for_size (pprec, 0),
88 expr);
89 }
90 return maybe_fold_build1_loc (fold_p, loc, CONVERT_EXPR, type, expr);
91
92 default:
93 error ("cannot convert to a pointer type");
94 return convert_to_pointer_1 (type, integer_zero_node, fold_p);
95 }
96 }
97
98 /* A wrapper around convert_to_pointer_1 that always folds the
99 expression. */
100
101 tree
convert_to_pointer(tree type,tree expr)102 convert_to_pointer (tree type, tree expr)
103 {
104 return convert_to_pointer_1 (type, expr, true);
105 }
106
107 /* A wrapper around convert_to_pointer_1 that only folds the
108 expression if DOFOLD, or if it is CONSTANT_CLASS_P. */
109
110 tree
convert_to_pointer_maybe_fold(tree type,tree expr,bool dofold)111 convert_to_pointer_maybe_fold (tree type, tree expr, bool dofold)
112 {
113 return convert_to_pointer_1 (type, expr, dofold || CONSTANT_CLASS_P (expr));
114 }
115
/* Convert EXPR to some floating-point type TYPE.

   EXPR must be float, fixed-point, integer, or enumeral;
   in other cases error is called.  If FOLD_P is true, try to fold
   the expression.  */

static tree
convert_to_real_1 (tree type, tree expr, bool fold_p)
{
  enum built_in_function fcode = builtin_mathfn_code (expr);
  tree itype = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);

  /* For a COMPOUND_EXPR, convert only the value operand; the
     side-effect operand is kept as-is.  */
  if (TREE_CODE (expr) == COMPOUND_EXPR)
    {
      tree t = convert_to_real_1 (type, TREE_OPERAND (expr, 1), fold_p);
      if (t == TREE_OPERAND (expr, 1))
	return expr;
      return build2_loc (EXPR_LOCATION (expr), COMPOUND_EXPR, TREE_TYPE (t),
			 TREE_OPERAND (expr, 0), t);
    }

  /* Disable until we figure out how to decide whether the functions are
     present in runtime.  */
  /* Convert (float)sqrt((double)x) where x is float into sqrtf(x).
     NOTE: the case labels below deliberately fall through; the
     flag_errno_math and flag_unsafe_math_optimizations tests guard
     everything listed before them.  */
  if (optimize
      && (TYPE_MODE (type) == TYPE_MODE (double_type_node)
	  || TYPE_MODE (type) == TYPE_MODE (float_type_node)))
    {
      switch (fcode)
	{
#define CASE_MATHFN(FN) case BUILT_IN_##FN: case BUILT_IN_##FN##L:
	  CASE_MATHFN (COSH)
	  CASE_MATHFN (EXP)
	  CASE_MATHFN (EXP10)
	  CASE_MATHFN (EXP2)
	  CASE_MATHFN (EXPM1)
	  CASE_MATHFN (GAMMA)
	  CASE_MATHFN (J0)
	  CASE_MATHFN (J1)
	  CASE_MATHFN (LGAMMA)
	  CASE_MATHFN (POW10)
	  CASE_MATHFN (SINH)
	  CASE_MATHFN (TGAMMA)
	  CASE_MATHFN (Y0)
	  CASE_MATHFN (Y1)
	    /* The above functions may set errno differently with float
	       input or output so this transformation is not safe with
	       -fmath-errno.  */
	    if (flag_errno_math)
	      break;
	    /* FALLTHRU */
	  CASE_MATHFN (ACOS)
	  CASE_MATHFN (ACOSH)
	  CASE_MATHFN (ASIN)
	  CASE_MATHFN (ASINH)
	  CASE_MATHFN (ATAN)
	  CASE_MATHFN (ATANH)
	  CASE_MATHFN (CBRT)
	  CASE_MATHFN (COS)
	  CASE_MATHFN (ERF)
	  CASE_MATHFN (ERFC)
	  CASE_MATHFN (LOG)
	  CASE_MATHFN (LOG10)
	  CASE_MATHFN (LOG2)
	  CASE_MATHFN (LOG1P)
	  CASE_MATHFN (SIN)
	  CASE_MATHFN (TAN)
	  CASE_MATHFN (TANH)
	    /* The above functions are not safe to do this conversion.  */
	    if (!flag_unsafe_math_optimizations)
	      break;
	    /* FALLTHRU */
	  CASE_MATHFN (SQRT)
	  CASE_MATHFN (FABS)
	  CASE_MATHFN (LOGB)
#undef CASE_MATHFN
	    {
	      tree arg0 = strip_float_extensions (CALL_EXPR_ARG (expr, 0));
	      tree newtype = type;

	      /* We have (outertype)sqrt((innertype)x).  Choose the wider mode from
		 the both as the safe type for operation.  */
	      if (TYPE_PRECISION (TREE_TYPE (arg0)) > TYPE_PRECISION (type))
		newtype = TREE_TYPE (arg0);

	      /* We consider to convert

		     (T1) sqrtT2 ((T2) exprT3)
		 to
		     (T1) sqrtT4 ((T4) exprT3)

		  , where T1 is TYPE, T2 is ITYPE, T3 is TREE_TYPE (ARG0),
		 and T4 is NEWTYPE.  All those types are of floating point types.
		 T4 (NEWTYPE) should be narrower than T2 (ITYPE).  This conversion
		 is safe only if P1 >= P2*2+2, where P1 and P2 are precisions of
		 T2 and T4.  See the following URL for a reference:
		 http://stackoverflow.com/questions/9235456/determining-
		 floating-point-square-root
		 */
	      if ((fcode == BUILT_IN_SQRT || fcode == BUILT_IN_SQRTL)
		  && !flag_unsafe_math_optimizations)
		{
		  /* The following conversion is unsafe even the precision condition
		     below is satisfied:

		     (float) sqrtl ((long double) double_val) -> (float) sqrt (double_val)
		    */
		  if (TYPE_MODE (type) != TYPE_MODE (newtype))
		    break;

		  int p1 = REAL_MODE_FORMAT (TYPE_MODE (itype))->p;
		  int p2 = REAL_MODE_FORMAT (TYPE_MODE (newtype))->p;
		  if (p1 < p2 * 2 + 2)
		    break;
		}

	      /* Be careful about integer to fp conversions.
		 These may overflow still.  */
	      if (FLOAT_TYPE_P (TREE_TYPE (arg0))
		  && TYPE_PRECISION (newtype) < TYPE_PRECISION (itype)
		  && (TYPE_MODE (newtype) == TYPE_MODE (double_type_node)
		      || TYPE_MODE (newtype) == TYPE_MODE (float_type_node)))
		{
		  tree fn = mathfn_built_in (newtype, fcode);
		  if (fn)
		    {
		      tree arg = convert_to_real_1 (newtype, arg0, fold_p);
		      expr = build_call_expr (fn, 1, arg);
		      /* If NEWTYPE is wider than TYPE, fall through so the
			 final switch narrows the new call's result.  */
		      if (newtype == type)
			return expr;
		    }
		}
	    }
	default:
	  break;
	}
    }

  /* Propagate the cast into the operation.  */
  if (itype != type && FLOAT_TYPE_P (type))
    switch (TREE_CODE (expr))
      {
	/* Convert (float)-x into -(float)x.  This is safe for
	   round-to-nearest rounding mode when the inner type is float.  */
	case ABS_EXPR:
	case NEGATE_EXPR:
	  if (!flag_rounding_math
	      && FLOAT_TYPE_P (itype)
	      && TYPE_PRECISION (type) < TYPE_PRECISION (itype))
	    {
	      tree arg = convert_to_real_1 (type, TREE_OPERAND (expr, 0),
					    fold_p);
	      return build1 (TREE_CODE (expr), type, arg);
	    }
	  break;
	/* Convert (outertype)((innertype0)a+(innertype1)b)
	   into ((newtype)a+(newtype)b) where newtype
	   is the widest mode from all of these.  */
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	  {
	    tree arg0 = strip_float_extensions (TREE_OPERAND (expr, 0));
	    tree arg1 = strip_float_extensions (TREE_OPERAND (expr, 1));

	    if (FLOAT_TYPE_P (TREE_TYPE (arg0))
		&& FLOAT_TYPE_P (TREE_TYPE (arg1))
		&& DECIMAL_FLOAT_TYPE_P (itype) == DECIMAL_FLOAT_TYPE_P (type))
	      {
		tree newtype = type;

		/* Decimal float operands force the arithmetic into the
		   widest decimal float mode that occurs anywhere in the
		   expression.  */
		if (TYPE_MODE (TREE_TYPE (arg0)) == SDmode
		    || TYPE_MODE (TREE_TYPE (arg1)) == SDmode
		    || TYPE_MODE (type) == SDmode)
		  newtype = dfloat32_type_node;
		if (TYPE_MODE (TREE_TYPE (arg0)) == DDmode
		    || TYPE_MODE (TREE_TYPE (arg1)) == DDmode
		    || TYPE_MODE (type) == DDmode)
		  newtype = dfloat64_type_node;
		if (TYPE_MODE (TREE_TYPE (arg0)) == TDmode
		    || TYPE_MODE (TREE_TYPE (arg1)) == TDmode
		    || TYPE_MODE (type) == TDmode)
		  newtype = dfloat128_type_node;
		if (newtype == dfloat32_type_node
		    || newtype == dfloat64_type_node
		    || newtype == dfloat128_type_node)
		  {
		    expr = build2 (TREE_CODE (expr), newtype,
				   convert_to_real_1 (newtype, arg0,
						      fold_p),
				   convert_to_real_1 (newtype, arg1,
						      fold_p));
		    if (newtype == type)
		      return expr;
		    break;
		  }

		/* Binary floats: do the arithmetic in the widest of the
		   stripped operand types and TYPE.  */
		if (TYPE_PRECISION (TREE_TYPE (arg0)) > TYPE_PRECISION (newtype))
		  newtype = TREE_TYPE (arg0);
		if (TYPE_PRECISION (TREE_TYPE (arg1)) > TYPE_PRECISION (newtype))
		  newtype = TREE_TYPE (arg1);
		/* Sometimes this transformation is safe (cannot
		   change results through affecting double rounding
		   cases) and sometimes it is not.  If NEWTYPE is
		   wider than TYPE, e.g. (float)((long double)double
		   + (long double)double) converted to
		   (float)(double + double), the transformation is
		   unsafe regardless of the details of the types
		   involved; double rounding can arise if the result
		   of NEWTYPE arithmetic is a NEWTYPE value half way
		   between two representable TYPE values but the
		   exact value is sufficiently different (in the
		   right direction) for this difference to be
		   visible in ITYPE arithmetic.  If NEWTYPE is the
		   same as TYPE, however, the transformation may be
		   safe depending on the types involved: it is safe
		   if the ITYPE has strictly more than twice as many
		   mantissa bits as TYPE, can represent infinities
		   and NaNs if the TYPE can, and has sufficient
		   exponent range for the product or ratio of two
		   values representable in the TYPE to be within the
		   range of normal values of ITYPE.  */
		if (TYPE_PRECISION (newtype) < TYPE_PRECISION (itype)
		    && (flag_unsafe_math_optimizations
			|| (TYPE_PRECISION (newtype) == TYPE_PRECISION (type)
			    && real_can_shorten_arithmetic (TYPE_MODE (itype),
							    TYPE_MODE (type))
			    && !excess_precision_type (newtype))))
		  {
		    expr = build2 (TREE_CODE (expr), newtype,
				   convert_to_real_1 (newtype, arg0,
						      fold_p),
				   convert_to_real_1 (newtype, arg1,
						      fold_p));
		    if (newtype == type)
		      return expr;
		  }
	      }
	  }
	  break;
	default:
	  break;
      }

  switch (TREE_CODE (TREE_TYPE (expr)))
    {
    case REAL_TYPE:
      /* Ignore the conversion if we don't need to store intermediate
	 results and neither type is a decimal float.  */
      return build1_loc (loc,
			 (flag_float_store
			  || DECIMAL_FLOAT_TYPE_P (type)
			  || DECIMAL_FLOAT_TYPE_P (itype))
			 ? CONVERT_EXPR : NOP_EXPR, type, expr);

    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      return build1 (FLOAT_EXPR, type, expr);

    case FIXED_POINT_TYPE:
      return build1 (FIXED_CONVERT_EXPR, type, expr);

    case COMPLEX_TYPE:
      /* Converting complex to real keeps only the real part.  */
      return convert (type,
		      maybe_fold_build1_loc (fold_p, loc, REALPART_EXPR,
					     TREE_TYPE (TREE_TYPE (expr)),
					     expr));

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      error ("pointer value used where a floating point value was expected");
      return convert_to_real_1 (type, integer_zero_node, fold_p);

    default:
      error ("aggregate value used where a float was expected");
      return convert_to_real_1 (type, integer_zero_node, fold_p);
    }
}
395
396 /* A wrapper around convert_to_real_1 that always folds the
397 expression. */
398
399 tree
convert_to_real(tree type,tree expr)400 convert_to_real (tree type, tree expr)
401 {
402 return convert_to_real_1 (type, expr, true);
403 }
404
405 /* A wrapper around convert_to_real_1 that only folds the
406 expression if DOFOLD, or if it is CONSTANT_CLASS_P. */
407
408 tree
convert_to_real_maybe_fold(tree type,tree expr,bool dofold)409 convert_to_real_maybe_fold (tree type, tree expr, bool dofold)
410 {
411 return convert_to_real_1 (type, expr, dofold || CONSTANT_CLASS_P (expr));
412 }
413
/* Try to narrow EX_FORM ARG0 ARG1 in narrowed arg types producing a
   result in TYPE.  EXPR is the original expression with input precision
   INPREC; OUTPREC is the precision of TYPE.  Returns NULL_TREE when the
   narrowing cannot be done here (caller falls back to a plain
   conversion).  If DOFOLD, fold the rebuilt expression.  */

static tree
do_narrow (location_t loc,
	   enum tree_code ex_form, tree type, tree arg0, tree arg1,
	   tree expr, unsigned inprec, unsigned outprec, bool dofold)
{
  /* Do the arithmetic in type TYPEX,
     then convert result to TYPE.  */
  tree typex = type;

  /* Can't do arithmetic in enumeral types
     so use an integer type that will hold the values.  */
  if (TREE_CODE (typex) == ENUMERAL_TYPE)
    typex = lang_hooks.types.type_for_size (TYPE_PRECISION (typex),
					    TYPE_UNSIGNED (typex));

  /* But now perhaps TYPEX is as wide as INPREC.
     In that case, do nothing special here.
     (Otherwise we would recurse infinitely in convert.)  */
  if (TYPE_PRECISION (typex) != inprec)
    {
      /* Don't do unsigned arithmetic where signed was wanted,
	 or vice versa.
	 Exception: if both of the original operands were
	 unsigned then we can safely do the work as unsigned.
	 Exception: shift operations take their type solely
	 from the first argument.
	 Exception: the LSHIFT_EXPR case above requires that
	 we perform this operation unsigned lest we produce
	 signed-overflow undefinedness.
	 And we may need to do it as unsigned
	 if we truncate to the original size.  */
      if (TYPE_UNSIGNED (TREE_TYPE (expr))
	  || (TYPE_UNSIGNED (TREE_TYPE (arg0))
	      && (TYPE_UNSIGNED (TREE_TYPE (arg1))
		  || ex_form == LSHIFT_EXPR
		  || ex_form == RSHIFT_EXPR
		  || ex_form == LROTATE_EXPR
		  || ex_form == RROTATE_EXPR))
	  || ex_form == LSHIFT_EXPR
	  /* If we have !flag_wrapv, and either ARG0 or
	     ARG1 is of a signed type, we have to do
	     PLUS_EXPR, MINUS_EXPR or MULT_EXPR in an unsigned
	     type in case the operation in outprec precision
	     could overflow.  Otherwise, we would introduce
	     signed-overflow undefinedness.  */
	  || ((!TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
	       || !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
	      && ((TYPE_PRECISION (TREE_TYPE (arg0)) * 2u
		   > outprec)
		  || (TYPE_PRECISION (TREE_TYPE (arg1)) * 2u
		      > outprec))
	      && (ex_form == PLUS_EXPR
		  || ex_form == MINUS_EXPR
		  || ex_form == MULT_EXPR)))
	{
	  if (!TYPE_UNSIGNED (typex))
	    typex = unsigned_type_for (typex);
	}
      else
	{
	  if (TYPE_UNSIGNED (typex))
	    typex = signed_type_for (typex);
	}
      /* We should do away with all this once we have a proper
	 type promotion/demotion pass, see PR45397.  */
      expr = maybe_fold_build2_loc (dofold, loc, ex_form, typex,
				    convert (typex, arg0),
				    convert (typex, arg1));
      return convert (type, expr);
    }

  return NULL_TREE;
}
490
/* Convert EXPR to some integer (or enum) type TYPE.

   EXPR must be pointer, integer, discrete (enum, char, or bool), float,
   fixed-point or vector; in other cases error is called.

   If DOFOLD is TRUE, we try to simplify newly-created patterns by folding.

   The result of this is always supposed to be a newly created tree node
   not in use in any existing structure.  */

static tree
convert_to_integer_1 (tree type, tree expr, bool dofold)
{
  enum tree_code ex_form = TREE_CODE (expr);
  tree intype = TREE_TYPE (expr);
  unsigned int inprec = element_precision (intype);
  unsigned int outprec = element_precision (type);
  location_t loc = EXPR_LOCATION (expr);

  /* An INTEGER_TYPE cannot be incomplete, but an ENUMERAL_TYPE can
     be.  Consider `enum E = { a, b = (enum E) 3 };'.  */
  if (!COMPLETE_TYPE_P (type))
    {
      error ("conversion to incomplete type");
      return error_mark_node;
    }

  /* For a COMPOUND_EXPR, convert only the value operand; the
     side-effect operand is kept as-is.  */
  if (ex_form == COMPOUND_EXPR)
    {
      tree t = convert_to_integer_1 (type, TREE_OPERAND (expr, 1), dofold);
      if (t == TREE_OPERAND (expr, 1))
	return expr;
      return build2_loc (EXPR_LOCATION (expr), COMPOUND_EXPR, TREE_TYPE (t),
			 TREE_OPERAND (expr, 0), t);
    }

  /* Convert e.g. (long)round(d) -> lround(d).  */
  /* If we're converting to char, we may encounter differing behavior
     between converting from double->char vs double->long->char.
     We're in "undefined" territory but we prefer to be conservative,
     so only proceed in "unsafe" math mode.  */
  if (optimize
      && (flag_unsafe_math_optimizations
	  || (long_integer_type_node
	      && outprec >= TYPE_PRECISION (long_integer_type_node))))
    {
      tree s_expr = strip_float_extensions (expr);
      tree s_intype = TREE_TYPE (s_expr);
      const enum built_in_function fcode = builtin_mathfn_code (s_expr);
      tree fn = 0;

      switch (fcode)
        {
	CASE_FLT_FN (BUILT_IN_CEIL):
	  /* Only convert in ISO C99 mode.  */
	  if (!targetm.libc_has_function (function_c99_misc))
	    break;
	  /* Pick the i/l/ll variant matching the output precision and
	     signedness; none matches for unsigned wider-than-int.  */
	  if (outprec < TYPE_PRECISION (integer_type_node)
	      || (outprec == TYPE_PRECISION (integer_type_node)
		  && !TYPE_UNSIGNED (type)))
	    fn = mathfn_built_in (s_intype, BUILT_IN_ICEIL);
	  else if (outprec == TYPE_PRECISION (long_integer_type_node)
		   && !TYPE_UNSIGNED (type))
	    fn = mathfn_built_in (s_intype, BUILT_IN_LCEIL);
	  else if (outprec == TYPE_PRECISION (long_long_integer_type_node)
		   && !TYPE_UNSIGNED (type))
	    fn = mathfn_built_in (s_intype, BUILT_IN_LLCEIL);
	  break;

	CASE_FLT_FN (BUILT_IN_FLOOR):
	  /* Only convert in ISO C99 mode.  */
	  if (!targetm.libc_has_function (function_c99_misc))
	    break;
	  if (outprec < TYPE_PRECISION (integer_type_node)
	      || (outprec == TYPE_PRECISION (integer_type_node)
		  && !TYPE_UNSIGNED (type)))
	    fn = mathfn_built_in (s_intype, BUILT_IN_IFLOOR);
	  else if (outprec == TYPE_PRECISION (long_integer_type_node)
		   && !TYPE_UNSIGNED (type))
	    fn = mathfn_built_in (s_intype, BUILT_IN_LFLOOR);
	  else if (outprec == TYPE_PRECISION (long_long_integer_type_node)
		   && !TYPE_UNSIGNED (type))
	    fn = mathfn_built_in (s_intype, BUILT_IN_LLFLOOR);
	  break;

	CASE_FLT_FN (BUILT_IN_ROUND):
	  /* Only convert in ISO C99 mode and with -fno-math-errno.  */
	  if (!targetm.libc_has_function (function_c99_misc) || flag_errno_math)
	    break;
	  if (outprec < TYPE_PRECISION (integer_type_node)
	      || (outprec == TYPE_PRECISION (integer_type_node)
		  && !TYPE_UNSIGNED (type)))
	    fn = mathfn_built_in (s_intype, BUILT_IN_IROUND);
	  else if (outprec == TYPE_PRECISION (long_integer_type_node)
		   && !TYPE_UNSIGNED (type))
	    fn = mathfn_built_in (s_intype, BUILT_IN_LROUND);
	  else if (outprec == TYPE_PRECISION (long_long_integer_type_node)
		   && !TYPE_UNSIGNED (type))
	    fn = mathfn_built_in (s_intype, BUILT_IN_LLROUND);
	  break;

	CASE_FLT_FN (BUILT_IN_NEARBYINT):
	  /* Only convert nearbyint* if we can ignore math exceptions.  */
	  if (flag_trapping_math)
	    break;
	  /* ... Fall through ...  */
	CASE_FLT_FN (BUILT_IN_RINT):
	  /* Only convert in ISO C99 mode and with -fno-math-errno.  */
	  if (!targetm.libc_has_function (function_c99_misc) || flag_errno_math)
	    break;
	  if (outprec < TYPE_PRECISION (integer_type_node)
	      || (outprec == TYPE_PRECISION (integer_type_node)
		  && !TYPE_UNSIGNED (type)))
	    fn = mathfn_built_in (s_intype, BUILT_IN_IRINT);
	  else if (outprec == TYPE_PRECISION (long_integer_type_node)
		   && !TYPE_UNSIGNED (type))
	    fn = mathfn_built_in (s_intype, BUILT_IN_LRINT);
	  else if (outprec == TYPE_PRECISION (long_long_integer_type_node)
		   && !TYPE_UNSIGNED (type))
	    fn = mathfn_built_in (s_intype, BUILT_IN_LLRINT);
	  break;

	CASE_FLT_FN (BUILT_IN_TRUNC):
	  /* (int) trunc (d) rounds toward zero just like the cast does:
	     drop the trunc call entirely.  */
	  return convert_to_integer_1 (type, CALL_EXPR_ARG (s_expr, 0), dofold);

	default:
	  break;
	}

      if (fn)
	{
	  tree newexpr = build_call_expr (fn, 1, CALL_EXPR_ARG (s_expr, 0));
	  return convert_to_integer_1 (type, newexpr, dofold);
	}
    }

  /* Convert (int)logb(d) -> ilogb(d).  */
  if (optimize
      && flag_unsafe_math_optimizations
      && !flag_trapping_math && !flag_errno_math && flag_finite_math_only
      && integer_type_node
      && (outprec > TYPE_PRECISION (integer_type_node)
	  || (outprec == TYPE_PRECISION (integer_type_node)
	      && !TYPE_UNSIGNED (type))))
    {
      tree s_expr = strip_float_extensions (expr);
      tree s_intype = TREE_TYPE (s_expr);
      const enum built_in_function fcode = builtin_mathfn_code (s_expr);
      tree fn = 0;

      switch (fcode)
	{
	CASE_FLT_FN (BUILT_IN_LOGB):
	  fn = mathfn_built_in (s_intype, BUILT_IN_ILOGB);
	  break;

	default:
	  break;
	}

      if (fn)
	{
	  tree newexpr = build_call_expr (fn, 1, CALL_EXPR_ARG (s_expr, 0));
	  return convert_to_integer_1 (type, newexpr, dofold);
	}
    }

  switch (TREE_CODE (intype))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
      if (integer_zerop (expr))
	return build_int_cst (type, 0);

      /* Convert to an unsigned integer of the correct width first, and from
	 there widen/truncate to the required type.  Some targets support the
	 coexistence of multiple valid pointer sizes, so fetch the one we need
	 from the type.  */
      if (!dofold)
	return build1 (CONVERT_EXPR, type, expr);
      expr = fold_build1 (CONVERT_EXPR,
			  lang_hooks.types.type_for_size
			    (TYPE_PRECISION (intype), 0),
			  expr);
      return fold_convert (type, expr);

    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case OFFSET_TYPE:
      /* If this is a logical operation, which just returns 0 or 1, we can
	 change the type of the expression.  */

      if (TREE_CODE_CLASS (ex_form) == tcc_comparison)
	{
	  expr = copy_node (expr);
	  TREE_TYPE (expr) = type;
	  return expr;
	}

      /* If we are widening the type, put in an explicit conversion.
	 Similarly if we are not changing the width.  After this, we know
	 we are truncating EXPR.  */

      else if (outprec >= inprec)
	{
	  enum tree_code code;

	  /* If the precision of the EXPR's type is K bits and the
	     destination mode has more bits, and the sign is changing,
	     it is not safe to use a NOP_EXPR.  For example, suppose
	     that EXPR's type is a 3-bit unsigned integer type, the
	     TYPE is a 3-bit signed integer type, and the machine mode
	     for the types is 8-bit QImode.  In that case, the
	     conversion necessitates an explicit sign-extension.  In
	     the signed-to-unsigned case the high-order bits have to
	     be cleared.  */
	  if (TYPE_UNSIGNED (type) != TYPE_UNSIGNED (TREE_TYPE (expr))
	      && (TYPE_PRECISION (TREE_TYPE (expr))
		  != GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (expr)))))
	    code = CONVERT_EXPR;
	  else
	    code = NOP_EXPR;

	  return maybe_fold_build1_loc (dofold, loc, code, type, expr);
	}

      /* If TYPE is an enumeral type or a type with a precision less
	 than the number of bits in its mode, do the conversion to the
	 type corresponding to its mode, then do a nop conversion
	 to TYPE.  */
      else if (TREE_CODE (type) == ENUMERAL_TYPE
	       || outprec != GET_MODE_PRECISION (TYPE_MODE (type)))
	{
	  expr = convert (lang_hooks.types.type_for_mode
			    (TYPE_MODE (type), TYPE_UNSIGNED (type)), expr);
	  return maybe_fold_build1_loc (dofold, loc, NOP_EXPR, type, expr);
	}

      /* Here detect when we can distribute the truncation down past some
	 arithmetic.  For example, if adding two longs and converting to an
	 int, we can equally well convert both to ints and then add.
	 For the operations handled here, such truncation distribution
	 is always safe.
	 It is desirable in these cases:
	 1) when truncating down to full-word from a larger size
	 2) when truncating takes no work.
	 3) when at least one operand of the arithmetic has been extended
	 (as by C's default conversions).  In this case we need two conversions
	 if we do the arithmetic as already requested, so we might as well
	 truncate both and then combine.  Perhaps that way we need only one.

	 Note that in general we cannot do the arithmetic in a type
	 shorter than the desired result of conversion, even if the operands
	 are both extended from a shorter type, because they might overflow
	 if combined in that type.  The exceptions to this--the times when
	 two narrow values can be combined in their narrow type even to
	 make a wider result--are handled by "shorten" in build_binary_op.  */

      if (dofold)
	switch (ex_form)
	  {
	  case RSHIFT_EXPR:
	    /* We can pass truncation down through right shifting
	       when the shift count is a nonpositive constant.  */
	    if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
		&& tree_int_cst_sgn (TREE_OPERAND (expr, 1)) <= 0)
	      goto trunc1;
	    break;

	  case LSHIFT_EXPR:
	    /* We can pass truncation down through left shifting
	       when the shift count is a nonnegative constant and
	       the target type is unsigned.  */
	    if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
		&& tree_int_cst_sgn (TREE_OPERAND (expr, 1)) >= 0
		&& TYPE_UNSIGNED (type)
		&& TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
	      {
		/* If shift count is less than the width of the truncated type,
		   really shift.  */
		if (tree_int_cst_lt (TREE_OPERAND (expr, 1), TYPE_SIZE (type)))
		  /* In this case, shifting is like multiplication.  */
		  goto trunc1;
		else
		  {
		    /* If it is >= that width, result is zero.
		       Handling this with trunc1 would give the wrong result:
		       (int) ((long long) a << 32) is well defined (as 0)
		       but (int) a << 32 is undefined and would get a
		       warning.  */

		    tree t = build_int_cst (type, 0);

		    /* If the original expression had side-effects, we must
		       preserve it.  */
		    if (TREE_SIDE_EFFECTS (expr))
		      return build2 (COMPOUND_EXPR, type, expr, t);
		    else
		      return t;
		  }
	      }
	    break;

	  case TRUNC_DIV_EXPR:
	    {
	      tree arg0 = get_unwidened (TREE_OPERAND (expr, 0), NULL_TREE);
	      tree arg1 = get_unwidened (TREE_OPERAND (expr, 1), NULL_TREE);

	      /* Don't distribute unless the output precision is at least as
		 big as the actual inputs and it has the same signedness.  */
	      if (outprec >= TYPE_PRECISION (TREE_TYPE (arg0))
		  && outprec >= TYPE_PRECISION (TREE_TYPE (arg1))
		  /* If signedness of arg0 and arg1 don't match,
		     we can't necessarily find a type to compare them in.  */
		  && (TYPE_UNSIGNED (TREE_TYPE (arg0))
		      == TYPE_UNSIGNED (TREE_TYPE (arg1)))
		  /* Do not change the sign of the division.  */
		  && (TYPE_UNSIGNED (TREE_TYPE (expr))
		      == TYPE_UNSIGNED (TREE_TYPE (arg0)))
		  /* Either require unsigned division or a division by
		     a constant that is not -1.  */
		  && (TYPE_UNSIGNED (TREE_TYPE (arg0))
		      || (TREE_CODE (arg1) == INTEGER_CST
			  && !integer_all_onesp (arg1))))
		{
		  tree tem = do_narrow (loc, ex_form, type, arg0, arg1,
					expr, inprec, outprec, dofold);
		  if (tem)
		    return tem;
		}
	      break;
	    }

	  case MAX_EXPR:
	  case MIN_EXPR:
	  case MULT_EXPR:
	    {
	      tree arg0 = get_unwidened (TREE_OPERAND (expr, 0), type);
	      tree arg1 = get_unwidened (TREE_OPERAND (expr, 1), type);

	      /* Don't distribute unless the output precision is at least as
		 big as the actual inputs.  Otherwise, the comparison of the
		 truncated values will be wrong.  */
	      if (outprec >= TYPE_PRECISION (TREE_TYPE (arg0))
		  && outprec >= TYPE_PRECISION (TREE_TYPE (arg1))
		  /* If signedness of arg0 and arg1 don't match,
		     we can't necessarily find a type to compare them in.  */
		  && (TYPE_UNSIGNED (TREE_TYPE (arg0))
		      == TYPE_UNSIGNED (TREE_TYPE (arg1))))
		goto trunc1;
	      break;
	    }

	  case PLUS_EXPR:
	  case MINUS_EXPR:
	  case BIT_AND_EXPR:
	  case BIT_IOR_EXPR:
	  case BIT_XOR_EXPR:
	  trunc1:
	    {
	      tree arg0 = get_unwidened (TREE_OPERAND (expr, 0), type);
	      tree arg1 = get_unwidened (TREE_OPERAND (expr, 1), type);

	      /* Do not try to narrow operands of pointer subtraction;
		 that will interfere with other folding.  */
	      if (ex_form == MINUS_EXPR
		  && CONVERT_EXPR_P (arg0)
		  && CONVERT_EXPR_P (arg1)
		  && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0)))
		  && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
		break;

	      if (outprec >= BITS_PER_WORD
		  || TRULY_NOOP_TRUNCATION (outprec, inprec)
		  || inprec > TYPE_PRECISION (TREE_TYPE (arg0))
		  || inprec > TYPE_PRECISION (TREE_TYPE (arg1)))
		{
		  tree tem = do_narrow (loc, ex_form, type, arg0, arg1,
					expr, inprec, outprec, dofold);
		  if (tem)
		    return tem;
		}
	    }
	    break;

	  case NEGATE_EXPR:
	  case BIT_NOT_EXPR:
	    /* This is not correct for ABS_EXPR,
	       since we must test the sign before truncation.  */
	    {
	      /* Do the arithmetic in type TYPEX,
		 then convert result to TYPE.  */
	      tree typex = type;

	      /* Can't do arithmetic in enumeral types
		 so use an integer type that will hold the values.  */
	      if (TREE_CODE (typex) == ENUMERAL_TYPE)
		typex
		  = lang_hooks.types.type_for_size (TYPE_PRECISION (typex),
						    TYPE_UNSIGNED (typex));

	      /* Unsigned arithmetic avoids signed-overflow
		 undefinedness in the narrowed negation/complement.  */
	      if (!TYPE_UNSIGNED (typex))
		typex = unsigned_type_for (typex);
	      return convert (type,
			      fold_build1 (ex_form, typex,
					   convert (typex,
						    TREE_OPERAND (expr, 0))));
	    }

	  CASE_CONVERT:
	    /* Don't introduce a "can't convert between vector values of
	       different size" error.  */
	    if (TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == VECTOR_TYPE
		&& (GET_MODE_SIZE (TYPE_MODE
				   (TREE_TYPE (TREE_OPERAND (expr, 0))))
		    != GET_MODE_SIZE (TYPE_MODE (type))))
	      break;
	    /* If truncating after truncating, might as well do all at once.
	       If truncating after extending, we may get rid of wasted work.  */
	    return convert (type, get_unwidened (TREE_OPERAND (expr, 0), type));

	  case COND_EXPR:
	    /* It is sometimes worthwhile to push the narrowing down through
	       the conditional and never loses.  A COND_EXPR may have a throw
	       as one operand, which then has void type.  Just leave void
	       operands as they are.  */
	    return
	      fold_build3 (COND_EXPR, type, TREE_OPERAND (expr, 0),
			   VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1)))
			   ? TREE_OPERAND (expr, 1)
			   : convert (type, TREE_OPERAND (expr, 1)),
			   VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 2)))
			   ? TREE_OPERAND (expr, 2)
			   : convert (type, TREE_OPERAND (expr, 2)));

	  default:
	    break;
	  }

      /* When parsing long initializers, we might end up with a lot of casts.
	 Shortcut this.  */
      if (TREE_CODE (expr) == INTEGER_CST)
	return fold_convert (type, expr);
      return build1 (CONVERT_EXPR, type, expr);

    case REAL_TYPE:
      /* When sanitizing float-to-int casts, instrument the truncation
	 with a runtime range check.  */
      if (flag_sanitize & SANITIZE_FLOAT_CAST
	  && do_ubsan_in_current_function ())
	{
	  expr = save_expr (expr);
	  tree check = ubsan_instrument_float_cast (loc, type, expr);
	  expr = build1 (FIX_TRUNC_EXPR, type, expr);
	  if (check == NULL_TREE)
	    return expr;
	  return maybe_fold_build2_loc (dofold, loc, COMPOUND_EXPR,
					TREE_TYPE (expr), check, expr);
	}
      else
	return build1 (FIX_TRUNC_EXPR, type, expr);

    case FIXED_POINT_TYPE:
      return build1 (FIXED_CONVERT_EXPR, type, expr);

    case COMPLEX_TYPE:
      /* Converting complex to integer keeps only the real part.  */
      expr = maybe_fold_build1_loc (dofold, loc, REALPART_EXPR,
				    TREE_TYPE (TREE_TYPE (expr)), expr);
      return convert (type, expr);

    case VECTOR_TYPE:
      if (!tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (TREE_TYPE (expr))))
	{
	  error ("can%'t convert a vector of type %qT"
		 " to type %qT which has different size",
		 TREE_TYPE (expr), type);
	  return error_mark_node;
	}
      return build1 (VIEW_CONVERT_EXPR, type, expr);

    default:
      error ("aggregate value used where an integer was expected");
      return convert (type, integer_zero_node);
    }
}
975
976 /* Convert EXPR to some integer (or enum) type TYPE.
977
978 EXPR must be pointer, integer, discrete (enum, char, or bool), float,
979 fixed-point or vector; in other cases error is called.
980
981 The result of this is always supposed to be a newly created tree node
982 not in use in any existing structure. */
983
984 tree
convert_to_integer(tree type,tree expr)985 convert_to_integer (tree type, tree expr)
986 {
987 return convert_to_integer_1 (type, expr, true);
988 }
989
990 /* A wrapper around convert_to_complex_1 that only folds the
991 expression if DOFOLD, or if it is CONSTANT_CLASS_P. */
992
993 tree
convert_to_integer_maybe_fold(tree type,tree expr,bool dofold)994 convert_to_integer_maybe_fold (tree type, tree expr, bool dofold)
995 {
996 return convert_to_integer_1 (type, expr, dofold || CONSTANT_CLASS_P (expr));
997 }
998
999 /* Convert EXPR to the complex type TYPE in the usual ways. If FOLD_P is
1000 true, try to fold the expression. */
1001
1002 static tree
convert_to_complex_1(tree type,tree expr,bool fold_p)1003 convert_to_complex_1 (tree type, tree expr, bool fold_p)
1004 {
1005 location_t loc = EXPR_LOCATION (expr);
1006 tree subtype = TREE_TYPE (type);
1007
1008 switch (TREE_CODE (TREE_TYPE (expr)))
1009 {
1010 case REAL_TYPE:
1011 case FIXED_POINT_TYPE:
1012 case INTEGER_TYPE:
1013 case ENUMERAL_TYPE:
1014 case BOOLEAN_TYPE:
1015 return build2 (COMPLEX_EXPR, type, convert (subtype, expr),
1016 convert (subtype, integer_zero_node));
1017
1018 case COMPLEX_TYPE:
1019 {
1020 tree elt_type = TREE_TYPE (TREE_TYPE (expr));
1021
1022 if (TYPE_MAIN_VARIANT (elt_type) == TYPE_MAIN_VARIANT (subtype))
1023 return expr;
1024 else if (TREE_CODE (expr) == COMPOUND_EXPR)
1025 {
1026 tree t = convert_to_complex_1 (type, TREE_OPERAND (expr, 1),
1027 fold_p);
1028 if (t == TREE_OPERAND (expr, 1))
1029 return expr;
1030 return build2_loc (EXPR_LOCATION (expr), COMPOUND_EXPR,
1031 TREE_TYPE (t), TREE_OPERAND (expr, 0), t);
1032 }
1033 else if (TREE_CODE (expr) == COMPLEX_EXPR)
1034 return maybe_fold_build2_loc (fold_p, loc, COMPLEX_EXPR, type,
1035 convert (subtype,
1036 TREE_OPERAND (expr, 0)),
1037 convert (subtype,
1038 TREE_OPERAND (expr, 1)));
1039 else
1040 {
1041 expr = save_expr (expr);
1042 tree realp = maybe_fold_build1_loc (fold_p, loc, REALPART_EXPR,
1043 TREE_TYPE (TREE_TYPE (expr)),
1044 expr);
1045 tree imagp = maybe_fold_build1_loc (fold_p, loc, IMAGPART_EXPR,
1046 TREE_TYPE (TREE_TYPE (expr)),
1047 expr);
1048 return maybe_fold_build2_loc (fold_p, loc, COMPLEX_EXPR, type,
1049 convert (subtype, realp),
1050 convert (subtype, imagp));
1051 }
1052 }
1053
1054 case POINTER_TYPE:
1055 case REFERENCE_TYPE:
1056 error ("pointer value used where a complex was expected");
1057 return convert_to_complex_1 (type, integer_zero_node, fold_p);
1058
1059 default:
1060 error ("aggregate value used where a complex was expected");
1061 return convert_to_complex_1 (type, integer_zero_node, fold_p);
1062 }
1063 }
1064
1065 /* A wrapper around convert_to_complex_1 that always folds the
1066 expression. */
1067
1068 tree
convert_to_complex(tree type,tree expr)1069 convert_to_complex (tree type, tree expr)
1070 {
1071 return convert_to_complex_1 (type, expr, true);
1072 }
1073
1074 /* A wrapper around convert_to_complex_1 that only folds the
1075 expression if DOFOLD, or if it is CONSTANT_CLASS_P. */
1076
1077 tree
convert_to_complex_maybe_fold(tree type,tree expr,bool dofold)1078 convert_to_complex_maybe_fold (tree type, tree expr, bool dofold)
1079 {
1080 return convert_to_complex_1 (type, expr, dofold || CONSTANT_CLASS_P (expr));
1081 }
1082
1083 /* Convert EXPR to the vector type TYPE in the usual ways. */
1084
1085 tree
convert_to_vector(tree type,tree expr)1086 convert_to_vector (tree type, tree expr)
1087 {
1088 switch (TREE_CODE (TREE_TYPE (expr)))
1089 {
1090 case INTEGER_TYPE:
1091 case VECTOR_TYPE:
1092 if (!tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (TREE_TYPE (expr))))
1093 {
1094 error ("can%'t convert a value of type %qT"
1095 " to vector type %qT which has different size",
1096 TREE_TYPE (expr), type);
1097 return error_mark_node;
1098 }
1099 return build1 (VIEW_CONVERT_EXPR, type, expr);
1100
1101 default:
1102 error ("can%'t convert value to a vector");
1103 return error_mark_node;
1104 }
1105 }
1106
1107 /* Convert EXPR to some fixed-point type TYPE.
1108
1109 EXPR must be fixed-point, float, integer, or enumeral;
1110 in other cases error is called. */
1111
1112 tree
convert_to_fixed(tree type,tree expr)1113 convert_to_fixed (tree type, tree expr)
1114 {
1115 if (integer_zerop (expr))
1116 {
1117 tree fixed_zero_node = build_fixed (type, FCONST0 (TYPE_MODE (type)));
1118 return fixed_zero_node;
1119 }
1120 else if (integer_onep (expr) && ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type)))
1121 {
1122 tree fixed_one_node = build_fixed (type, FCONST1 (TYPE_MODE (type)));
1123 return fixed_one_node;
1124 }
1125
1126 switch (TREE_CODE (TREE_TYPE (expr)))
1127 {
1128 case FIXED_POINT_TYPE:
1129 case INTEGER_TYPE:
1130 case ENUMERAL_TYPE:
1131 case BOOLEAN_TYPE:
1132 case REAL_TYPE:
1133 return build1 (FIXED_CONVERT_EXPR, type, expr);
1134
1135 case COMPLEX_TYPE:
1136 return convert (type,
1137 fold_build1 (REALPART_EXPR,
1138 TREE_TYPE (TREE_TYPE (expr)), expr));
1139
1140 default:
1141 error ("aggregate value used where a fixed-point was expected");
1142 return error_mark_node;
1143 }
1144 }
1145