1 /* intrinsics.cc -- D language compiler intrinsics.
2 Copyright (C) 2006-2019 Free Software Foundation, Inc.
3
4 GCC is free software; you can redistribute it and/or modify
5 it under the terms of the GNU General Public License as published by
6 the Free Software Foundation; either version 3, or (at your option)
7 any later version.
8
9 GCC is distributed in the hope that it will be useful,
10 but WITHOUT ANY WARRANTY; without even the implied warranty of
11 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 GNU General Public License for more details.
13
14 You should have received a copy of the GNU General Public License
15 along with GCC; see the file COPYING3. If not see
16 <http://www.gnu.org/licenses/>. */
17
18 #include "config.h"
19 #include "system.h"
20 #include "coretypes.h"
21
22 #include "dmd/declaration.h"
23 #include "dmd/identifier.h"
24 #include "dmd/mangle.h"
25 #include "dmd/mangle.h"
26 #include "dmd/module.h"
27 #include "dmd/template.h"
28
29 #include "tm.h"
30 #include "function.h"
31 #include "tree.h"
32 #include "fold-const.h"
33 #include "stringpool.h"
34 #include "builtins.h"
35
36 #include "d-tree.h"
37
38
/* An internal struct used to hold information on D intrinsics.  One entry
   exists per DEF_D_INTRINSIC in intrinsics.def.  */

struct intrinsic_decl
{
  /* The DECL_FUNCTION_CODE of this decl.  */
  intrinsic_code code;

  /* The name of the intrinsic.  */
  const char *name;

  /* The module where the intrinsic is located.  */
  const char *module;

  /* The mangled signature decoration of the intrinsic.  */
  const char *deco;

  /* True if the intrinsic is only handled in CTFE.  */
  bool ctfeonly;
};
58
/* Table of all D intrinsics recognized by the compiler, generated by
   expanding every DEF_D_INTRINSIC entry in intrinsics.def.  Indexed by
   intrinsic_code.  */

static const intrinsic_decl intrinsic_decls[] =
{
#define DEF_D_INTRINSIC(CODE, ALIAS, NAME, MODULE, DECO, CTFE) \
    { INTRINSIC_ ## ALIAS, NAME, MODULE, DECO, CTFE },

#include "intrinsics.def"

#undef DEF_D_INTRINSIC
};
68
69 /* Checks if DECL is an intrinsic or run time library function that requires
70 special processing. Sets DECL_INTRINSIC_CODE so it can be identified
71 later in maybe_expand_intrinsic. */
72
73 void
maybe_set_intrinsic(FuncDeclaration * decl)74 maybe_set_intrinsic (FuncDeclaration *decl)
75 {
76 if (!decl->ident || decl->builtin != BUILTINunknown)
77 return;
78
79 /* The builtin flag is updated only if we can evaluate the intrinsic
80 at compile-time. Such as the math or bitop intrinsics. */
81 decl->builtin = BUILTINno;
82
83 /* Check if it's a compiler intrinsic. We only require that any
84 internally recognised intrinsics are declared in a module with
85 an explicit module declaration. */
86 Module *m = decl->getModule ();
87
88 if (!m || !m->md)
89 return;
90
91 TemplateInstance *ti = decl->isInstantiated ();
92 TemplateDeclaration *td = ti ? ti->tempdecl->isTemplateDeclaration () : NULL;
93
94 const char *tname = decl->ident->toChars ();
95 const char *tmodule = m->md->toChars ();
96 const char *tdeco = (td == NULL) ? decl->type->deco : NULL;
97
98 /* Look through all D intrinsics. */
99 for (size_t i = 0; i < (int) INTRINSIC_LAST; i++)
100 {
101 if (!intrinsic_decls[i].name)
102 continue;
103
104 if (strcmp (intrinsic_decls[i].name, tname) != 0
105 || strcmp (intrinsic_decls[i].module, tmodule) != 0)
106 continue;
107
108 /* Instantiated functions would have the wrong type deco, get it from the
109 template member instead. */
110 if (tdeco == NULL)
111 {
112 if (!td || !td->onemember)
113 return;
114
115 FuncDeclaration *fd = td->onemember->isFuncDeclaration ();
116 if (fd == NULL)
117 return;
118
119 OutBuffer buf;
120 mangleToBuffer (fd->type, &buf);
121 tdeco = buf.extractString ();
122 }
123
124 /* Matching the type deco may be a bit too strict, as it means that all
125 function attributes that end up in the signature must be kept aligned
126 between the compiler and library declaration. */
127 if (strcmp (intrinsic_decls[i].deco, tdeco) == 0)
128 {
129 intrinsic_code code = intrinsic_decls[i].code;
130
131 if (decl->csym == NULL)
132 get_symbol_decl (decl);
133
134 /* If there is no function body, then the implementation is always
135 provided by the compiler. */
136 if (!decl->fbody)
137 {
138 DECL_BUILT_IN_CLASS (decl->csym) = BUILT_IN_FRONTEND;
139 DECL_FUNCTION_CODE (decl->csym) = (built_in_function) code;
140 }
141
142 /* Infer whether the intrinsic can be used for CTFE, let the
143 front-end know that it can be evaluated at compile-time. */
144 switch (code)
145 {
146 case INTRINSIC_VA_ARG:
147 case INTRINSIC_C_VA_ARG:
148 case INTRINSIC_VASTART:
149 case INTRINSIC_ADDS:
150 case INTRINSIC_SUBS:
151 case INTRINSIC_MULS:
152 case INTRINSIC_NEGS:
153 case INTRINSIC_VLOAD:
154 case INTRINSIC_VSTORE:
155 break;
156
157 case INTRINSIC_POW:
158 {
159 /* Check that this overload of pow() is has an equivalent
160 built-in function. It could be `int pow(int, int)'. */
161 tree rettype = TREE_TYPE (TREE_TYPE (decl->csym));
162 if (mathfn_built_in (rettype, BUILT_IN_POW) != NULL_TREE)
163 decl->builtin = BUILTINyes;
164 break;
165 }
166
167 default:
168 decl->builtin = BUILTINyes;
169 break;
170 }
171
172 /* The intrinsic was marked as CTFE-only. */
173 if (intrinsic_decls[i].ctfeonly)
174 DECL_BUILT_IN_CTFE (decl->csym) = 1;
175
176 DECL_INTRINSIC_CODE (decl->csym) = code;
177 break;
178 }
179 }
180 }
181
182 /* Construct a function call to the built-in function CODE, N is the number of
183 arguments, and the `...' parameters are the argument expressions.
184 The original call expression is held in CALLEXP. */
185
186 static tree
call_builtin_fn(tree callexp,built_in_function code,int n,...)187 call_builtin_fn (tree callexp, built_in_function code, int n, ...)
188 {
189 tree *argarray = XALLOCAVEC (tree, n);
190 va_list ap;
191
192 va_start (ap, n);
193 for (int i = 0; i < n; i++)
194 argarray[i] = va_arg (ap, tree);
195 va_end (ap);
196
197 tree exp = build_call_expr_loc_array (EXPR_LOCATION (callexp),
198 builtin_decl_explicit (code),
199 n, argarray);
200 return convert (TREE_TYPE (callexp), fold (exp));
201 }
202
/* Expand a front-end intrinsic call to bsf().  This takes one argument,
   the signature to which can be either:

	int bsf (uint arg);
	int bsf (ulong arg);

   This scans all bits in the given argument starting with the first,
   returning the bit number of the first bit set.  The original call
   expression is held in CALLEXP.  */

static tree
expand_intrinsic_bsf (tree callexp)
{
  /* The bsf() intrinsic gets turned into __builtin_ctz(arg).
     The return value is supposed to be undefined if arg is zero.  */
  tree arg = CALL_EXPR_ARG (callexp, 0);
  int argsize = TYPE_PRECISION (TREE_TYPE (arg));

  /* Which variant of __builtin_ctz* should we call?  */
  built_in_function code = (argsize <= INT_TYPE_SIZE) ? BUILT_IN_CTZ
    : (argsize <= LONG_TYPE_SIZE) ? BUILT_IN_CTZL
    : (argsize <= LONG_LONG_TYPE_SIZE) ? BUILT_IN_CTZLL
    : END_BUILTINS;

  gcc_assert (code != END_BUILTINS);

  return call_builtin_fn (callexp, code, 1, arg);
}
231
232 /* Expand a front-end instrinsic call to bsr(). This takes one argument,
233 the signature to which can be either:
234
235 int bsr (uint arg);
236 int bsr (ulong arg);
237
238 This scans all bits in the given argument from the most significant bit
239 to the least significant, returning the bit number of the first bit set.
240 The original call expression is held in CALLEXP. */
241
242 static tree
expand_intrinsic_bsr(tree callexp)243 expand_intrinsic_bsr (tree callexp)
244 {
245 /* The bsr() intrinsic gets turned into (size - 1) - __builtin_clz(arg).
246 The return value is supposed to be undefined if arg is zero. */
247 tree arg = CALL_EXPR_ARG (callexp, 0);
248 tree type = TREE_TYPE (arg);
249 int argsize = TYPE_PRECISION (type);
250
251 /* Which variant of __builtin_clz* should we call? */
252 built_in_function code = (argsize <= INT_TYPE_SIZE) ? BUILT_IN_CLZ
253 : (argsize <= LONG_TYPE_SIZE) ? BUILT_IN_CLZL
254 : (argsize <= LONG_LONG_TYPE_SIZE) ? BUILT_IN_CLZLL
255 : END_BUILTINS;
256
257 gcc_assert (code != END_BUILTINS);
258
259 tree result = call_builtin_fn (callexp, code, 1, arg);
260
261 /* Handle int -> long conversions. */
262 if (TREE_TYPE (result) != type)
263 result = fold_convert (type, result);
264
265 result = fold_build2 (MINUS_EXPR, type,
266 build_integer_cst (argsize - 1, type), result);
267 return fold_convert (TREE_TYPE (callexp), result);
268 }
269
/* Expand a front-end intrinsic call to INTRINSIC, which is either a call to
   bt(), btc(), btr(), or bts().  These intrinsics expect to take two arguments,
   the signature to which is:

	int bt (size_t* ptr, size_t bitnum);

   All intrinsics test if a bit is set and return the result of that condition.
   Variants of `bt' will then update that bit.  `btc' complements the bit, `bts'
   sets the bit, and `btr' resets the bit.  The original call expression is
   held in CALLEXP.  */

static tree
expand_intrinsic_bt (intrinsic_code intrinsic, tree callexp)
{
  tree ptr = CALL_EXPR_ARG (callexp, 0);
  tree bitnum = CALL_EXPR_ARG (callexp, 1);
  tree type = TREE_TYPE (TREE_TYPE (ptr));

  /* size_t bitsize = sizeof(*ptr) * BITS_PER_UNIT;  */
  tree bitsize = fold_convert (type, TYPE_SIZE (type));

  /* ptr[bitnum / bitsize]  */
  ptr = build_array_index (ptr, fold_build2 (TRUNC_DIV_EXPR, type,
					     bitnum, bitsize));
  ptr = indirect_ref (type, ptr);

  /* mask = 1 << (bitnum % bitsize);  */
  bitnum = fold_build2 (TRUNC_MOD_EXPR, type, bitnum, bitsize);
  bitnum = fold_build2 (LSHIFT_EXPR, type, size_one_node, bitnum);

  /* cond = ptr[bitnum / size] & mask;  */
  tree cond = fold_build2 (BIT_AND_EXPR, type, ptr, bitnum);

  /* cond ? -1 : 0;  */
  cond = build_condition (TREE_TYPE (callexp), d_truthvalue_conversion (cond),
			  integer_minus_one_node, integer_zero_node);

  /* Update the bit as needed, only testing the bit for bt().  */
  if (intrinsic == INTRINSIC_BT)
    return cond;

  /* Map each updating variant onto the bitwise operation it performs.  */
  tree_code code = (intrinsic == INTRINSIC_BTC) ? BIT_XOR_EXPR
    : (intrinsic == INTRINSIC_BTR) ? BIT_AND_EXPR
    : (intrinsic == INTRINSIC_BTS) ? BIT_IOR_EXPR
    : ERROR_MARK;
  gcc_assert (code != ERROR_MARK);

  /* ptr[bitnum / size] op= mask;  For btr the mask must be inverted so the
     AND clears only the selected bit.  */
  if (intrinsic == INTRINSIC_BTR)
    bitnum = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (bitnum), bitnum);

  ptr = modify_expr (ptr, fold_build2 (code, TREE_TYPE (ptr), ptr, bitnum));

  /* Store the condition result in a temporary, and return expressions in
     correct order of evaluation.  */
  tree tmp = build_local_temp (TREE_TYPE (callexp));
  cond = modify_expr (tmp, cond);

  return compound_expr (cond, compound_expr (ptr, tmp));
}
330
331 /* Expand a front-end intrinsic call to bswap(). This takes one argument, the
332 signature to which can be either:
333
334 int bswap (uint arg);
335 int bswap (ulong arg);
336
337 This swaps all bytes in an N byte type end-to-end. The original call
338 expression is held in CALLEXP. */
339
340 static tree
expand_intrinsic_bswap(tree callexp)341 expand_intrinsic_bswap (tree callexp)
342 {
343 tree arg = CALL_EXPR_ARG (callexp, 0);
344 int argsize = TYPE_PRECISION (TREE_TYPE (arg));
345
346 /* Which variant of __builtin_bswap* should we call? */
347 built_in_function code = (argsize == 32) ? BUILT_IN_BSWAP32
348 : (argsize == 64) ? BUILT_IN_BSWAP64
349 : END_BUILTINS;
350
351 gcc_assert (code != END_BUILTINS);
352
353 return call_builtin_fn (callexp, code, 1, arg);
354 }
355
356 /* Expand a front-end intrinsic call to popcnt(). This takes one argument, the
357 signature to which can be either:
358
359 int popcnt (uint arg);
360 int popcnt (ulong arg);
361
362 Calculates the number of set bits in an integer. The original call
363 expression is held in CALLEXP. */
364
365 static tree
expand_intrinsic_popcnt(tree callexp)366 expand_intrinsic_popcnt (tree callexp)
367 {
368 tree arg = CALL_EXPR_ARG (callexp, 0);
369 int argsize = TYPE_PRECISION (TREE_TYPE (arg));
370
371 /* Which variant of __builtin_popcount* should we call? */
372 built_in_function code = (argsize <= INT_TYPE_SIZE) ? BUILT_IN_POPCOUNT
373 : (argsize <= LONG_TYPE_SIZE) ? BUILT_IN_POPCOUNTL
374 : (argsize <= LONG_LONG_TYPE_SIZE) ? BUILT_IN_POPCOUNTLL
375 : END_BUILTINS;
376
377 gcc_assert (code != END_BUILTINS);
378
379 return call_builtin_fn (callexp, code, 1, arg);
380 }
381
382 /* Expand a front-end intrinsic call to INTRINSIC, which is either a call to
383 sqrt(), sqrtf(), sqrtl(). These intrinsics expect to take one argument,
384 the signature to which can be either:
385
386 float sqrt (float arg);
387 double sqrt (double arg);
388 real sqrt (real arg);
389
390 This computes the square root of the given argument. The original call
391 expression is held in CALLEXP. */
392
393 static tree
expand_intrinsic_sqrt(intrinsic_code intrinsic,tree callexp)394 expand_intrinsic_sqrt (intrinsic_code intrinsic, tree callexp)
395 {
396 tree arg = CALL_EXPR_ARG (callexp, 0);
397
398 /* Which variant of __builtin_sqrt* should we call? */
399 built_in_function code = (intrinsic == INTRINSIC_SQRT) ? BUILT_IN_SQRT
400 : (intrinsic == INTRINSIC_SQRTF) ? BUILT_IN_SQRTF
401 : (intrinsic == INTRINSIC_SQRTL) ? BUILT_IN_SQRTL
402 : END_BUILTINS;
403
404 gcc_assert (code != END_BUILTINS);
405 return call_builtin_fn (callexp, code, 1, arg);
406 }
407
408 /* Expand a front-end intrinsic call to copysign(). This takes two arguments,
409 the signature to which can be either:
410
411 float copysign (T to, float from);
412 double copysign (T to, double from);
413 real copysign (T to, real from);
414
415 This computes a value composed of TO with the sign bit of FROM. The original
416 call expression is held in CALLEXP. */
417
418 static tree
expand_intrinsic_copysign(tree callexp)419 expand_intrinsic_copysign (tree callexp)
420 {
421 tree to = CALL_EXPR_ARG (callexp, 0);
422 tree from = CALL_EXPR_ARG (callexp, 1);
423 tree type = TREE_TYPE (to);
424
425 /* Convert parameters to the same type. Prefer the first parameter unless it
426 is an integral type. */
427 if (INTEGRAL_TYPE_P (type))
428 {
429 to = fold_convert (TREE_TYPE (from), to);
430 type = TREE_TYPE (to);
431 }
432 else
433 from = fold_convert (type, from);
434
435 /* Which variant of __builtin_copysign* should we call? */
436 tree builtin = mathfn_built_in (type, BUILT_IN_COPYSIGN);
437 gcc_assert (builtin != NULL_TREE);
438
439 return call_builtin_fn (callexp, DECL_FUNCTION_CODE (builtin), 2,
440 to, from);
441 }
442
443 /* Expand a front-end intrinsic call to pow(). This takes two arguments, the
444 signature to which can be either:
445
446 float pow (float base, T exponent);
447 double pow (double base, T exponent);
448 real pow (real base, T exponent);
449
450 This computes the value of BASE raised to the power of EXPONENT.
451 The original call expression is held in CALLEXP. */
452
453 static tree
expand_intrinsic_pow(tree callexp)454 expand_intrinsic_pow (tree callexp)
455 {
456 tree base = CALL_EXPR_ARG (callexp, 0);
457 tree exponent = CALL_EXPR_ARG (callexp, 1);
458 tree exptype = TREE_TYPE (exponent);
459
460 /* Which variant of __builtin_pow* should we call? */
461 built_in_function code = SCALAR_FLOAT_TYPE_P (exptype) ? BUILT_IN_POW
462 : INTEGRAL_TYPE_P (exptype) ? BUILT_IN_POWI
463 : END_BUILTINS;
464 gcc_assert (code != END_BUILTINS);
465
466 tree builtin = mathfn_built_in (TREE_TYPE (base), code);
467 gcc_assert (builtin != NULL_TREE);
468
469 return call_builtin_fn (callexp, DECL_FUNCTION_CODE (builtin), 2,
470 base, exponent);
471 }
472
/* Expand a front-end intrinsic call to va_arg().  This takes either one or two
   arguments, the signature to which can be either:

	T va_arg(T) (ref va_list ap);
	void va_arg(T) (va_list ap, ref T parmn);

   This retrieves the next variadic parameter that is type T from the given
   va_list.  If also given, store the value into parmn, otherwise return it.
   The original call expression is held in CALLEXP.  */

static tree
expand_intrinsic_vaarg (tree callexp)
{
  tree ap = CALL_EXPR_ARG (callexp, 0);
  tree parmn = NULL_TREE;
  tree type;

  STRIP_NOPS (ap);

  /* One-argument form: the result type is the call expression's own type.  */
  if (call_expr_nargs (callexp) == 1)
    type = TREE_TYPE (callexp);
  else
    {
      parmn = CALL_EXPR_ARG (callexp, 1);
      STRIP_NOPS (parmn);

      /* The `ref' argument to va_arg is either an address or reference,
	 get the value of it.  */
      if (TREE_CODE (parmn) == PARM_DECL && POINTER_TYPE_P (TREE_TYPE (parmn)))
	parmn = build_deref (parmn);
      else
	{
	  gcc_assert (TREE_CODE (parmn) == ADDR_EXPR);
	  parmn = TREE_OPERAND (parmn, 0);
	}

      type = TREE_TYPE (parmn);
    }

  /* (T) VA_ARG_EXP<ap>;  */
  tree exp = build1 (VA_ARG_EXPR, type, ap);

  /* parmn = (T) VA_ARG_EXP<ap>;  */
  if (parmn != NULL_TREE)
    exp = modify_expr (parmn, exp);

  return exp;
}
521
/* Expand a front-end intrinsic call to va_start(), which takes two arguments,
   the signature to which is:

	void va_start(T) (out va_list ap, ref T parmn);

   This initializes the va_list type, where parmn should be the last named
   parameter.  The original call expression is held in CALLEXP.  */

static tree
expand_intrinsic_vastart (tree callexp)
{
  tree ap = CALL_EXPR_ARG (callexp, 0);
  tree parmn = CALL_EXPR_ARG (callexp, 1);

  STRIP_NOPS (ap);
  STRIP_NOPS (parmn);

  /* The va_list argument should already have its address taken.  The second
     argument, however, is inout and that needs to be fixed to prevent a
     warning.  Could be casting, so need to check type too?  */
  gcc_assert (TREE_CODE (ap) == ADDR_EXPR
	      || (TREE_CODE (ap) == PARM_DECL
		  && POINTER_TYPE_P (TREE_TYPE (ap))));

  /* Assuming nobody tries to change the return type.  */
  if (TREE_CODE (parmn) != PARM_DECL)
    {
      gcc_assert (TREE_CODE (parmn) == ADDR_EXPR);
      parmn = TREE_OPERAND (parmn, 0);
    }

  return call_builtin_fn (callexp, BUILT_IN_VA_START, 2, ap, parmn);
}
555
/* Expand a front-end intrinsic call to INTRINSIC, which is either a call to
   adds(), addu(), subs(), subu(), negs(), muls(), or mulu().  These intrinsics
   expect to take two or three arguments, the signature to which can be either:

	int adds (int x, int y, ref bool overflow);
	long adds (long x, long y, ref bool overflow);
	int negs (int x, ref bool overflow);
	long negs (long x, ref bool overflow);

   This performs an operation on two signed or unsigned integers, checking for
   overflow.  The overflow is sticky, meaning that a sequence of operations
   can be done and overflow need only be checked at the end.  The original call
   expression is held in CALLEXP.  */

static tree
expand_intrinsic_checkedint (intrinsic_code intrinsic, tree callexp)
{
  tree type = TREE_TYPE (callexp);
  tree x;
  tree y;
  tree overflow;

  /* The negs() intrinsic gets turned into SUB_OVERFLOW (0, y).  */
  if (intrinsic == INTRINSIC_NEGS)
    {
      x = fold_convert (type, integer_zero_node);
      y = CALL_EXPR_ARG (callexp, 0);
      overflow = CALL_EXPR_ARG (callexp, 1);
    }
  else
    {
      x = CALL_EXPR_ARG (callexp, 0);
      y = CALL_EXPR_ARG (callexp, 1);
      overflow = CALL_EXPR_ARG (callexp, 2);
    }

  /* Which variant of *_OVERFLOW should we generate?  */
  internal_fn icode = (intrinsic == INTRINSIC_ADDS) ? IFN_ADD_OVERFLOW
    : (intrinsic == INTRINSIC_SUBS) ? IFN_SUB_OVERFLOW
    : (intrinsic == INTRINSIC_MULS) ? IFN_MUL_OVERFLOW
    : (intrinsic == INTRINSIC_NEGS) ? IFN_SUB_OVERFLOW
    : IFN_LAST;
  gcc_assert (icode != IFN_LAST);

  /* The internal function returns a complex value: the real part is the
     result, the imaginary part is the overflow flag.  */
  tree result
    = build_call_expr_internal_loc (EXPR_LOCATION (callexp), icode,
				    build_complex_type (type), 2, x, y);

  STRIP_NOPS (overflow);
  overflow = build_deref (overflow);

  /* Assign returned result to overflow parameter, however if overflow is
     already true, maintain its value.  */
  type = TREE_TYPE (overflow);
  result = save_expr (result);

  /* OR the new overflow flag into the existing one, making it sticky.  */
  tree exp = fold_build2 (BIT_IOR_EXPR, type, overflow,
			  fold_convert (type, imaginary_part (result)));
  exp = modify_expr (overflow, exp);

  /* Return the value of result.  */
  return compound_expr (exp, real_part (result));
}
619
620 /* Expand a front-end instrinsic call to volatileLoad(). This takes one
621 argument, the signature to which can be either:
622
623 ubyte volatileLoad (ubyte* ptr);
624 ushort volatileLoad (ushort* ptr);
625 uint volatileLoad (uint* ptr);
626 ulong volatileLoad (ulong* ptr);
627
628 This reads a value from the memory location indicated by ptr. Calls to
629 them are be guaranteed to not be removed (such as during DCE) or reordered
630 in the same thread. The original call expression is held in CALLEXP. */
631
632 static tree
expand_volatile_load(tree callexp)633 expand_volatile_load (tree callexp)
634 {
635 tree ptr = CALL_EXPR_ARG (callexp, 0);
636 tree ptrtype = TREE_TYPE (ptr);
637 gcc_assert (POINTER_TYPE_P (ptrtype));
638
639 /* (T) *(volatile T *) ptr; */
640 tree type = build_qualified_type (TREE_TYPE (ptrtype), TYPE_QUAL_VOLATILE);
641 tree result = indirect_ref (type, ptr);
642 TREE_THIS_VOLATILE (result) = 1;
643
644 return result;
645 }
646
647 /* Expand a front-end instrinsic call to volatileStore(). This takes two
648 arguments, the signature to which can be either:
649
650 void volatileStore (ubyte* ptr, ubyte value);
651 void volatileStore (ushort* ptr, ushort value);
652 void volatileStore (uint* ptr, uint value);
653 void volatileStore (ulong* ptr, ulong value);
654
655 This writes a value to the memory location indicated by ptr. Calls to
656 them are be guaranteed to not be removed (such as during DCE) or reordered
657 in the same thread. The original call expression is held in CALLEXP. */
658
659 static tree
expand_volatile_store(tree callexp)660 expand_volatile_store (tree callexp)
661 {
662 tree ptr = CALL_EXPR_ARG (callexp, 0);
663 tree ptrtype = TREE_TYPE (ptr);
664 gcc_assert (POINTER_TYPE_P (ptrtype));
665
666 /* (T) *(volatile T *) ptr; */
667 tree type = build_qualified_type (TREE_TYPE (ptrtype), TYPE_QUAL_VOLATILE);
668 tree result = indirect_ref (type, ptr);
669 TREE_THIS_VOLATILE (result) = 1;
670
671 /* (*(volatile T *) ptr) = value; */
672 tree value = CALL_EXPR_ARG (callexp, 1);
673 return modify_expr (result, value);
674 }
675
/* If CALLEXP is for an intrinsic, expand and return inlined compiler
   generated instructions.  Most map directly to GCC builtins, others
   require a little extra work around them.  */

tree
maybe_expand_intrinsic (tree callexp)
{
  tree callee = CALL_EXPR_FN (callexp);

  if (TREE_CODE (callee) == ADDR_EXPR)
    callee = TREE_OPERAND (callee, 0);

  if (TREE_CODE (callee) != FUNCTION_DECL)
    return callexp;

  /* Don't expand CTFE-only intrinsics outside of semantic processing.  */
  if (DECL_BUILT_IN_CTFE (callee) && !doing_semantic_analysis_p)
    return callexp;

  intrinsic_code intrinsic = DECL_INTRINSIC_CODE (callee);
  built_in_function code;

  switch (intrinsic)
    {
    case INTRINSIC_NONE:
      return callexp;

    case INTRINSIC_BSF:
      return expand_intrinsic_bsf (callexp);

    case INTRINSIC_BSR:
      return expand_intrinsic_bsr (callexp);

    case INTRINSIC_BT:
    case INTRINSIC_BTC:
    case INTRINSIC_BTR:
    case INTRINSIC_BTS:
      return expand_intrinsic_bt (intrinsic, callexp);

    case INTRINSIC_BSWAP:
      return expand_intrinsic_bswap (callexp);

    case INTRINSIC_POPCNT:
      return expand_intrinsic_popcnt (callexp);

    /* The simple math intrinsics map directly to the long double (`L')
       built-in; call_builtin_fn converts the result back to the call's
       original type.  */
    case INTRINSIC_COS:
      return call_builtin_fn (callexp, BUILT_IN_COSL, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_SIN:
      return call_builtin_fn (callexp, BUILT_IN_SINL, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_RNDTOL:
      /* Not sure if llroundl stands as a good replacement for the
	 expected behavior of rndtol.  */
      return call_builtin_fn (callexp, BUILT_IN_LLROUNDL, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_SQRT:
    case INTRINSIC_SQRTF:
    case INTRINSIC_SQRTL:
      return expand_intrinsic_sqrt (intrinsic, callexp);

    case INTRINSIC_LDEXP:
      return call_builtin_fn (callexp, BUILT_IN_LDEXPL, 2,
			      CALL_EXPR_ARG (callexp, 0),
			      CALL_EXPR_ARG (callexp, 1));

    case INTRINSIC_FABS:
      return call_builtin_fn (callexp, BUILT_IN_FABSL, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_RINT:
      return call_builtin_fn (callexp, BUILT_IN_RINTL, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_TAN:
      return call_builtin_fn (callexp, BUILT_IN_TANL, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_ISNAN:
      return call_builtin_fn (callexp, BUILT_IN_ISNAN, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_ISINFINITY:
      return call_builtin_fn (callexp, BUILT_IN_ISINF, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_ISFINITE:
      return call_builtin_fn (callexp, BUILT_IN_ISFINITE, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_EXP:
      return call_builtin_fn (callexp, BUILT_IN_EXPL, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_EXPM1:
      return call_builtin_fn (callexp, BUILT_IN_EXPM1L, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_EXP2:
      return call_builtin_fn (callexp, BUILT_IN_EXP2L, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_LOG:
      return call_builtin_fn (callexp, BUILT_IN_LOGL, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_LOG2:
      return call_builtin_fn (callexp, BUILT_IN_LOG2L, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_LOG10:
      return call_builtin_fn (callexp, BUILT_IN_LOG10L, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_ROUND:
      return call_builtin_fn (callexp, BUILT_IN_ROUNDL, 1,
			      CALL_EXPR_ARG (callexp, 0));

    /* floor() and ceil() have per-precision D overloads, so pick the
       matching built-in rather than always using the `L' variant.  */
    case INTRINSIC_FLOORF:
    case INTRINSIC_FLOOR:
    case INTRINSIC_FLOORL:
      code = (intrinsic == INTRINSIC_FLOOR) ? BUILT_IN_FLOOR
	: (intrinsic == INTRINSIC_FLOORF) ? BUILT_IN_FLOORF
	: BUILT_IN_FLOORL;
      return call_builtin_fn (callexp, code, 1, CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_CEILF:
    case INTRINSIC_CEIL:
    case INTRINSIC_CEILL:
      code = (intrinsic == INTRINSIC_CEIL) ? BUILT_IN_CEIL
	: (intrinsic == INTRINSIC_CEILF) ? BUILT_IN_CEILF
	: BUILT_IN_CEILL;
      return call_builtin_fn (callexp, code, 1, CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_TRUNC:
      return call_builtin_fn (callexp, BUILT_IN_TRUNCL, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_FMIN:
      return call_builtin_fn (callexp, BUILT_IN_FMINL, 2,
			      CALL_EXPR_ARG (callexp, 0),
			      CALL_EXPR_ARG (callexp, 1));

    case INTRINSIC_FMAX:
      return call_builtin_fn (callexp, BUILT_IN_FMAXL, 2,
			      CALL_EXPR_ARG (callexp, 0),
			      CALL_EXPR_ARG (callexp, 1));

    case INTRINSIC_COPYSIGN:
      return expand_intrinsic_copysign (callexp);

    case INTRINSIC_POW:
      return expand_intrinsic_pow (callexp);

    case INTRINSIC_FMA:
      return call_builtin_fn (callexp, BUILT_IN_FMAL, 3,
			      CALL_EXPR_ARG (callexp, 0),
			      CALL_EXPR_ARG (callexp, 1),
			      CALL_EXPR_ARG (callexp, 2));

    case INTRINSIC_VA_ARG:
    case INTRINSIC_C_VA_ARG:
      return expand_intrinsic_vaarg (callexp);

    case INTRINSIC_VASTART:
      return expand_intrinsic_vastart (callexp);

    case INTRINSIC_ADDS:
    case INTRINSIC_SUBS:
    case INTRINSIC_MULS:
    case INTRINSIC_NEGS:
      return expand_intrinsic_checkedint (intrinsic, callexp);

    case INTRINSIC_VLOAD:
      return expand_volatile_load (callexp);

    case INTRINSIC_VSTORE:
      return expand_volatile_store (callexp);

    default:
      gcc_unreachable ();
    }
}
862