1 /* intrinsics.cc -- D language compiler intrinsics.
2    Copyright (C) 2006-2020 Free Software Foundation, Inc.
3 
4 GCC is free software; you can redistribute it and/or modify
5 it under the terms of the GNU General Public License as published by
6 the Free Software Foundation; either version 3, or (at your option)
7 any later version.
8 
9 GCC is distributed in the hope that it will be useful,
10 but WITHOUT ANY WARRANTY; without even the implied warranty of
11 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
12 GNU General Public License for more details.
13 
14 You should have received a copy of the GNU General Public License
15 along with GCC; see the file COPYING3.  If not see
16 <http://www.gnu.org/licenses/>.  */
17 
18 #include "config.h"
19 #include "system.h"
20 #include "coretypes.h"
21 
22 #include "dmd/declaration.h"
23 #include "dmd/identifier.h"
24 #include "dmd/mangle.h"
25 #include "dmd/mangle.h"
26 #include "dmd/module.h"
27 #include "dmd/template.h"
28 
29 #include "tm.h"
30 #include "function.h"
31 #include "tree.h"
32 #include "fold-const.h"
33 #include "stringpool.h"
34 #include "builtins.h"
35 
36 #include "d-tree.h"
37 
38 
/* An internal struct used to hold information on D intrinsics.  One record
   exists per entry in intrinsics.def.  */

struct intrinsic_decl
{
  /* The DECL_FUNCTION_CODE of this decl.  */
  intrinsic_code code;

  /* The name of the intrinsic, matched against the function identifier.  */
  const char *name;

  /* The module where the intrinsic is located.  */
  const char *module;

  /* The mangled signature decoration of the intrinsic, matched against the
     type deco of the declaration.  */
  const char *deco;

  /* True if the intrinsic is only handled in CTFE.  */
  bool ctfeonly;
};
58 
/* The table of all D intrinsics recognized by the compiler, generated from
   the DEF_D_INTRINSIC entries in intrinsics.def.  Indexed by
   intrinsic_code.  */

static const intrinsic_decl intrinsic_decls[] =
{
#define DEF_D_INTRINSIC(CODE, ALIAS, NAME, MODULE, DECO, CTFE) \
    { INTRINSIC_ ## ALIAS, NAME, MODULE, DECO, CTFE },

#include "intrinsics.def"

#undef DEF_D_INTRINSIC
};
68 
69 /* Checks if DECL is an intrinsic or run time library function that requires
70    special processing.  Sets DECL_INTRINSIC_CODE so it can be identified
71    later in maybe_expand_intrinsic.  */
72 
73 void
maybe_set_intrinsic(FuncDeclaration * decl)74 maybe_set_intrinsic (FuncDeclaration *decl)
75 {
76   if (!decl->ident || decl->builtin != BUILTINunknown)
77     return;
78 
79   /* The builtin flag is updated only if we can evaluate the intrinsic
80      at compile-time.  Such as the math or bitop intrinsics.  */
81   decl->builtin = BUILTINno;
82 
83   /* Check if it's a compiler intrinsic.  We only require that any
84      internally recognised intrinsics are declared in a module with
85      an explicit module declaration.  */
86   Module *m = decl->getModule ();
87 
88   if (!m || !m->md)
89     return;
90 
91   TemplateInstance *ti = decl->isInstantiated ();
92   TemplateDeclaration *td = ti ? ti->tempdecl->isTemplateDeclaration () : NULL;
93 
94   const char *tname = decl->ident->toChars ();
95   const char *tmodule = m->md->toChars ();
96   const char *tdeco = (td == NULL) ? decl->type->deco : NULL;
97 
98   /* Look through all D intrinsics.  */
99   for (size_t i = 0; i < (int) INTRINSIC_LAST; i++)
100     {
101       if (!intrinsic_decls[i].name)
102 	continue;
103 
104       if (strcmp (intrinsic_decls[i].name, tname) != 0
105 	  || strcmp (intrinsic_decls[i].module, tmodule) != 0)
106 	continue;
107 
108       /* Instantiated functions would have the wrong type deco, get it from the
109 	 template member instead.  */
110       if (tdeco == NULL)
111 	{
112 	  if (!td || !td->onemember)
113 	    return;
114 
115 	  FuncDeclaration *fd = td->onemember->isFuncDeclaration ();
116 	  if (fd == NULL)
117 	    return;
118 
119 	  OutBuffer buf;
120 	  mangleToBuffer (fd->type, &buf);
121 	  tdeco = buf.extractString ();
122 	}
123 
124       /* Matching the type deco may be a bit too strict, as it means that all
125 	 function attributes that end up in the signature must be kept aligned
126 	 between the compiler and library declaration.  */
127       if (strcmp (intrinsic_decls[i].deco, tdeco) == 0)
128 	{
129 	  intrinsic_code code = intrinsic_decls[i].code;
130 
131 	  if (decl->csym == NULL)
132 	    get_symbol_decl (decl);
133 
134 	  /* If there is no function body, then the implementation is always
135 	     provided by the compiler.  */
136 	  if (!decl->fbody)
137 	    set_decl_built_in_function (decl->csym, BUILT_IN_FRONTEND, code);
138 
139 	  /* Infer whether the intrinsic can be used for CTFE, let the
140 	     front-end know that it can be evaluated at compile-time.  */
141 	  switch (code)
142 	    {
143 	    case INTRINSIC_VA_ARG:
144 	    case INTRINSIC_C_VA_ARG:
145 	    case INTRINSIC_VASTART:
146 	    case INTRINSIC_ADDS:
147 	    case INTRINSIC_SUBS:
148 	    case INTRINSIC_MULS:
149 	    case INTRINSIC_NEGS:
150 	    case INTRINSIC_VLOAD:
151 	    case INTRINSIC_VSTORE:
152 	      break;
153 
154 	    case INTRINSIC_POW:
155 	    {
156 	      /* Check that this overload of pow() is has an equivalent
157 		 built-in function.  It could be `int pow(int, int)'.  */
158 	      tree rettype = TREE_TYPE (TREE_TYPE (decl->csym));
159 	      if (mathfn_built_in (rettype, BUILT_IN_POW) != NULL_TREE)
160 		decl->builtin = BUILTINyes;
161 	      break;
162 	    }
163 
164 	    default:
165 	      decl->builtin = BUILTINyes;
166 	      break;
167 	    }
168 
169 	  /* The intrinsic was marked as CTFE-only.  */
170 	  if (intrinsic_decls[i].ctfeonly)
171 	    DECL_BUILT_IN_CTFE (decl->csym) = 1;
172 
173 	  DECL_INTRINSIC_CODE (decl->csym) = code;
174 	  break;
175 	}
176     }
177 }
178 
179 /* Construct a function call to the built-in function CODE, N is the number of
180    arguments, and the `...' parameters are the argument expressions.
181    The original call expression is held in CALLEXP.  */
182 
183 static tree
call_builtin_fn(tree callexp,built_in_function code,int n,...)184 call_builtin_fn (tree callexp, built_in_function code, int n, ...)
185 {
186   tree *argarray = XALLOCAVEC (tree, n);
187   va_list ap;
188 
189   va_start (ap, n);
190   for (int i = 0; i < n; i++)
191     argarray[i] = va_arg (ap, tree);
192   va_end (ap);
193 
194   tree exp = build_call_expr_loc_array (EXPR_LOCATION (callexp),
195 					builtin_decl_explicit (code),
196 					n, argarray);
197   return convert (TREE_TYPE (callexp), fold (exp));
198 }
199 
/* Expand a front-end intrinsic call to bsf().  This takes one argument,
   the signature to which can be either:

	int bsf (uint arg);
	int bsf (ulong arg);

   This scans all bits in the given argument starting with the first,
   returning the bit number of the first bit set.  The original call
   expression is held in CALLEXP.  */

static tree
expand_intrinsic_bsf (tree callexp)
{
  /* The bsf() intrinsic gets turned into __builtin_ctz(arg).
     The return value is supposed to be undefined if arg is zero.  */
  tree arg = CALL_EXPR_ARG (callexp, 0);
  int argsize = TYPE_PRECISION (TREE_TYPE (arg));

  /* Which variant of __builtin_ctz* should we call?  */
  built_in_function code = (argsize <= INT_TYPE_SIZE) ? BUILT_IN_CTZ
    : (argsize <= LONG_TYPE_SIZE) ? BUILT_IN_CTZL
    : (argsize <= LONG_LONG_TYPE_SIZE) ? BUILT_IN_CTZLL
    : END_BUILTINS;

  gcc_assert (code != END_BUILTINS);

  return call_builtin_fn (callexp, code, 1, arg);
}
228 
229 /* Expand a front-end instrinsic call to bsr().  This takes one argument,
230    the signature to which can be either:
231 
232 	int bsr (uint arg);
233 	int bsr (ulong arg);
234 
235    This scans all bits in the given argument from the most significant bit
236    to the least significant, returning the bit number of the first bit set.
237    The original call expression is held in CALLEXP.  */
238 
239 static tree
expand_intrinsic_bsr(tree callexp)240 expand_intrinsic_bsr (tree callexp)
241 {
242   /* The bsr() intrinsic gets turned into (size - 1) - __builtin_clz(arg).
243      The return value is supposed to be undefined if arg is zero.  */
244   tree arg = CALL_EXPR_ARG (callexp, 0);
245   tree type = TREE_TYPE (arg);
246   int argsize = TYPE_PRECISION (type);
247 
248   /* Which variant of __builtin_clz* should we call?  */
249   built_in_function code = (argsize <= INT_TYPE_SIZE) ? BUILT_IN_CLZ
250     : (argsize <= LONG_TYPE_SIZE) ? BUILT_IN_CLZL
251     : (argsize <= LONG_LONG_TYPE_SIZE) ? BUILT_IN_CLZLL
252     : END_BUILTINS;
253 
254   gcc_assert (code != END_BUILTINS);
255 
256   tree result = call_builtin_fn (callexp, code, 1, arg);
257 
258   /* Handle int -> long conversions.  */
259   if (TREE_TYPE (result) != type)
260     result = fold_convert (type, result);
261 
262   result = fold_build2 (MINUS_EXPR, type,
263 			build_integer_cst (argsize - 1, type), result);
264   return fold_convert (TREE_TYPE (callexp), result);
265 }
266 
/* Expand a front-end intrinsic call to INTRINSIC, which is either a call to
   bt(), btc(), btr(), or bts().  These intrinsics expect to take two arguments,
   the signature to which is:

	int bt (size_t* ptr, size_t bitnum);

   All intrinsics test if a bit is set and return the result of that condition.
   Variants of `bt' will then update that bit. `btc' complements the bit, `bts'
   sets the bit, and `btr' resets the bit.  The original call expression is
   held in CALLEXP.  */

static tree
expand_intrinsic_bt (intrinsic_code intrinsic, tree callexp)
{
  tree ptr = CALL_EXPR_ARG (callexp, 0);
  tree bitnum = CALL_EXPR_ARG (callexp, 1);
  /* The element type pointed to by ptr, used for all bit arithmetic.  */
  tree type = TREE_TYPE (TREE_TYPE (ptr));

  /* size_t bitsize = sizeof(*ptr) * BITS_PER_UNIT;  */
  tree bitsize = fold_convert (type, TYPE_SIZE (type));

  /* ptr[bitnum / bitsize]  */
  ptr = build_array_index (ptr, fold_build2 (TRUNC_DIV_EXPR, type,
					     bitnum, bitsize));
  ptr = indirect_ref (type, ptr);

  /* mask = 1 << (bitnum % bitsize);  */
  bitnum = fold_build2 (TRUNC_MOD_EXPR, type, bitnum, bitsize);
  bitnum = fold_build2 (LSHIFT_EXPR, type, size_one_node, bitnum);

  /* cond = ptr[bitnum / size] & mask;  */
  tree cond = fold_build2 (BIT_AND_EXPR, type, ptr, bitnum);

  /* cond ? -1 : 0;  */
  cond = build_condition (TREE_TYPE (callexp), d_truthvalue_conversion (cond),
			 integer_minus_one_node, integer_zero_node);

  /* Update the bit as needed, only testing the bit for bt().  */
  if (intrinsic == INTRINSIC_BT)
    return cond;

  /* Select the update operation: btc flips, btr clears, bts sets.  */
  tree_code code = (intrinsic == INTRINSIC_BTC) ? BIT_XOR_EXPR
    : (intrinsic == INTRINSIC_BTR) ? BIT_AND_EXPR
    : (intrinsic == INTRINSIC_BTS) ? BIT_IOR_EXPR
    : ERROR_MARK;
  gcc_assert (code != ERROR_MARK);

  /* ptr[bitnum / size] op= mask;  btr ANDs with the inverted mask.  */
  if (intrinsic == INTRINSIC_BTR)
    bitnum = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (bitnum), bitnum);

  ptr = modify_expr (ptr, fold_build2 (code, TREE_TYPE (ptr), ptr, bitnum));

  /* Store the condition result in a temporary, and return expressions in
     correct order of evaluation.  */
  tree tmp = build_local_temp (TREE_TYPE (callexp));
  cond = modify_expr (tmp, cond);

  return compound_expr (cond, compound_expr (ptr, tmp));
}
327 
328 /* Expand a front-end intrinsic call to bswap().  This takes one argument, the
329    signature to which can be either:
330 
331 	int bswap (uint arg);
332 	int bswap (ulong arg);
333 
334    This swaps all bytes in an N byte type end-to-end.  The original call
335    expression is held in CALLEXP.  */
336 
337 static tree
expand_intrinsic_bswap(tree callexp)338 expand_intrinsic_bswap (tree callexp)
339 {
340   tree arg = CALL_EXPR_ARG (callexp, 0);
341   int argsize = TYPE_PRECISION (TREE_TYPE (arg));
342 
343   /* Which variant of __builtin_bswap* should we call?  */
344   built_in_function code = (argsize == 32) ? BUILT_IN_BSWAP32
345     : (argsize == 64) ? BUILT_IN_BSWAP64
346     : END_BUILTINS;
347 
348   gcc_assert (code != END_BUILTINS);
349 
350   return call_builtin_fn (callexp, code, 1, arg);
351 }
352 
353 /* Expand a front-end intrinsic call to popcnt().  This takes one argument, the
354    signature to which can be either:
355 
356 	int popcnt (uint arg);
357 	int popcnt (ulong arg);
358 
359    Calculates the number of set bits in an integer.  The original call
360    expression is held in CALLEXP.  */
361 
362 static tree
expand_intrinsic_popcnt(tree callexp)363 expand_intrinsic_popcnt (tree callexp)
364 {
365   tree arg = CALL_EXPR_ARG (callexp, 0);
366   int argsize = TYPE_PRECISION (TREE_TYPE (arg));
367 
368   /* Which variant of __builtin_popcount* should we call?  */
369   built_in_function code = (argsize <= INT_TYPE_SIZE) ? BUILT_IN_POPCOUNT
370     : (argsize <= LONG_TYPE_SIZE) ? BUILT_IN_POPCOUNTL
371     : (argsize <= LONG_LONG_TYPE_SIZE) ? BUILT_IN_POPCOUNTLL
372     : END_BUILTINS;
373 
374   gcc_assert (code != END_BUILTINS);
375 
376   return call_builtin_fn (callexp, code, 1, arg);
377 }
378 
379 /* Expand a front-end intrinsic call to INTRINSIC, which is either a call to
380    sqrt(), sqrtf(), sqrtl().  These intrinsics expect to take one argument,
381    the signature to which can be either:
382 
383 	float sqrt (float arg);
384 	double sqrt (double arg);
385 	real sqrt (real arg);
386 
387    This computes the square root of the given argument.  The original call
388    expression is held in CALLEXP.  */
389 
390 static tree
expand_intrinsic_sqrt(intrinsic_code intrinsic,tree callexp)391 expand_intrinsic_sqrt (intrinsic_code intrinsic, tree callexp)
392 {
393   tree arg = CALL_EXPR_ARG (callexp, 0);
394 
395   /* Which variant of __builtin_sqrt* should we call?  */
396   built_in_function code = (intrinsic == INTRINSIC_SQRT) ? BUILT_IN_SQRT
397     : (intrinsic == INTRINSIC_SQRTF) ? BUILT_IN_SQRTF
398     : (intrinsic == INTRINSIC_SQRTL) ? BUILT_IN_SQRTL
399     : END_BUILTINS;
400 
401   gcc_assert (code != END_BUILTINS);
402   return call_builtin_fn (callexp, code, 1, arg);
403 }
404 
405 /* Expand a front-end intrinsic call to copysign().  This takes two arguments,
406    the signature to which can be either:
407 
408 	float copysign (T to, float from);
409 	double copysign (T to, double from);
410 	real copysign (T to, real from);
411 
412    This computes a value composed of TO with the sign bit of FROM.  The original
413    call expression is held in CALLEXP.  */
414 
415 static tree
expand_intrinsic_copysign(tree callexp)416 expand_intrinsic_copysign (tree callexp)
417 {
418   tree to = CALL_EXPR_ARG (callexp, 0);
419   tree from = CALL_EXPR_ARG (callexp, 1);
420   tree type = TREE_TYPE (to);
421 
422   /* Convert parameters to the same type.  Prefer the first parameter unless it
423      is an integral type.  */
424   if (INTEGRAL_TYPE_P (type))
425     {
426       to = fold_convert (TREE_TYPE (from), to);
427       type = TREE_TYPE (to);
428     }
429   else
430     from = fold_convert (type, from);
431 
432   /* Which variant of __builtin_copysign* should we call?  */
433   built_in_function code = (type == float_type_node) ? BUILT_IN_COPYSIGNF
434     : (type == double_type_node) ? BUILT_IN_COPYSIGN
435     : (type == long_double_type_node) ? BUILT_IN_COPYSIGNL
436     : END_BUILTINS;
437 
438   gcc_assert (code != END_BUILTINS);
439 
440   return call_builtin_fn (callexp, code, 2, to, from);
441 }
442 
443 /* Expand a front-end intrinsic call to pow().  This takes two arguments, the
444    signature to which can be either:
445 
446 	float pow (float base, T exponent);
447 	double pow (double base, T exponent);
448 	real pow (real base, T exponent);
449 
450    This computes the value of BASE raised to the power of EXPONENT.
451    The original call expression is held in CALLEXP.  */
452 
453 static tree
expand_intrinsic_pow(tree callexp)454 expand_intrinsic_pow (tree callexp)
455 {
456   tree base = CALL_EXPR_ARG (callexp, 0);
457   tree exponent = CALL_EXPR_ARG (callexp, 1);
458   tree exptype = TREE_TYPE (exponent);
459 
460   /* Which variant of __builtin_pow* should we call?  */
461   built_in_function code = SCALAR_FLOAT_TYPE_P (exptype) ? BUILT_IN_POW
462     : INTEGRAL_TYPE_P (exptype) ? BUILT_IN_POWI
463     : END_BUILTINS;
464   gcc_assert (code != END_BUILTINS);
465 
466   tree builtin = mathfn_built_in (TREE_TYPE (base), code);
467   gcc_assert (builtin != NULL_TREE);
468 
469   return call_builtin_fn (callexp, DECL_FUNCTION_CODE (builtin), 2,
470 			  base, exponent);
471 }
472 
473 /* Expand a front-end intrinsic call to toPrec().  This takes one argument, the
474    signature to which can be either:
475 
476 	T toPrec(T)(float f);
477 	T toPrec(T)(double f);
478 	T toPrec(T)(real f);
479 
480     This rounds the argument F to the precision of the specified floating
481     point type T.  The original call expression is held in CALLEXP.  */
482 
483 static tree
expand_intrinsic_toprec(tree callexp)484 expand_intrinsic_toprec (tree callexp)
485 {
486   tree f = CALL_EXPR_ARG (callexp, 0);
487   tree type = TREE_TYPE (callexp);
488 
489   return convert (type, f);
490 }
491 
/* Expand a front-end intrinsic call to va_arg().  This takes either one or two
   arguments, the signature to which can be either:

	T va_arg(T) (ref va_list ap);
	void va_arg(T) (va_list ap, ref T parmn);

   This retrieves the next variadic parameter that is type T from the given
   va_list.  If also given, store the value into parmn, otherwise return it.
   The original call expression is held in CALLEXP.  */

static tree
expand_intrinsic_vaarg (tree callexp)
{
  tree ap = CALL_EXPR_ARG (callexp, 0);
  tree parmn = NULL_TREE;
  tree type;

  STRIP_NOPS (ap);

  /* One-argument form: the value type is the return type of the call.  */
  if (call_expr_nargs (callexp) == 1)
    type = TREE_TYPE (callexp);
  else
    {
      /* Two-argument form: the value type comes from the output parameter.  */
      parmn = CALL_EXPR_ARG (callexp, 1);
      STRIP_NOPS (parmn);

      /* The `ref' argument to va_arg is either an address or reference,
	 get the value of it.  */
      if (TREE_CODE (parmn) == PARM_DECL && POINTER_TYPE_P (TREE_TYPE (parmn)))
	parmn = build_deref (parmn);
      else
	{
	  gcc_assert (TREE_CODE (parmn) == ADDR_EXPR);
	  parmn = TREE_OPERAND (parmn, 0);
	}

      type = TREE_TYPE (parmn);
    }

  /* (T) VA_ARG_EXP<ap>;  */
  tree exp = build1 (VA_ARG_EXPR, type, ap);

  /* parmn = (T) VA_ARG_EXP<ap>;  */
  if (parmn != NULL_TREE)
    exp = modify_expr (parmn, exp);

  return exp;
}
540 
/* Expand a front-end intrinsic call to va_start(), which takes two arguments,
   the signature to which is:

	void va_start(T) (out va_list ap, ref T parmn);

   This initializes the va_list type, where parmn should be the last named
   parameter.  The original call expression is held in CALLEXP.  */

static tree
expand_intrinsic_vastart (tree callexp)
{
  tree ap = CALL_EXPR_ARG (callexp, 0);
  tree parmn = CALL_EXPR_ARG (callexp, 1);

  STRIP_NOPS (ap);
  STRIP_NOPS (parmn);

  /* The va_list argument should already have its address taken.  The second
     argument, however, is inout and that needs to be fixed to prevent a
     warning.  Could be casting, so need to check type too?  */
  gcc_assert (TREE_CODE (ap) == ADDR_EXPR
	      || (TREE_CODE (ap) == PARM_DECL
		  && POINTER_TYPE_P (TREE_TYPE (ap))));

  /* Assuming nobody tries to change the return type.  */
  if (TREE_CODE (parmn) != PARM_DECL)
    {
      gcc_assert (TREE_CODE (parmn) == ADDR_EXPR);
      parmn = TREE_OPERAND (parmn, 0);
    }

  /* Delegate to __builtin_va_start with the unwrapped arguments.  */
  return call_builtin_fn (callexp, BUILT_IN_VA_START, 2, ap, parmn);
}
574 
/* Expand a front-end intrinsic call to INTRINSIC, which is either a call to
   adds(), addu(), subs(), subu(), negs(), muls(), or mulu().  These intrinsics
   expect to take two or three arguments, the signature to which can be either:

	int adds (int x, int y, ref bool overflow);
	long adds (long x, long y, ref bool overflow);
	int negs (int x, ref bool overflow);
	long negs (long x, ref bool overflow);

   This performs an operation on two signed or unsigned integers, checking for
   overflow.  The overflow is sticky, meaning that a sequence of operations
   can be done and overflow need only be checked at the end.  The original call
   expression is held in CALLEXP.  */

static tree
expand_intrinsic_checkedint (intrinsic_code intrinsic, tree callexp)
{
  tree type = TREE_TYPE (callexp);
  tree x;
  tree y;
  tree overflow;

  /* The negs() intrinsic gets turned into SUB_OVERFLOW (0, y).  */
  if (intrinsic == INTRINSIC_NEGS)
    {
      x = fold_convert (type, integer_zero_node);
      y = CALL_EXPR_ARG (callexp, 0);
      overflow = CALL_EXPR_ARG (callexp, 1);
    }
  else
    {
      x = CALL_EXPR_ARG (callexp, 0);
      y = CALL_EXPR_ARG (callexp, 1);
      overflow = CALL_EXPR_ARG (callexp, 2);
    }

  /* Which variant of *_OVERFLOW should we generate?  */
  internal_fn icode = (intrinsic == INTRINSIC_ADDS) ? IFN_ADD_OVERFLOW
    : (intrinsic == INTRINSIC_SUBS) ? IFN_SUB_OVERFLOW
    : (intrinsic == INTRINSIC_MULS) ? IFN_MUL_OVERFLOW
    : (intrinsic == INTRINSIC_NEGS) ? IFN_SUB_OVERFLOW
    : IFN_LAST;
  gcc_assert (icode != IFN_LAST);

  /* The internal function returns a complex value: the real part is the
     operation result, the imaginary part the overflow flag.  */
  tree result
    = build_call_expr_internal_loc (EXPR_LOCATION (callexp), icode,
				    build_complex_type (type), 2, x, y);

  STRIP_NOPS (overflow);
  overflow = build_deref (overflow);

  /* Assign returned result to overflow parameter, however if overflow is
     already true, maintain its value.  */
  type = TREE_TYPE (overflow);
  /* save_expr so RESULT is evaluated once despite being referenced twice.  */
  result = save_expr (result);

  tree exp = fold_build2 (BIT_IOR_EXPR, type, overflow,
			  fold_convert (type, imaginary_part (result)));
  exp = modify_expr (overflow, exp);

  /* Return the value of result.  */
  return compound_expr (exp, real_part (result));
}
638 
639 /* Expand a front-end instrinsic call to volatileLoad().  This takes one
640    argument, the signature to which can be either:
641 
642 	ubyte volatileLoad (ubyte* ptr);
643 	ushort volatileLoad (ushort* ptr);
644 	uint volatileLoad (uint* ptr);
645 	ulong volatileLoad (ulong* ptr);
646 
647    This reads a value from the memory location indicated by ptr.  Calls to
648    them are be guaranteed to not be removed (such as during DCE) or reordered
649    in the same thread.  The original call expression is held in CALLEXP.  */
650 
651 static tree
expand_volatile_load(tree callexp)652 expand_volatile_load (tree callexp)
653 {
654   tree ptr = CALL_EXPR_ARG (callexp, 0);
655   tree ptrtype = TREE_TYPE (ptr);
656   gcc_assert (POINTER_TYPE_P (ptrtype));
657 
658   /* (T) *(volatile T *) ptr;  */
659   tree type = build_qualified_type (TREE_TYPE (ptrtype), TYPE_QUAL_VOLATILE);
660   tree result = indirect_ref (type, ptr);
661   TREE_THIS_VOLATILE (result) = 1;
662 
663   return result;
664 }
665 
666 /* Expand a front-end instrinsic call to volatileStore().  This takes two
667    arguments, the signature to which can be either:
668 
669 	void volatileStore (ubyte* ptr, ubyte value);
670 	void volatileStore (ushort* ptr, ushort value);
671 	void volatileStore (uint* ptr, uint value);
672 	void volatileStore (ulong* ptr, ulong value);
673 
674    This writes a value to the memory location indicated by ptr.  Calls to
675    them are be guaranteed to not be removed (such as during DCE) or reordered
676    in the same thread.  The original call expression is held in CALLEXP.  */
677 
678 static tree
expand_volatile_store(tree callexp)679 expand_volatile_store (tree callexp)
680 {
681   tree ptr = CALL_EXPR_ARG (callexp, 0);
682   tree ptrtype = TREE_TYPE (ptr);
683   gcc_assert (POINTER_TYPE_P (ptrtype));
684 
685   /* (T) *(volatile T *) ptr;  */
686   tree type = build_qualified_type (TREE_TYPE (ptrtype), TYPE_QUAL_VOLATILE);
687   tree result = indirect_ref (type, ptr);
688   TREE_THIS_VOLATILE (result) = 1;
689 
690   /* (*(volatile T *) ptr) = value;  */
691   tree value = CALL_EXPR_ARG (callexp, 1);
692   return modify_expr (result, value);
693 }
694 
/* If CALLEXP is for an intrinsic, expand and return inlined compiler
   generated instructions.  Most map directly to GCC builtins, others
   require a little extra work around them.  */

tree
maybe_expand_intrinsic (tree callexp)
{
  tree callee = CALL_EXPR_FN (callexp);

  /* Look through the address-of wrapper to the function decl itself.  */
  if (TREE_CODE (callee) == ADDR_EXPR)
    callee = TREE_OPERAND (callee, 0);

  if (TREE_CODE (callee) != FUNCTION_DECL)
    return callexp;

  /* Don't expand CTFE-only intrinsics outside of semantic processing.  */
  if (DECL_BUILT_IN_CTFE (callee) && !doing_semantic_analysis_p)
    return callexp;

  intrinsic_code intrinsic = DECL_INTRINSIC_CODE (callexp ? callee : callee);
  built_in_function code;

  switch (intrinsic)
    {
    case INTRINSIC_NONE:
      return callexp;

    case INTRINSIC_BSF:
      return expand_intrinsic_bsf (callexp);

    case INTRINSIC_BSR:
      return expand_intrinsic_bsr (callexp);

    case INTRINSIC_BT:
    case INTRINSIC_BTC:
    case INTRINSIC_BTR:
    case INTRINSIC_BTS:
      return expand_intrinsic_bt (intrinsic, callexp);

    case INTRINSIC_BSWAP:
      return expand_intrinsic_bswap (callexp);

    case INTRINSIC_POPCNT:
      return expand_intrinsic_popcnt (callexp);

    /* Math intrinsics are mapped to the long double (*L) built-in
       variants, matching the D `real' type.  */
    case INTRINSIC_COS:
      return call_builtin_fn (callexp, BUILT_IN_COSL, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_SIN:
      return call_builtin_fn (callexp, BUILT_IN_SINL, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_RNDTOL:
      /* Not sure if llroundl stands as a good replacement for the
	 expected behavior of rndtol.  */
      return call_builtin_fn (callexp, BUILT_IN_LLROUNDL, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_SQRT:
    case INTRINSIC_SQRTF:
    case INTRINSIC_SQRTL:
      return expand_intrinsic_sqrt (intrinsic, callexp);

    case INTRINSIC_LDEXP:
      return call_builtin_fn (callexp, BUILT_IN_LDEXPL, 2,
			      CALL_EXPR_ARG (callexp, 0),
			      CALL_EXPR_ARG (callexp, 1));

    case INTRINSIC_FABS:
      return call_builtin_fn (callexp, BUILT_IN_FABSL, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_RINT:
      return call_builtin_fn (callexp, BUILT_IN_RINTL, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_TAN:
      return call_builtin_fn (callexp, BUILT_IN_TANL, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_ISNAN:
      return call_builtin_fn (callexp, BUILT_IN_ISNAN, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_ISINFINITY:
      return call_builtin_fn (callexp, BUILT_IN_ISINF, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_ISFINITE:
      return call_builtin_fn (callexp, BUILT_IN_ISFINITE, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_EXP:
      return call_builtin_fn (callexp, BUILT_IN_EXPL, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_EXPM1:
      return call_builtin_fn (callexp, BUILT_IN_EXPM1L, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_EXP2:
      return call_builtin_fn (callexp, BUILT_IN_EXP2L, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_LOG:
      return call_builtin_fn (callexp, BUILT_IN_LOGL, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_LOG2:
      return call_builtin_fn (callexp, BUILT_IN_LOG2L, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_LOG10:
      return call_builtin_fn (callexp, BUILT_IN_LOG10L, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_ROUND:
      return call_builtin_fn (callexp, BUILT_IN_ROUNDL, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_FLOORF:
    case INTRINSIC_FLOOR:
    case INTRINSIC_FLOORL:
      code = (intrinsic == INTRINSIC_FLOOR) ? BUILT_IN_FLOOR
	: (intrinsic == INTRINSIC_FLOORF) ? BUILT_IN_FLOORF
	: BUILT_IN_FLOORL;
      return call_builtin_fn (callexp, code, 1, CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_CEILF:
    case INTRINSIC_CEIL:
    case INTRINSIC_CEILL:
      code = (intrinsic == INTRINSIC_CEIL) ? BUILT_IN_CEIL
	: (intrinsic == INTRINSIC_CEILF) ? BUILT_IN_CEILF
	: BUILT_IN_CEILL;
      return call_builtin_fn (callexp, code, 1, CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_TRUNC:
      return call_builtin_fn (callexp, BUILT_IN_TRUNCL, 1,
			      CALL_EXPR_ARG (callexp, 0));

    case INTRINSIC_FMIN:
      return call_builtin_fn (callexp, BUILT_IN_FMINL, 2,
			      CALL_EXPR_ARG (callexp, 0),
			      CALL_EXPR_ARG (callexp, 1));

    case INTRINSIC_FMAX:
      return call_builtin_fn (callexp, BUILT_IN_FMAXL, 2,
			      CALL_EXPR_ARG (callexp, 0),
			      CALL_EXPR_ARG (callexp, 1));

    case INTRINSIC_COPYSIGN:
      return expand_intrinsic_copysign (callexp);

    case INTRINSIC_POW:
      return expand_intrinsic_pow (callexp);

    case INTRINSIC_FMA:
      return call_builtin_fn (callexp, BUILT_IN_FMAL, 3,
			      CALL_EXPR_ARG (callexp, 0),
			      CALL_EXPR_ARG (callexp, 1),
			      CALL_EXPR_ARG (callexp, 2));

    case INTRINSIC_TOPREC:
      return expand_intrinsic_toprec (callexp);

    case INTRINSIC_VA_ARG:
    case INTRINSIC_C_VA_ARG:
      return expand_intrinsic_vaarg (callexp);

    case INTRINSIC_VASTART:
      return expand_intrinsic_vastart (callexp);

    case INTRINSIC_ADDS:
    case INTRINSIC_SUBS:
    case INTRINSIC_MULS:
    case INTRINSIC_NEGS:
      return expand_intrinsic_checkedint (intrinsic, callexp);

    case INTRINSIC_VLOAD:
      return expand_volatile_load (callexp);

    case INTRINSIC_VSTORE:
      return expand_volatile_store (callexp);

    default:
      gcc_unreachable ();
    }
}
884