xref: /netbsd/external/gpl3/gcc/dist/gcc/d/intrinsics.cc (revision f0fbc68b)
1 /* intrinsics.cc -- D language compiler intrinsics.
2    Copyright (C) 2006-2022 Free Software Foundation, Inc.
3 
4 GCC is free software; you can redistribute it and/or modify
5 it under the terms of the GNU General Public License as published by
6 the Free Software Foundation; either version 3, or (at your option)
7 any later version.
8 
9 GCC is distributed in the hope that it will be useful,
10 but WITHOUT ANY WARRANTY; without even the implied warranty of
11 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
12 GNU General Public License for more details.
13 
14 You should have received a copy of the GNU General Public License
15 along with GCC; see the file COPYING3.  If not see
16 <http://www.gnu.org/licenses/>.  */
17 
18 #include "config.h"
19 #include "system.h"
20 #include "coretypes.h"
21 
22 #include "dmd/declaration.h"
23 #include "dmd/expression.h"
24 #include "dmd/identifier.h"
25 #include "dmd/mangle.h"
26 #include "dmd/module.h"
27 #include "dmd/template.h"
28 
29 #include "tm.h"
30 #include "function.h"
31 #include "tree.h"
32 #include "fold-const.h"
33 #include "stringpool.h"
34 #include "builtins.h"
35 
36 #include "d-tree.h"
37 
38 
/* An internal struct used to hold information on D intrinsics.  One entry
   exists per intrinsic recognised by the compiler (see intrinsics.def).  */

struct intrinsic_decl
{
  /* The DECL_INTRINSIC_CODE of this decl.  */
  intrinsic_code code;

  /* The DECL_FUNCTION_CODE of this decl, if it directly maps to any.  */
  built_in_function built_in;

  /* The name of the intrinsic.  */
  const char *name;

  /* The module where the intrinsic is located.  */
  const char *module;

  /* The mangled signature decoration of the intrinsic.  */
  const char *deco;

  /* True if the intrinsic is only handled in CTFE.  */
  bool ctfeonly;
};
61 
/* Table of all D intrinsics recognised by the compiler, generated from
   intrinsics.def.  NOTE: maybe_expand_intrinsic indexes this table directly
   by intrinsic_code, so the entries must stay in enum order.  */

static const intrinsic_decl intrinsic_decls[] =
{
#define DEF_D_INTRINSIC(CODE, BUILTIN, NAME, MODULE, DECO, CTFE) \
    { CODE, BUILTIN, NAME, MODULE, DECO, CTFE },

#include "intrinsics.def"

#undef DEF_D_INTRINSIC
};
71 
72 /* Checks if DECL is an intrinsic or run time library function that requires
73    special processing.  Sets DECL_INTRINSIC_CODE so it can be identified
74    later in maybe_expand_intrinsic.  */
75 
76 void
maybe_set_intrinsic(FuncDeclaration * decl)77 maybe_set_intrinsic (FuncDeclaration *decl)
78 {
79   if (!decl->ident || decl->builtin != BUILTIN::unknown)
80     return;
81 
82   /* The builtin flag is updated only if we can evaluate the intrinsic
83      at compile-time.  Such as the math or bitop intrinsics.  */
84   decl->builtin = BUILTIN::unimp;
85 
86   /* Check if it's a compiler intrinsic.  We only require that any
87      internally recognised intrinsics are declared in a module with
88      an explicit module declaration.  */
89   Module *m = decl->getModule ();
90 
91   if (!m || !m->md)
92     return;
93 
94   TemplateInstance *ti = decl->isInstantiated ();
95   TemplateDeclaration *td = ti ? ti->tempdecl->isTemplateDeclaration () : NULL;
96 
97   const char *tname = decl->ident->toChars ();
98   const char *tmodule = m->md->toChars ();
99   const char *tdeco = (td == NULL) ? decl->type->deco : NULL;
100 
101   /* Look through all D intrinsics.  */
102   for (size_t i = 0; i < (int) INTRINSIC_LAST; i++)
103     {
104       if (!intrinsic_decls[i].name)
105 	continue;
106 
107       if (strcmp (intrinsic_decls[i].name, tname) != 0
108 	  || strcmp (intrinsic_decls[i].module, tmodule) != 0)
109 	continue;
110 
111       /* Instantiated functions would have the wrong type deco, get it from the
112 	 template member instead.  */
113       if (tdeco == NULL)
114 	{
115 	  if (!td || !td->onemember)
116 	    return;
117 
118 	  FuncDeclaration *fd = td->onemember->isFuncDeclaration ();
119 	  if (fd == NULL)
120 	    return;
121 
122 	  OutBuffer buf;
123 	  mangleToBuffer (fd->type, &buf);
124 	  tdeco = buf.extractChars ();
125 	}
126 
127       /* Matching the type deco may be a bit too strict, as it means that all
128 	 function attributes that end up in the signature must be kept aligned
129 	 between the compiler and library declaration.  */
130       if (strcmp (intrinsic_decls[i].deco, tdeco) == 0)
131 	{
132 	  intrinsic_code code = intrinsic_decls[i].code;
133 
134 	  if (decl->csym == NULL)
135 	    get_symbol_decl (decl);
136 
137 	  /* If there is no function body, then the implementation is always
138 	     provided by the compiler.  */
139 	  if (!decl->fbody)
140 	    set_decl_built_in_function (decl->csym, BUILT_IN_FRONTEND, code);
141 
142 	  /* Infer whether the intrinsic can be used for CTFE, let the
143 	     front-end know that it can be evaluated at compile-time.  */
144 	  switch (code)
145 	    {
146 	    case INTRINSIC_VA_ARG:
147 	    case INTRINSIC_C_VA_ARG:
148 	    case INTRINSIC_VASTART:
149 	    case INTRINSIC_ADDS:
150 	    case INTRINSIC_ADDSL:
151 	    case INTRINSIC_ADDU:
152 	    case INTRINSIC_ADDUL:
153 	    case INTRINSIC_SUBS:
154 	    case INTRINSIC_SUBSL:
155 	    case INTRINSIC_SUBU:
156 	    case INTRINSIC_SUBUL:
157 	    case INTRINSIC_MULS:
158 	    case INTRINSIC_MULSL:
159 	    case INTRINSIC_MULU:
160 	    case INTRINSIC_MULUI:
161 	    case INTRINSIC_MULUL:
162 	    case INTRINSIC_NEGS:
163 	    case INTRINSIC_NEGSL:
164 	    case INTRINSIC_VLOAD8:
165 	    case INTRINSIC_VLOAD16:
166 	    case INTRINSIC_VLOAD32:
167 	    case INTRINSIC_VLOAD64:
168 	    case INTRINSIC_VSTORE8:
169 	    case INTRINSIC_VSTORE16:
170 	    case INTRINSIC_VSTORE32:
171 	    case INTRINSIC_VSTORE64:
172 	      break;
173 
174 	    case INTRINSIC_POW:
175 	    {
176 	      /* Check that this overload of pow() is has an equivalent
177 		 built-in function.  It could be `int pow(int, int)'.  */
178 	      tree rettype = TREE_TYPE (TREE_TYPE (decl->csym));
179 	      if (mathfn_built_in (rettype, BUILT_IN_POW) != NULL_TREE)
180 		decl->builtin = BUILTIN::gcc;
181 	      break;
182 	    }
183 
184 	    default:
185 	      decl->builtin = BUILTIN::gcc;
186 	      break;
187 	    }
188 
189 	  /* The intrinsic was marked as CTFE-only.  */
190 	  if (intrinsic_decls[i].ctfeonly)
191 	    DECL_BUILT_IN_CTFE (decl->csym) = 1;
192 
193 	  DECL_INTRINSIC_CODE (decl->csym) = code;
194 	  break;
195 	}
196     }
197 }
198 
199 /* Construct a function call to the built-in function CODE, N is the number of
200    arguments, and the `...' parameters are the argument expressions.
201    The original call expression is held in CALLEXP.  */
202 
203 static tree
call_builtin_fn(tree callexp,built_in_function code,int n,...)204 call_builtin_fn (tree callexp, built_in_function code, int n, ...)
205 {
206   tree *argarray = XALLOCAVEC (tree, n);
207   va_list ap;
208 
209   va_start (ap, n);
210   for (int i = 0; i < n; i++)
211     argarray[i] = va_arg (ap, tree);
212   va_end (ap);
213 
214   tree exp = build_call_expr_loc_array (EXPR_LOCATION (callexp),
215 					builtin_decl_explicit (code),
216 					n, argarray);
217   return convert (TREE_TYPE (callexp), fold (exp));
218 }
219 
/* Expand a front-end intrinsic call to bsf().  This takes one argument,
   the signature to which can be either:

	int bsf (uint arg);
	int bsf (ulong arg);

   This scans all bits in the given argument starting with the first,
   returning the bit number of the first bit set.  The original call
   expression is held in CALLEXP.  */

static tree
expand_intrinsic_bsf (tree callexp)
{
  /* The bsf() intrinsic gets turned into __builtin_ctz(arg).
     The return value is supposed to be undefined if arg is zero.  */
  tree arg = CALL_EXPR_ARG (callexp, 0);
  int argsize = TYPE_PRECISION (TREE_TYPE (arg));

  /* Which variant of __builtin_ctz* should we call?  */
  built_in_function code = (argsize <= INT_TYPE_SIZE) ? BUILT_IN_CTZ
    : (argsize <= LONG_TYPE_SIZE) ? BUILT_IN_CTZL
    : (argsize <= LONG_LONG_TYPE_SIZE) ? BUILT_IN_CTZLL
    : END_BUILTINS;

  gcc_assert (code != END_BUILTINS);

  return call_builtin_fn (callexp, code, 1, arg);
}
248 
/* Expand a front-end intrinsic call to bsr().  This takes one argument,
   the signature to which can be either:

	int bsr (uint arg);
	int bsr (ulong arg);

   This scans all bits in the given argument from the most significant bit
   to the least significant, returning the bit number of the first bit set.
   The original call expression is held in CALLEXP.  */

static tree
expand_intrinsic_bsr (tree callexp)
{
  /* The bsr() intrinsic gets turned into (size - 1) - __builtin_clz(arg).
     The return value is supposed to be undefined if arg is zero.  */
  tree arg = CALL_EXPR_ARG (callexp, 0);
  tree type = TREE_TYPE (arg);
  int argsize = TYPE_PRECISION (type);

  /* Which variant of __builtin_clz* should we call?  */
  built_in_function code = (argsize <= INT_TYPE_SIZE) ? BUILT_IN_CLZ
    : (argsize <= LONG_TYPE_SIZE) ? BUILT_IN_CLZL
    : (argsize <= LONG_LONG_TYPE_SIZE) ? BUILT_IN_CLZLL
    : END_BUILTINS;

  gcc_assert (code != END_BUILTINS);

  tree result = call_builtin_fn (callexp, code, 1, arg);

  /* Handle int -> long conversions.  */
  if (TREE_TYPE (result) != type)
    result = fold_convert (type, result);

  /* (argsize - 1) - clz(arg), converting the count from MSB-relative to
     LSB-relative bit numbering.  */
  result = fold_build2 (MINUS_EXPR, type,
			build_integer_cst (argsize - 1, type), result);
  return fold_convert (TREE_TYPE (callexp), result);
}
286 
/* Expand a front-end intrinsic call to INTRINSIC, which is either a call to
   bt(), btc(), btr(), or bts().  These intrinsics expect to take two arguments,
   the signature to which is:

	int bt (size_t* ptr, size_t bitnum);

   All intrinsics test if a bit is set and return the result of that condition.
   Variants of `bt' will then update that bit. `btc' complements the bit, `bts'
   sets the bit, and `btr' resets the bit.  The original call expression is
   held in CALLEXP.  */

static tree
expand_intrinsic_bt (intrinsic_code intrinsic, tree callexp)
{
  tree ptr = CALL_EXPR_ARG (callexp, 0);
  tree bitnum = CALL_EXPR_ARG (callexp, 1);
  tree type = TREE_TYPE (TREE_TYPE (ptr));

  /* size_t bitsize = sizeof(*ptr) * BITS_PER_UNIT;  */
  tree bitsize = fold_convert (type, TYPE_SIZE (TREE_TYPE (ptr)));

  /* ptr[bitnum / bitsize]  */
  ptr = build_array_index (ptr, fold_build2 (TRUNC_DIV_EXPR, type,
					     bitnum, bitsize));
  ptr = indirect_ref (type, ptr);

  /* mask = 1 << (bitnum % bitsize);  */
  bitnum = fold_build2 (TRUNC_MOD_EXPR, type, bitnum, bitsize);
  bitnum = fold_build2 (LSHIFT_EXPR, type, build_one_cst (type), bitnum);

  /* cond = ptr[bitnum / size] & mask;  */
  tree cond = fold_build2 (BIT_AND_EXPR, type, ptr, bitnum);

  /* cond ? -1 : 0;  */
  cond = build_condition (TREE_TYPE (callexp), d_truthvalue_conversion (cond),
			  build_minus_one_cst (TREE_TYPE (callexp)),
			  build_zero_cst (TREE_TYPE (callexp)));

  /* Update the bit as needed, only testing the bit for bt().  */
  tree_code code;

  switch (intrinsic)
    {
    case INTRINSIC_BT:
    case INTRINSIC_BT64:
      /* bt() only tests; no store is generated.  */
      return cond;

    case INTRINSIC_BTC:
    case INTRINSIC_BTC64:
      code = BIT_XOR_EXPR;
      break;

    case INTRINSIC_BTR:
    case INTRINSIC_BTR64:
      /* btr() clears the bit, so AND with the inverted mask.  */
      bitnum = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (bitnum), bitnum);
      code = BIT_AND_EXPR;
      break;

    case INTRINSIC_BTS:
    case INTRINSIC_BTS64:
      code = BIT_IOR_EXPR;
      break;

    default:
      gcc_unreachable ();
    }

  /* ptr[bitnum / size] op= mask;  */
  ptr = modify_expr (ptr, fold_build2 (code, TREE_TYPE (ptr), ptr, bitnum));

  /* Store the condition result in a temporary, and return expressions in
     correct order of evaluation: the bit must be tested before the word is
     updated.  */
  tree tmp = build_local_temp (TREE_TYPE (callexp));
  cond = modify_expr (tmp, cond);

  return compound_expr (cond, compound_expr (ptr, tmp));
}
364 
365 /* Expand a front-end intrinsic call to popcnt().  This takes one argument, the
366    signature to which can be either:
367 
368 	int popcnt (uint arg);
369 	int popcnt (ulong arg);
370 
371    Calculates the number of set bits in an integer.  The original call
372    expression is held in CALLEXP.  */
373 
374 static tree
expand_intrinsic_popcnt(tree callexp)375 expand_intrinsic_popcnt (tree callexp)
376 {
377   tree arg = CALL_EXPR_ARG (callexp, 0);
378   int argsize = TYPE_PRECISION (TREE_TYPE (arg));
379 
380   /* Which variant of __builtin_popcount* should we call?  */
381   built_in_function code = (argsize <= INT_TYPE_SIZE) ? BUILT_IN_POPCOUNT
382     : (argsize <= LONG_TYPE_SIZE) ? BUILT_IN_POPCOUNTL
383     : (argsize <= LONG_LONG_TYPE_SIZE) ? BUILT_IN_POPCOUNTLL
384     : END_BUILTINS;
385 
386   gcc_assert (code != END_BUILTINS);
387 
388   return call_builtin_fn (callexp, code, 1, arg);
389 }
390 
/* Expand a front-end intrinsic call to INTRINSIC, which is either a call to
   rol() or ror().  These intrinsics expect to take one or two arguments,
   the signature to which can be either:

	T rol(T) (const T value, const uint count);
	T rol(uint count, T) (const T value);
	T ror(T) (const T value, const uint count);
	T ror(uint count, T) (const T value);

   This bitwise rotates VALUE left or right by COUNT bit positions.  */

static tree
expand_intrinsic_rotate (intrinsic_code intrinsic, tree callexp)
{
  tree type = TREE_TYPE (callexp);
  tree value = CALL_EXPR_ARG (callexp, 0);
  tree count;
  tree_code code;

  /* Get the equivalent tree code for the intrinsic.  */
  if (intrinsic == INTRINSIC_ROL || intrinsic == INTRINSIC_ROL_TIARG)
    code = LROTATE_EXPR;
  else if (intrinsic == INTRINSIC_ROR || intrinsic == INTRINSIC_ROR_TIARG)
    code = RROTATE_EXPR;
  else
    gcc_unreachable ();

  /* Get the COUNT parameter.  Either from the call expression arguments or the
     template instantiation arguments.  */
  if (intrinsic == INTRINSIC_ROL || intrinsic == INTRINSIC_ROR)
    count = CALL_EXPR_ARG (callexp, 1);
  else
    {
      /* For the _TIARG variants, the count was a template argument; dig it
	 out of the callee's template instance.  */
      tree callee = CALL_EXPR_FN (callexp);

      if (TREE_CODE (callee) == ADDR_EXPR)
	callee = TREE_OPERAND (callee, 0);

      /* Retrieve from the encoded template instantiation.  */
      TemplateInstance *ti = DECL_LANG_FRONTEND (callee)->isInstantiated ();
      gcc_assert (ti && ti->tiargs && ti->tiargs->length == 2);

      /* The first template argument is expected to be an integer constant.  */
      Expression *e = isExpression ((*ti->tiargs)[0]);
      gcc_assert (e && e->op == EXP::int64);
      count = build_expr (e, true);
    }

  return fold_build2 (code, type, value, count);
}
440 
441 /* Expand a front-end intrinsic call to copysign().  This takes two arguments,
442    the signature to which can be either:
443 
444 	float copysign (T to, float from);
445 	double copysign (T to, double from);
446 	real copysign (T to, real from);
447 
448    This computes a value composed of TO with the sign bit of FROM.  The original
449    call expression is held in CALLEXP.  */
450 
451 static tree
expand_intrinsic_copysign(tree callexp)452 expand_intrinsic_copysign (tree callexp)
453 {
454   tree to = CALL_EXPR_ARG (callexp, 0);
455   tree from = CALL_EXPR_ARG (callexp, 1);
456   tree type = TREE_TYPE (to);
457 
458   /* Convert parameters to the same type.  Prefer the first parameter unless it
459      is an integral type.  */
460   if (INTEGRAL_TYPE_P (type))
461     {
462       to = fold_convert (TREE_TYPE (from), to);
463       type = TREE_TYPE (to);
464     }
465   else
466     from = fold_convert (type, from);
467 
468   /* Which variant of __builtin_copysign* should we call?  */
469   built_in_function code = (type == float_type_node) ? BUILT_IN_COPYSIGNF
470     : (type == double_type_node) ? BUILT_IN_COPYSIGN
471     : (type == long_double_type_node) ? BUILT_IN_COPYSIGNL
472     : END_BUILTINS;
473 
474   gcc_assert (code != END_BUILTINS);
475 
476   return call_builtin_fn (callexp, code, 2, to, from);
477 }
478 
479 /* Expand a front-end intrinsic call to pow().  This takes two arguments, the
480    signature to which can be either:
481 
482 	float pow (float base, T exponent);
483 	double pow (double base, T exponent);
484 	real pow (real base, T exponent);
485 
486    This computes the value of BASE raised to the power of EXPONENT.
487    The original call expression is held in CALLEXP.  */
488 
489 static tree
expand_intrinsic_pow(tree callexp)490 expand_intrinsic_pow (tree callexp)
491 {
492   tree base = CALL_EXPR_ARG (callexp, 0);
493   tree exponent = CALL_EXPR_ARG (callexp, 1);
494   tree exptype = TREE_TYPE (exponent);
495 
496   /* Which variant of __builtin_pow* should we call?  */
497   built_in_function code = SCALAR_FLOAT_TYPE_P (exptype) ? BUILT_IN_POW
498     : INTEGRAL_TYPE_P (exptype) ? BUILT_IN_POWI
499     : END_BUILTINS;
500   gcc_assert (code != END_BUILTINS);
501 
502   tree builtin = mathfn_built_in (TREE_TYPE (base), code);
503   gcc_assert (builtin != NULL_TREE);
504 
505   return call_builtin_fn (callexp, DECL_FUNCTION_CODE (builtin), 2,
506 			  base, exponent);
507 }
508 
509 /* Expand a front-end intrinsic call to toPrec().  This takes one argument, the
510    signature to which can be either:
511 
512 	T toPrec(T)(float f);
513 	T toPrec(T)(double f);
514 	T toPrec(T)(real f);
515 
516     This rounds the argument F to the precision of the specified floating
517     point type T.  The original call expression is held in CALLEXP.  */
518 
519 static tree
expand_intrinsic_toprec(tree callexp)520 expand_intrinsic_toprec (tree callexp)
521 {
522   tree f = CALL_EXPR_ARG (callexp, 0);
523   tree type = TREE_TYPE (callexp);
524 
525   return convert (type, f);
526 }
527 
/* Expand a front-end intrinsic call to va_arg().  This takes either one or two
   arguments, the signature to which can be either:

	T va_arg(T) (ref va_list ap);
	void va_arg(T) (va_list ap, ref T parmn);

   This retrieves the next variadic parameter that is type T from the given
   va_list.  If also given, store the value into parmn, otherwise return it.
   The original call expression is held in CALLEXP.  */

static tree
expand_intrinsic_vaarg (tree callexp)
{
  tree ap = CALL_EXPR_ARG (callexp, 0);
  tree parmn = NULL_TREE;
  tree type;

  STRIP_NOPS (ap);

  /* One-argument form: the result type is the type of the call itself.  */
  if (call_expr_nargs (callexp) == 1)
    type = TREE_TYPE (callexp);
  else
    {
      /* Two-argument form: the result type comes from PARMN, which also
	 receives the fetched value.  */
      parmn = CALL_EXPR_ARG (callexp, 1);
      STRIP_NOPS (parmn);

      /* The `ref' argument to va_arg is either an address or reference,
	 get the value of it.  */
      if (TREE_CODE (parmn) == PARM_DECL && POINTER_TYPE_P (TREE_TYPE (parmn)))
	parmn = build_deref (parmn);
      else
	{
	  gcc_assert (TREE_CODE (parmn) == ADDR_EXPR);
	  parmn = TREE_OPERAND (parmn, 0);
	}

      type = TREE_TYPE (parmn);
    }

  /* (T) VA_ARG_EXP<ap>;  */
  tree exp = build1_loc (EXPR_LOCATION (callexp), VA_ARG_EXPR, type, ap);

  /* parmn = (T) VA_ARG_EXP<ap>;  */
  if (parmn != NULL_TREE)
    exp = modify_expr (parmn, exp);

  return exp;
}
576 
/* Expand a front-end intrinsic call to va_start(), which takes two arguments,
   the signature to which is:

	void va_start(T) (out va_list ap, ref T parmn);

   This initializes the va_list type, where parmn should be the last named
   parameter.  The original call expression is held in CALLEXP.  */

static tree
expand_intrinsic_vastart (tree callexp)
{
  tree ap = CALL_EXPR_ARG (callexp, 0);
  tree parmn = CALL_EXPR_ARG (callexp, 1);

  STRIP_NOPS (ap);
  STRIP_NOPS (parmn);

  /* The va_list argument should already have its address taken.  The second
     argument, however, is inout and that needs to be fixed to prevent a
     warning.  Could be casting, so need to check type too?  */
  gcc_assert (TREE_CODE (ap) == ADDR_EXPR
	      || (TREE_CODE (ap) == PARM_DECL
		  && POINTER_TYPE_P (TREE_TYPE (ap))));

  /* Assuming nobody tries to change the return type.  */
  if (TREE_CODE (parmn) != PARM_DECL)
    {
      /* Strip off the implicit address-of to recover the parameter decl.  */
      gcc_assert (TREE_CODE (parmn) == ADDR_EXPR);
      parmn = TREE_OPERAND (parmn, 0);
    }

  return call_builtin_fn (callexp, BUILT_IN_VA_START, 2, ap, parmn);
}
610 
/* Expand a front-end intrinsic call to INTRINSIC, which is either a call to
   adds(), addu(), subs(), subu(), negs(), muls(), or mulu().  These intrinsics
   expect to take two or three arguments, the signature to which can be either:

	int adds (int x, int y, ref bool overflow);
	long adds (long x, long y, ref bool overflow);
	int negs (int x, ref bool overflow);
	long negs (long x, ref bool overflow);

   This performs an operation on two signed or unsigned integers, checking for
   overflow.  The overflow is sticky, meaning that a sequence of operations
   can be done and overflow need only be checked at the end.  The original call
   expression is held in CALLEXP.  */

static tree
expand_intrinsic_checkedint (intrinsic_code intrinsic, tree callexp)
{
  tree type = TREE_TYPE (callexp);
  tree x;
  tree y;
  tree overflow;
  internal_fn icode;

  /* Which variant of *_OVERFLOW should we generate?  */
  switch (intrinsic)
    {
    case INTRINSIC_ADDS:
    case INTRINSIC_ADDSL:
    case INTRINSIC_ADDU:
    case INTRINSIC_ADDUL:
      x = CALL_EXPR_ARG (callexp, 0);
      y = CALL_EXPR_ARG (callexp, 1);
      overflow = CALL_EXPR_ARG (callexp, 2);
      icode = IFN_ADD_OVERFLOW;
      break;

    case INTRINSIC_SUBS:
    case INTRINSIC_SUBSL:
    case INTRINSIC_SUBU:
    case INTRINSIC_SUBUL:
      x = CALL_EXPR_ARG (callexp, 0);
      y = CALL_EXPR_ARG (callexp, 1);
      overflow = CALL_EXPR_ARG (callexp, 2);
      icode = IFN_SUB_OVERFLOW;
      break;

    case INTRINSIC_MULS:
    case INTRINSIC_MULSL:
    case INTRINSIC_MULU:
    case INTRINSIC_MULUI:
    case INTRINSIC_MULUL:
      x = CALL_EXPR_ARG (callexp, 0);
      y = CALL_EXPR_ARG (callexp, 1);
      overflow = CALL_EXPR_ARG (callexp, 2);
      icode = IFN_MUL_OVERFLOW;
      break;

    case INTRINSIC_NEGS:
    case INTRINSIC_NEGSL:
      /* The negs() intrinsic gets turned into SUB_OVERFLOW (0, y).  */
      x = fold_convert (type, integer_zero_node);
      y = CALL_EXPR_ARG (callexp, 0);
      overflow = CALL_EXPR_ARG (callexp, 1);
      icode = IFN_SUB_OVERFLOW;
      break;

    default:
      gcc_unreachable ();
    }

  /* The internal function returns a complex value: the real part is the
     arithmetic result and the imaginary part is the overflow flag.  */
  tree result
    = build_call_expr_internal_loc (EXPR_LOCATION (callexp), icode,
				    build_complex_type (type), 2, x, y);

  STRIP_NOPS (overflow);
  overflow = build_deref (overflow);

  /* Assign returned result to overflow parameter, however if overflow is
     already true, maintain its value.  */
  type = TREE_TYPE (overflow);
  result = save_expr (result);

  /* overflow |= <overflow bit of result>;  */
  tree exp = fold_build2 (BIT_IOR_EXPR, type, overflow,
			  fold_convert (type, imaginary_part (result)));
  exp = modify_expr (overflow, exp);

  /* Return the value of result.  */
  return compound_expr (exp, real_part (result));
}
700 
701 /* Expand a front-end instrinsic call to volatileLoad().  This takes one
702    argument, the signature to which can be either:
703 
704 	ubyte volatileLoad (ubyte* ptr);
705 	ushort volatileLoad (ushort* ptr);
706 	uint volatileLoad (uint* ptr);
707 	ulong volatileLoad (ulong* ptr);
708 
709    This reads a value from the memory location indicated by ptr.  Calls to
710    them are be guaranteed to not be removed (such as during DCE) or reordered
711    in the same thread.  The original call expression is held in CALLEXP.  */
712 
713 static tree
expand_volatile_load(tree callexp)714 expand_volatile_load (tree callexp)
715 {
716   tree ptr = CALL_EXPR_ARG (callexp, 0);
717   tree ptrtype = TREE_TYPE (ptr);
718   gcc_assert (POINTER_TYPE_P (ptrtype));
719 
720   /* (T) *(volatile T *) ptr;  */
721   tree type = build_qualified_type (TREE_TYPE (ptrtype), TYPE_QUAL_VOLATILE);
722   tree result = indirect_ref (type, ptr);
723   TREE_THIS_VOLATILE (result) = 1;
724 
725   return result;
726 }
727 
728 /* Expand a front-end instrinsic call to volatileStore().  This takes two
729    arguments, the signature to which can be either:
730 
731 	void volatileStore (ubyte* ptr, ubyte value);
732 	void volatileStore (ushort* ptr, ushort value);
733 	void volatileStore (uint* ptr, uint value);
734 	void volatileStore (ulong* ptr, ulong value);
735 
736    This writes a value to the memory location indicated by ptr.  Calls to
737    them are be guaranteed to not be removed (such as during DCE) or reordered
738    in the same thread.  The original call expression is held in CALLEXP.  */
739 
740 static tree
expand_volatile_store(tree callexp)741 expand_volatile_store (tree callexp)
742 {
743   tree ptr = CALL_EXPR_ARG (callexp, 0);
744   tree ptrtype = TREE_TYPE (ptr);
745   gcc_assert (POINTER_TYPE_P (ptrtype));
746 
747   /* (T) *(volatile T *) ptr;  */
748   tree type = build_qualified_type (TREE_TYPE (ptrtype), TYPE_QUAL_VOLATILE);
749   tree result = indirect_ref (type, ptr);
750   TREE_THIS_VOLATILE (result) = 1;
751 
752   /* (*(volatile T *) ptr) = value;  */
753   tree value = CALL_EXPR_ARG (callexp, 1);
754   return modify_expr (result, value);
755 }
756 
/* If CALLEXP is for an intrinsic, expand and return inlined compiler
   generated instructions.  Most map directly to GCC builtins, others
   require a little extra work around them.  */

tree
maybe_expand_intrinsic (tree callexp)
{
  tree callee = CALL_EXPR_FN (callexp);

  if (TREE_CODE (callee) == ADDR_EXPR)
    callee = TREE_OPERAND (callee, 0);

  /* Only direct calls to function decls can be intrinsics.  */
  if (TREE_CODE (callee) != FUNCTION_DECL)
    return callexp;

  /* Don't expand CTFE-only intrinsics outside of semantic processing.  */
  if (DECL_BUILT_IN_CTFE (callee) && !doing_semantic_analysis_p)
    return callexp;

  intrinsic_code intrinsic = DECL_INTRINSIC_CODE (callee);
  built_in_function code;

  /* Dispatch on the intrinsic code set by maybe_set_intrinsic.  */
  switch (intrinsic)
    {
    case INTRINSIC_NONE:
      return callexp;

    case INTRINSIC_BSF:
    case INTRINSIC_BSF64:
      return expand_intrinsic_bsf (callexp);

    case INTRINSIC_BSR:
    case INTRINSIC_BSR64:
      return expand_intrinsic_bsr (callexp);

    case INTRINSIC_BT:
    case INTRINSIC_BT64:
    case INTRINSIC_BTC:
    case INTRINSIC_BTC64:
    case INTRINSIC_BTR:
    case INTRINSIC_BTR64:
    case INTRINSIC_BTS:
    case INTRINSIC_BTS64:
      return expand_intrinsic_bt (intrinsic, callexp);

    case INTRINSIC_POPCNT32:
    case INTRINSIC_POPCNT64:
      return expand_intrinsic_popcnt (callexp);

    case INTRINSIC_ROL:
    case INTRINSIC_ROL_TIARG:
    case INTRINSIC_ROR:
    case INTRINSIC_ROR_TIARG:
      return expand_intrinsic_rotate (intrinsic, callexp);

    /* The following map directly to a one-argument GCC builtin, looked up
       from the intrinsic_decls table.  */
    case INTRINSIC_BSWAP16:
    case INTRINSIC_BSWAP32:
    case INTRINSIC_BSWAP64:
    case INTRINSIC_CEIL:
    case INTRINSIC_CEILF:
    case INTRINSIC_CEILL:
    case INTRINSIC_COS:
    case INTRINSIC_COSF:
    case INTRINSIC_COSL:
    case INTRINSIC_EXP:
    case INTRINSIC_EXP2:
    case INTRINSIC_EXPM1:
    case INTRINSIC_FABS:
    case INTRINSIC_FABSF:
    case INTRINSIC_FABSL:
    case INTRINSIC_FLOOR:
    case INTRINSIC_FLOORF:
    case INTRINSIC_FLOORL:
    case INTRINSIC_ISFINITE:
    case INTRINSIC_ISINFINITY:
    case INTRINSIC_ISNAN:
    case INTRINSIC_LOG:
    case INTRINSIC_LOG10:
    case INTRINSIC_LOG2:
    case INTRINSIC_RINT:
    case INTRINSIC_RINTF:
    case INTRINSIC_RINTL:
    case INTRINSIC_RNDTOL:
    case INTRINSIC_RNDTOLF:
    case INTRINSIC_RNDTOLL:
    case INTRINSIC_ROUND:
    case INTRINSIC_SIN:
    case INTRINSIC_SINF:
    case INTRINSIC_SINL:
    case INTRINSIC_SQRT:
    case INTRINSIC_SQRTF:
    case INTRINSIC_SQRTL:
    case INTRINSIC_TAN:
    case INTRINSIC_TRUNC:
      code = intrinsic_decls[intrinsic].built_in;
      gcc_assert (code != BUILT_IN_NONE);
      return call_builtin_fn (callexp, code, 1,
			      CALL_EXPR_ARG (callexp, 0));

    /* These map directly to a two-argument GCC builtin.  */
    case INTRINSIC_FMAX:
    case INTRINSIC_FMIN:
    case INTRINSIC_LDEXP:
    case INTRINSIC_LDEXPF:
    case INTRINSIC_LDEXPL:
      code = intrinsic_decls[intrinsic].built_in;
      gcc_assert (code != BUILT_IN_NONE);
      return call_builtin_fn (callexp, code, 2,
			      CALL_EXPR_ARG (callexp, 0),
			      CALL_EXPR_ARG (callexp, 1));

    /* And fma() maps to a three-argument GCC builtin.  */
    case INTRINSIC_FMA:
      code = intrinsic_decls[intrinsic].built_in;
      gcc_assert (code != BUILT_IN_NONE);
      return call_builtin_fn (callexp, code, 3,
			      CALL_EXPR_ARG (callexp, 0),
			      CALL_EXPR_ARG (callexp, 1),
			      CALL_EXPR_ARG (callexp, 2));

    case INTRINSIC_COPYSIGN:
    case INTRINSIC_COPYSIGNI:
      return expand_intrinsic_copysign (callexp);

    case INTRINSIC_POW:
      return expand_intrinsic_pow (callexp);

    case INTRINSIC_TOPREC:
    case INTRINSIC_TOPRECF:
    case INTRINSIC_TOPRECL:
      return expand_intrinsic_toprec (callexp);

    case INTRINSIC_VA_ARG:
    case INTRINSIC_C_VA_ARG:
      return expand_intrinsic_vaarg (callexp);

    case INTRINSIC_VASTART:
      return expand_intrinsic_vastart (callexp);

    case INTRINSIC_ADDS:
    case INTRINSIC_ADDSL:
    case INTRINSIC_ADDU:
    case INTRINSIC_ADDUL:
    case INTRINSIC_SUBS:
    case INTRINSIC_SUBSL:
    case INTRINSIC_SUBU:
    case INTRINSIC_SUBUL:
    case INTRINSIC_MULS:
    case INTRINSIC_MULSL:
    case INTRINSIC_MULU:
    case INTRINSIC_MULUI:
    case INTRINSIC_MULUL:
    case INTRINSIC_NEGS:
    case INTRINSIC_NEGSL:
      return expand_intrinsic_checkedint (intrinsic, callexp);

    case INTRINSIC_VLOAD8:
    case INTRINSIC_VLOAD16:
    case INTRINSIC_VLOAD32:
    case INTRINSIC_VLOAD64:
      return expand_volatile_load (callexp);

    case INTRINSIC_VSTORE8:
    case INTRINSIC_VSTORE16:
    case INTRINSIC_VSTORE32:
    case INTRINSIC_VSTORE64:
      return expand_volatile_store (callexp);

    default:
      gcc_unreachable ();
    }
}
927