/* Expand builtin functions.
   Copyright (C) 1988-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "predict.h"
#include "tm_p.h"
#include "target.h"
#include "langhooks.h"
#include "tree-ssanames.h"
#include "tree-dfa.h"
#include "value-prof.h"
#include "diagnostic-core.h"
#include "builtins.h"
#include "ubsan.h"
#include "cilk.h"


static tree do_mpc_arg1 (tree, tree, int (*)(mpc_ptr, mpc_srcptr, mpc_rnd_t));

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Set up an array of _DECL trees, making sure each element is
   initialized to NULL_TREE.  */
builtin_info_type builtin_info;

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static const char *c_getstr (tree);
static rtx c_readstr (const char *, enum machine_mode);
static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
static rtx result_vector (int, rtx);
#endif
static void expand_builtin_update_setjmp_buf (rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static void expand_errno_check (tree, rtx);
static rtx expand_builtin_mathfn (tree, rtx, rtx);
static rtx expand_builtin_mathfn_2 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx expand_builtin_memcmp (tree, rtx, enum machine_mode);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, enum machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
					enum machine_mode, int);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, enum machine_mode);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, enum machine_mode);
static rtx expand_builtin_memset (tree, rtx, enum machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx,
				       enum machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, enum machine_mode);
static rtx expand_builtin_alloca (tree, bool);
static rtx expand_builtin_unop (enum machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree fold_builtin_nan (tree, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static bool integer_valued_real_p (tree);
static tree fold_trunc_transparent_mathfn (location_t, tree, tree);
static bool readonly_data_expr (tree);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_sqrt (location_t, tree, tree);
static tree fold_builtin_cbrt (location_t, tree, tree);
static tree fold_builtin_pow (location_t, tree, tree, tree, tree);
static tree fold_builtin_powi (location_t, tree, tree, tree, tree);
static tree fold_builtin_cos (location_t, tree, tree, tree);
static tree fold_builtin_cosh (location_t, tree, tree, tree);
static tree fold_builtin_tan (tree, tree);
static tree fold_builtin_trunc (location_t, tree, tree);
static tree fold_builtin_floor (location_t, tree, tree);
static tree fold_builtin_ceil (location_t, tree, tree);
static tree fold_builtin_round (location_t, tree, tree);
static tree fold_builtin_int_roundingfn (location_t, tree, tree);
static tree fold_builtin_bitop (tree, tree);
static tree fold_builtin_memory_op (location_t, tree, tree, tree, tree,
				    bool, int);
static tree fold_builtin_strchr (location_t, tree, tree, tree);
static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_strcmp (location_t, tree, tree);
static tree fold_builtin_strncmp (location_t, tree, tree, tree);
static tree fold_builtin_signbit (location_t, tree, tree);
static tree fold_builtin_copysign (location_t, tree, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree,
					enum tree_code, enum tree_code);
static tree fold_builtin_n (location_t, tree, tree *, int, bool);
static tree fold_builtin_0 (location_t, tree, bool);
static tree fold_builtin_1 (location_t, tree, tree, bool);
static tree fold_builtin_2 (location_t, tree, tree, tree, bool);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree, bool);
static tree fold_builtin_4 (location_t, tree, tree, tree, tree, tree, bool);
static tree fold_builtin_varargs (location_t, tree, tree, bool);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strstr (location_t, tree, tree, tree);
static tree fold_builtin_strrchr (location_t, tree, tree, tree);
static tree fold_builtin_strncat (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);
static tree fold_builtin_sprintf (location_t, tree, tree, tree, int);
static tree fold_builtin_snprintf (location_t, tree, tree, tree, tree, int);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, enum machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);
static tree fold_builtin_strcat_chk (location_t, tree, tree, tree, tree);
static tree fold_builtin_strncat_chk (location_t, tree, tree, tree, tree, tree);
static tree fold_builtin_sprintf_chk (location_t, tree, enum built_in_function);
static tree fold_builtin_printf (location_t, tree, tree, tree, bool,
				 enum built_in_function);
static tree fold_builtin_fprintf (location_t, tree, tree, tree, tree, bool,
				  enum built_in_function);
static bool init_target_chars (void);

static unsigned HOST_WIDE_INT target_newline;
static unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
static char target_percent_c[3];
static char target_percent_s[3];
static char target_percent_s_newline[4];
static tree do_mpfr_arg1 (tree, tree, int (*)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
			  const REAL_VALUE_TYPE *, const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_arg2 (tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_arg3 (tree, tree, tree, tree,
			  int (*)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t));
static tree do_mpfr_sincos (tree, tree, tree);
static tree do_mpfr_bessel_n (tree, tree, tree,
			      int (*)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
			      const REAL_VALUE_TYPE *, bool);
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with __builtin_ or __sync_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  if (flag_cilkplus
      && (!strcmp (name, "__cilkrts_detach")
	  || !strcmp (name, "__cilkrts_pop_frame")))
    return true;
  return false;
}


/* Return true if DECL is a function symbol representing a built-in.  */

bool
is_builtin_fn (tree decl)
{
  return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
}

/* By default we assume that C99 library functions are available at
   run time, but sincos is not.  */
bool
default_libc_has_function (enum function_class fn_class)
{
  if (fn_class == function_c94
      || fn_class == function_c99_misc
      || fn_class == function_c99_math_complex)
    return true;

  return false;
}

bool
gnu_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
{
  return true;
}

bool
no_c99_libc_has_function (enum function_class fn_class ATTRIBUTE_UNUSED)
{
  return false;
}

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

static bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT in *ALIGNP and any bit-offset in *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int unsignedp, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
			     &mode, &unsignedp, &volatilep, true);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
    {
      align = TYPE_ALIGN (TREE_TYPE (exp));
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;

      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  align = (TREE_INT_CST_LOW (TREE_OPERAND (addr, 1))
		   & -TREE_INT_CST_LOW (TREE_OPERAND (addr, 1)));
	  align *= BITS_PER_UNIT;
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, (step & -step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      if (!addr_p && !known_alignment
	  && TYPE_ALIGN (TREE_TYPE (exp)) > align)
	align = TYPE_ALIGN (TREE_TYPE (exp));
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).low * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
#ifdef CONSTANT_ALIGNMENT
      if (CONSTANT_CLASS_P (exp))
	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
#endif
      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  *alignp = align;
  *bitposp = bitpos & (*alignp - 1);
  return known_alignment;
}

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT in *ALIGNP and any
   bit-offset in *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */
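
  /* An illustrative case (a sketch, not taken from a particular
     target): if get_object_alignment_1 reported align == 128 and
     bitpos == 32, the address is known to sit 4 bytes past a 16-byte
     boundary, so the strongest alignment we may claim is
     bitpos & -bitpos == 32 bits, i.e. 4 bytes.  */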

  if (bitpos != 0)
    align = (bitpos & -bitpos);
  return align;
}

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Return false if the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* We cannot really tell whether this result is an approximation.  */
	  return true;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = (bitpos & -bitpos);

  return align;
}

/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   The value returned is of type `ssizetype'.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */
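
/* As a sketch of the logic below (illustrative, not a quote from the
   manual): for the constant "foo\0bar" with a known offset of 1 this
   returns 2, the distance to the embedded NUL, while a non-constant
   offset makes us give up and return NULL_TREE, because the embedded
   zero byte means the length depends on where the offset lands.  */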

tree
c_strlen (tree src, int only_value)
{
  tree offset_node;
  HOST_WIDE_INT offset;
  int max;
  const char *ptr;
  location_t loc;

  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value);

  loc = EXPR_LOC_OR_LOC (src, input_location);

  src = string_constant (src, &offset_node);
  if (src == 0)
    return NULL_TREE;

  max = TREE_STRING_LENGTH (src) - 1;
  ptr = TREE_STRING_POINTER (src);

  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;

      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return NULL_TREE;

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  This would perhaps not be valid if we were dealing
	 with named arrays in addition to literal string constants.  */

      return size_diffop_loc (loc, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (offset_node == 0)
    offset = 0;
  else if (! tree_fits_shwi_p (offset_node))
    offset = -1;
  else
    offset = tree_to_shwi (offset_node);

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (! TREE_NO_WARNING (src))
	{
	  warning_at (loc, 0, "offset outside bounds of constant string");
	  TREE_NO_WARNING (src) = 1;
	}
      return NULL_TREE;
    }

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return ssize_int (strlen (ptr + offset));
}

/* Return a char pointer for a C string if it is a string constant
   or sum of string constant and integer constant.  */

static const char *
c_getstr (tree src)
{
  tree offset_node;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;

  if (offset_node == 0)
    return TREE_STRING_POINTER (src);
  else if (!tree_fits_uhwi_p (offset_node)
	   || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
    return 0;

  return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
}

/* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  */
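
/* For example (a sketch assuming a 32-bit SImode and 8-bit units):
   reading "abcd" yields 0x64636261 when neither WORDS_BIG_ENDIAN nor
   BYTES_BIG_ENDIAN is set, since byte I lands at bit 8 * I, and
   0x61626364 when both are set.  */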

static rtx
c_readstr (const char *str, enum machine_mode mode)
{
  HOST_WIDE_INT c[2];
  HOST_WIDE_INT ch;
  unsigned int i, j;

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);

  c[0] = 0;
  c[1] = 0;
  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;
      gcc_assert (j < HOST_BITS_PER_DOUBLE_INT);

      if (ch)
	ch = (unsigned char) str[i];
      c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }
  return immed_double_const (c[0], c[1], mode);
}

/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */
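
/* Illustrative only: an INTEGER_CST holding 0x41 on a target with an
   8-bit char stores 'A' in *P and returns 0, while a value that the
   narrowing to the host char would change makes us return 1.  */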

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  val = TREE_INT_CST_LOW (cst);
  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */
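
/* For instance, __builtin_return_address (0) asks for the return
   address of the current frame (COUNT == 0), while
   __builtin_frame_address (1) follows the dynamic chain once to get
   the frame address of the caller.  */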

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;

#ifdef INITIAL_FRAME_ADDRESS_RTX
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
#else
  rtx tem;

  /* For a zero count with __builtin_return_address, we don't care what
     frame address we return, because target-specific definitions will
     override us.  Therefore frame pointer elimination is OK, and using
     the soft frame pointer is OK.

     For a nonzero count, or a zero count with __builtin_frame_address,
     we require a stable offset from the current frame pointer to the
     previous one, so we must use the hard frame pointer, and
     we must disable frame pointer elimination.  */
  if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    tem = frame_pointer_rtx;
  else
    {
      tem = hard_frame_pointer_rtx;

      /* Tell reload not to eliminate the frame pointer.  */
      crtl->accesses_prior_frames = 1;
    }
#endif

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the SPARC, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
#ifdef FRAME_ADDR_RTX
    return FRAME_ADDR_RTX (tem);
#else
    return tem;
#endif

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */
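
/* A sketch of the buffer layout produced below (derived from the
   stores in this function, not a documented ABI): word 0 holds the
   frame pointer, word 1 the address of RECEIVER_LABEL, and everything
   from offset 2 * GET_MODE_SIZE (Pmode) on is the machine-dependent
   stack save area.  */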

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
#ifdef HAVE_builtin_setjmp_setup
  if (HAVE_builtin_setjmp_setup)
    emit_insn (gen_builtin_setjmp_setup (buf_addr));
#endif

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label ATTRIBUTE_UNUSED)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = targetm.calls.static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

#if !HARD_FRAME_POINTER_IS_ARG_POINTER
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }
#endif

#ifdef HAVE_builtin_setjmp_receiver
  if (receiver_label != NULL && HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (receiver_label));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */
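
/* The layout read back here matches expand_builtin_setjmp_setup above:
   the saved frame pointer at offset 0, the receiver label one word up,
   and the saved stack pointer at 2 * GET_MODE_SIZE (Pmode); any words
   beyond that are reserved for machine-dependent use.  */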

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack, insn, last;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realign if longjmp is expanded in the
     current function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require the user to pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
#ifdef HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
      else
#endif
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}

static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */
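
/* For example, the call in expand_builtin_nonlocal_goto below,
   validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE),
   accepts exactly two pointer arguments and nothing more.  */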

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = false;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  do
    {
      code = (enum tree_code) va_arg (ap, int);
      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }
  while (1);

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  return res;
}

/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp, insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

#ifdef HAVE_nonlocal_goto
  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (HAVE_nonlocal_goto)
    emit_insn (gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
#endif
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore frame pointer for containing function.  */
      emit_move_insn (hard_frame_pointer_rtx, r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to correspond to the current
   stack pointer.  */

static void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */
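
/* For instance, __builtin_prefetch (p, 0, 3) requests a read prefetch
   of *p with maximal temporal locality; the second and third arguments
   may be omitted, in which case they default to 0 (read) and 3 (high
   locality) as handled below.  */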

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

#ifdef HAVE_prefetch
  if (HAVE_prefetch)
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (CODE_FOR_prefetch, 3, ops))
	return;
    }
#endif

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}

/* Get a MEM rtx for expression EXP which is the address of an operand
   to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
   the maximum length of the block of memory that might be accessed or
   NULL if unknown.  */

static rtx
get_memory_rtx (tree exp, tree len)
{
  tree orig_exp = exp;
  rtx addr, mem;

  /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
     from its expression, for expr->a.b only <variable>.a.b is recorded.  */
  if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
    exp = TREE_OPERAND (exp, 0);

  addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));

  /* Get an expression we can use to find the attributes to assign to MEM.
     First remove any nops.  */
  while (CONVERT_EXPR_P (exp)
	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    exp = TREE_OPERAND (exp, 0);

  /* Build a MEM_REF representing the whole accessed area as a byte blob,
     (as builtin stringops may alias with anything).  */
  exp = fold_build2 (MEM_REF,
		     build_array_type (char_type_node,
				       build_range_type (sizetype,
							 size_one_node, len)),
		     exp, build_int_cst (ptr_type_node, 0));

  /* If the MEM_REF has no acceptable address, try to get the base object
     from the original address we got, and build an all-aliasing
     unknown-sized access to that one.  */
  if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
    set_mem_attributes (mem, exp, 0);
  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
						     0))))
    {
      exp = build_fold_addr_expr (exp);
      exp = fold_build2 (MEM_REF,
			 build_array_type (char_type_node,
					   build_range_type (sizetype,
							     size_zero_node,
							     NULL)),
			 exp, build_int_cst (ptr_type_node, 0));
      set_mem_attributes (mem, exp, 0);
    }
  set_mem_alias_set (mem, 0);
  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

#define apply_args_mode \
  (this_target_builtins->x_apply_args_mode)
#define apply_result_mode \
  (this_target_builtins->x_apply_result_mode)

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */
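
/* A summary of the block layout computed below (not a separate ABI):
   the incoming arg pointer comes first, then the structure value
   address unless it is passed as an "invisible" first argument, then
   each argument-passing register in turn, each aligned to the natural
   alignment of its mode.  */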
1370
1371 static int
apply_args_size(void)1372 apply_args_size (void)
1373 {
1374 static int size = -1;
1375 int align;
1376 unsigned int regno;
1377 enum machine_mode mode;
1378
1379 /* The values computed by this function never change. */
1380 if (size < 0)
1381 {
1382 /* The first value is the incoming arg-pointer. */
1383 size = GET_MODE_SIZE (Pmode);
1384
1385 /* The second value is the structure value address unless this is
1386 passed as an "invisible" first argument. */
1387 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1388 size += GET_MODE_SIZE (Pmode);
1389
1390 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1391 if (FUNCTION_ARG_REGNO_P (regno))
1392 {
1393 mode = targetm.calls.get_raw_arg_mode (regno);
1394
1395 gcc_assert (mode != VOIDmode);
1396
1397 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1398 if (size % align != 0)
1399 size = CEIL (size, align) * align;
1400 size += GET_MODE_SIZE (mode);
1401 apply_args_mode[regno] = mode;
1402 }
1403 else
1404 {
1405 apply_args_mode[regno] = VOIDmode;
1406 }
1407 }
1408 return size;
1409 }
1410
1411 /* Return the size required for the block returned by __builtin_apply,
1412 and initialize apply_result_mode. */
1413
1414 static int
apply_result_size(void)1415 apply_result_size (void)
1416 {
1417 static int size = -1;
1418 int align, regno;
1419 enum machine_mode mode;
1420
1421 /* The values computed by this function never change. */
1422 if (size < 0)
1423 {
1424 size = 0;
1425
1426 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1427 if (targetm.calls.function_value_regno_p (regno))
1428 {
1429 mode = targetm.calls.get_raw_result_mode (regno);
1430
1431 gcc_assert (mode != VOIDmode);
1432
1433 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1434 if (size % align != 0)
1435 size = CEIL (size, align) * align;
1436 size += GET_MODE_SIZE (mode);
1437 apply_result_mode[regno] = mode;
1438 }
1439 else
1440 apply_result_mode[regno] = VOIDmode;
1441
1442 /* Allow targets that use untyped_call and untyped_return to override
1443 the size so that machine-specific information can be stored here. */
1444 #ifdef APPLY_RESULT_SIZE
1445 size = APPLY_RESULT_SIZE;
1446 #endif
1447 }
1448 return size;
1449 }
1450
1451 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1452 /* Create a vector describing the result block RESULT. If SAVEP is true,
1453 the result block is used to save the values; otherwise it is used to
1454 restore the values. */
1455
1456 static rtx
result_vector(int savep,rtx result)1457 result_vector (int savep, rtx result)
1458 {
1459 int regno, size, align, nelts;
1460 enum machine_mode mode;
1461 rtx reg, mem;
1462 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1463
1464 size = nelts = 0;
1465 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1466 if ((mode = apply_result_mode[regno]) != VOIDmode)
1467 {
1468 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1469 if (size % align != 0)
1470 size = CEIL (size, align) * align;
1471 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1472 mem = adjust_address (result, mode, size);
1473 savevec[nelts++] = (savep
1474 ? gen_rtx_SET (VOIDmode, mem, reg)
1475 : gen_rtx_SET (VOIDmode, reg, mem));
1476 size += GET_MODE_SIZE (mode);
1477 }
1478 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1479 }
1480 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1481
1482 /* Save the state required to perform an untyped call with the same
1483 arguments as were passed to the current function. */
1484
1485 static rtx
expand_builtin_apply_args_1(void)1486 expand_builtin_apply_args_1 (void)
1487 {
1488 rtx registers, tem;
1489 int size, align, regno;
1490 enum machine_mode mode;
1491 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1492
1493 /* Create a block where the arg-pointer, structure value address,
1494 and argument registers can be saved. */
1495 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1496
1497 /* Walk past the arg-pointer and structure value address. */
1498 size = GET_MODE_SIZE (Pmode);
1499 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1500 size += GET_MODE_SIZE (Pmode);
1501
1502 /* Save each register used in calling a function to the block. */
1503 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1504 if ((mode = apply_args_mode[regno]) != VOIDmode)
1505 {
1506 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1507 if (size % align != 0)
1508 size = CEIL (size, align) * align;
1509
1510 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1511
1512 emit_move_insn (adjust_address (registers, mode, size), tem);
1513 size += GET_MODE_SIZE (mode);
1514 }
1515
1516 /* Save the arg pointer to the block. */
1517 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1518 #ifdef STACK_GROWS_DOWNWARD
1519 /* We need the pointer as the caller actually passed them to us, not
1520 as we might have pretended they were passed. Make sure it's a valid
1521 operand, as emit_move_insn isn't expected to handle a PLUS. */
1522 tem
1523 = force_operand (plus_constant (Pmode, tem, crtl->args.pretend_args_size),
1524 NULL_RTX);
1525 #endif
1526 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1527
1528 size = GET_MODE_SIZE (Pmode);
1529
1530 /* Save the structure value address unless this is passed as an
1531 "invisible" first argument. */
1532 if (struct_incoming_value)
1533 {
1534 emit_move_insn (adjust_address (registers, Pmode, size),
1535 copy_to_reg (struct_incoming_value));
1536 size += GET_MODE_SIZE (Pmode);
1537 }
1538
1539 /* Return the address of the block. */
1540 return copy_addr_to_reg (XEXP (registers, 0));
1541 }
1542
1543 /* __builtin_apply_args returns block of memory allocated on
1544 the stack into which is stored the arg pointer, structure
1545 value address, static chain, and all the registers that might
1546 possibly be used in performing a function call. The code is
1547 moved to the start of the function so the incoming values are
1548 saved. */
1549
1550 static rtx
expand_builtin_apply_args(void)1551 expand_builtin_apply_args (void)
1552 {
1553 /* Don't do __builtin_apply_args more than once in a function.
1554 Save the result of the first call and reuse it. */
1555 if (apply_args_value != 0)
1556 return apply_args_value;
1557 {
1558 /* When this function is called, it means that registers must be
1559 saved on entry to this function. So we migrate the
1560 call to the first insn of this function. */
1561 rtx temp;
1562 rtx seq;
1563
1564 start_sequence ();
1565 temp = expand_builtin_apply_args_1 ();
1566 seq = get_insns ();
1567 end_sequence ();
1568
1569 apply_args_value = temp;
1570
1571 /* Put the insns after the NOTE that starts the function.
1572 If this is inside a start_sequence, make the outer-level insn
1573 chain current, so the code is placed at the start of the
1574 function. If internal_arg_pointer is a non-virtual pseudo,
1575 it needs to be placed after the function that initializes
1576 that pseudo. */
1577 push_topmost_sequence ();
1578 if (REG_P (crtl->args.internal_arg_pointer)
1579 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1580 emit_insn_before (seq, parm_birth_insn);
1581 else
1582 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1583 pop_topmost_sequence ();
1584 return temp;
1585 }
1586 }
1587
1588 /* Perform an untyped call and save the state required to perform an
1589 untyped return of whatever value was returned by the given function. */
1590
1591 static rtx
1592 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1593 {
1594 int size, align, regno;
1595 enum machine_mode mode;
1596 rtx incoming_args, result, reg, dest, src, call_insn;
1597 rtx old_stack_level = 0;
1598 rtx call_fusage = 0;
1599 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1600
1601 arguments = convert_memory_address (Pmode, arguments);
1602
1603 /* Create a block where the return registers can be saved. */
1604 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1605
1606 /* Fetch the arg pointer from the ARGUMENTS block. */
1607 incoming_args = gen_reg_rtx (Pmode);
1608 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1609 #ifndef STACK_GROWS_DOWNWARD
1610 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1611 incoming_args, 0, OPTAB_LIB_WIDEN);
1612 #endif
1613
1614 /* Push a new argument block and copy the arguments. Do not allow
1615 the (potential) memcpy call below to interfere with our stack
1616 manipulations. */
1617 do_pending_stack_adjust ();
1618 NO_DEFER_POP;
1619
1620 /* Save the stack with nonlocal if available. */
1621 #ifdef HAVE_save_stack_nonlocal
1622 if (HAVE_save_stack_nonlocal)
1623 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1624 else
1625 #endif
1626 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1627
1628 /* Allocate a block of memory onto the stack and copy the memory
1629 arguments to the outgoing arguments address. We can pass TRUE
1630 as the 4th argument because we just saved the stack pointer
1631 and will restore it right after the call. */
1632 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1633
1634 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1635 may have already set current_function_calls_alloca to true.
1636 current_function_calls_alloca won't be set if argsize is zero,
1637 so we have to guarantee need_drap is true here. */
1638 if (SUPPORTS_STACK_ALIGNMENT)
1639 crtl->need_drap = true;
1640
1641 dest = virtual_outgoing_args_rtx;
1642 #ifndef STACK_GROWS_DOWNWARD
1643 if (CONST_INT_P (argsize))
1644 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1645 else
1646 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1647 #endif
1648 dest = gen_rtx_MEM (BLKmode, dest);
1649 set_mem_align (dest, PARM_BOUNDARY);
1650 src = gen_rtx_MEM (BLKmode, incoming_args);
1651 set_mem_align (src, PARM_BOUNDARY);
1652 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1653
1654 /* Refer to the argument block. */
1655 apply_args_size ();
1656 arguments = gen_rtx_MEM (BLKmode, arguments);
1657 set_mem_align (arguments, PARM_BOUNDARY);
1658
1659 /* Walk past the arg-pointer and structure value address. */
1660 size = GET_MODE_SIZE (Pmode);
1661 if (struct_value)
1662 size += GET_MODE_SIZE (Pmode);
1663
1664 /* Restore each of the registers previously saved. Make USE insns
1665 for each of these registers for use in making the call. */
1666 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1667 if ((mode = apply_args_mode[regno]) != VOIDmode)
1668 {
1669 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1670 if (size % align != 0)
1671 size = CEIL (size, align) * align;
1672 reg = gen_rtx_REG (mode, regno);
1673 emit_move_insn (reg, adjust_address (arguments, mode, size));
1674 use_reg (&call_fusage, reg);
1675 size += GET_MODE_SIZE (mode);
1676 }
1677
1678 /* Restore the structure value address unless this is passed as an
1679 "invisible" first argument. */
1680 size = GET_MODE_SIZE (Pmode);
1681 if (struct_value)
1682 {
1683 rtx value = gen_reg_rtx (Pmode);
1684 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1685 emit_move_insn (struct_value, value);
1686 if (REG_P (struct_value))
1687 use_reg (&call_fusage, struct_value);
1688 size += GET_MODE_SIZE (Pmode);
1689 }
1690
1691 /* All arguments and registers used for the call are set up by now! */
1692 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1693
1694 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1695 and we don't want to load it into a register as an optimization,
1696 because prepare_call_address already did it if it should be done. */
1697 if (GET_CODE (function) != SYMBOL_REF)
1698 function = memory_address (FUNCTION_MODE, function);
1699
1700 /* Generate the actual call instruction and save the return value. */
1701 #ifdef HAVE_untyped_call
1702 if (HAVE_untyped_call)
1703 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1704 result, result_vector (1, result)));
1705 else
1706 #endif
1707 #ifdef HAVE_call_value
1708 if (HAVE_call_value)
1709 {
1710 rtx valreg = 0;
1711
1712 /* Locate the unique return register. It is not possible to
1713 express a call that sets more than one return register using
1714 call_value; use untyped_call for that. In fact, untyped_call
1715 only needs to save the return registers in the given block. */
1716 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1717 if ((mode = apply_result_mode[regno]) != VOIDmode)
1718 {
1719 gcc_assert (!valreg); /* HAVE_untyped_call required. */
1720
1721 valreg = gen_rtx_REG (mode, regno);
1722 }
1723
1724 emit_call_insn (GEN_CALL_VALUE (valreg,
1725 gen_rtx_MEM (FUNCTION_MODE, function),
1726 const0_rtx, NULL_RTX, const0_rtx));
1727
1728 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1729 }
1730 else
1731 #endif
1732 gcc_unreachable ();
1733
1734 /* Find the CALL insn we just emitted, and attach the register usage
1735 information. */
1736 call_insn = last_call_insn ();
1737 add_function_usage_to (call_insn, call_fusage);
1738
1739 /* Restore the stack. */
1740 #ifdef HAVE_save_stack_nonlocal
1741 if (HAVE_save_stack_nonlocal)
1742 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1743 else
1744 #endif
1745 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1746 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1747
1748 OK_DEFER_POP;
1749
1750 /* Return the address of the result block. */
1751 result = copy_addr_to_reg (XEXP (result, 0));
1752 return convert_memory_address (ptr_mode, result);
1753 }
1754
1755 /* Perform an untyped return. */
1756
1757 static void
1758 expand_builtin_return (rtx result)
1759 {
1760 int size, align, regno;
1761 enum machine_mode mode;
1762 rtx reg;
1763 rtx call_fusage = 0;
1764
1765 result = convert_memory_address (Pmode, result);
1766
1767 apply_result_size ();
1768 result = gen_rtx_MEM (BLKmode, result);
1769
1770 #ifdef HAVE_untyped_return
1771 if (HAVE_untyped_return)
1772 {
1773 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1774 emit_barrier ();
1775 return;
1776 }
1777 #endif
1778
1779 /* Restore the return value and note that each value is used. */
1780 size = 0;
1781 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1782 if ((mode = apply_result_mode[regno]) != VOIDmode)
1783 {
1784 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1785 if (size % align != 0)
1786 size = CEIL (size, align) * align;
1787 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1788 emit_move_insn (reg, adjust_address (result, mode, size));
1789
1790 push_to_sequence (call_fusage);
1791 emit_use (reg);
1792 call_fusage = get_insns ();
1793 end_sequence ();
1794 size += GET_MODE_SIZE (mode);
1795 }
1796
1797 /* Put the USE insns before the return. */
1798 emit_insn (call_fusage);
1799
1800 /* Return whatever values were restored by jumping directly to the end
1801 of the function. */
1802 expand_naked_return ();
1803 }
1804
1805 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1806
1807 static enum type_class
1808 type_to_class (tree type)
1809 {
1810 switch (TREE_CODE (type))
1811 {
1812 case VOID_TYPE: return void_type_class;
1813 case INTEGER_TYPE: return integer_type_class;
1814 case ENUMERAL_TYPE: return enumeral_type_class;
1815 case BOOLEAN_TYPE: return boolean_type_class;
1816 case POINTER_TYPE: return pointer_type_class;
1817 case REFERENCE_TYPE: return reference_type_class;
1818 case OFFSET_TYPE: return offset_type_class;
1819 case REAL_TYPE: return real_type_class;
1820 case COMPLEX_TYPE: return complex_type_class;
1821 case FUNCTION_TYPE: return function_type_class;
1822 case METHOD_TYPE: return method_type_class;
1823 case RECORD_TYPE: return record_type_class;
1824 case UNION_TYPE:
1825 case QUAL_UNION_TYPE: return union_type_class;
1826 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1827 ? string_type_class : array_type_class);
1828 case LANG_TYPE: return lang_type_class;
1829 default: return no_type_class;
1830 }
1831 }
1832
1833 /* Expand a call EXP to __builtin_classify_type. */
1834
1835 static rtx
1836 expand_builtin_classify_type (tree exp)
1837 {
1838 if (call_expr_nargs (exp))
1839 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1840 return GEN_INT (no_type_class);
1841 }
1842
1843 /* This helper macro, meant to be used in mathfn_built_in_1 below,
1844 determines which among a set of three builtin math functions is
1845 appropriate for a given type mode. The `F' and `L' cases are
1846 automatically generated from the `double' case. */
1847 #define CASE_MATHFN(BUILT_IN_MATHFN) \
1848 case BUILT_IN_MATHFN: case BUILT_IN_MATHFN##F: case BUILT_IN_MATHFN##L: \
1849 fcode = BUILT_IN_MATHFN; fcodef = BUILT_IN_MATHFN##F ; \
1850 fcodel = BUILT_IN_MATHFN##L ; break;
1851 /* Similar to above, but appends _R after any F/L suffix. */
1852 #define CASE_MATHFN_REENT(BUILT_IN_MATHFN) \
1853 case BUILT_IN_MATHFN##_R: case BUILT_IN_MATHFN##F_R: case BUILT_IN_MATHFN##L_R: \
1854 fcode = BUILT_IN_MATHFN##_R; fcodef = BUILT_IN_MATHFN##F_R ; \
1855 fcodel = BUILT_IN_MATHFN##L_R ; break;
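
/* For instance, CASE_MATHFN (BUILT_IN_SIN) expands to:

     case BUILT_IN_SIN: case BUILT_IN_SINF: case BUILT_IN_SINL:
       fcode = BUILT_IN_SIN; fcodef = BUILT_IN_SINF;
       fcodel = BUILT_IN_SINL; break;  */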
1856
1857 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1858 if available. If IMPLICIT is true use the implicit builtin declaration,
1859 otherwise use the explicit declaration. If we can't do the conversion,
1860 return zero. */
1861
1862 static tree
1863 mathfn_built_in_1 (tree type, enum built_in_function fn, bool implicit_p)
1864 {
1865 enum built_in_function fcode, fcodef, fcodel, fcode2;
1866
1867 switch (fn)
1868 {
1869 CASE_MATHFN (BUILT_IN_ACOS)
1870 CASE_MATHFN (BUILT_IN_ACOSH)
1871 CASE_MATHFN (BUILT_IN_ASIN)
1872 CASE_MATHFN (BUILT_IN_ASINH)
1873 CASE_MATHFN (BUILT_IN_ATAN)
1874 CASE_MATHFN (BUILT_IN_ATAN2)
1875 CASE_MATHFN (BUILT_IN_ATANH)
1876 CASE_MATHFN (BUILT_IN_CBRT)
1877 CASE_MATHFN (BUILT_IN_CEIL)
1878 CASE_MATHFN (BUILT_IN_CEXPI)
1879 CASE_MATHFN (BUILT_IN_COPYSIGN)
1880 CASE_MATHFN (BUILT_IN_COS)
1881 CASE_MATHFN (BUILT_IN_COSH)
1882 CASE_MATHFN (BUILT_IN_DREM)
1883 CASE_MATHFN (BUILT_IN_ERF)
1884 CASE_MATHFN (BUILT_IN_ERFC)
1885 CASE_MATHFN (BUILT_IN_EXP)
1886 CASE_MATHFN (BUILT_IN_EXP10)
1887 CASE_MATHFN (BUILT_IN_EXP2)
1888 CASE_MATHFN (BUILT_IN_EXPM1)
1889 CASE_MATHFN (BUILT_IN_FABS)
1890 CASE_MATHFN (BUILT_IN_FDIM)
1891 CASE_MATHFN (BUILT_IN_FLOOR)
1892 CASE_MATHFN (BUILT_IN_FMA)
1893 CASE_MATHFN (BUILT_IN_FMAX)
1894 CASE_MATHFN (BUILT_IN_FMIN)
1895 CASE_MATHFN (BUILT_IN_FMOD)
1896 CASE_MATHFN (BUILT_IN_FREXP)
1897 CASE_MATHFN (BUILT_IN_GAMMA)
1898 CASE_MATHFN_REENT (BUILT_IN_GAMMA) /* GAMMA_R */
1899 CASE_MATHFN (BUILT_IN_HUGE_VAL)
1900 CASE_MATHFN (BUILT_IN_HYPOT)
1901 CASE_MATHFN (BUILT_IN_ILOGB)
1902 CASE_MATHFN (BUILT_IN_ICEIL)
1903 CASE_MATHFN (BUILT_IN_IFLOOR)
1904 CASE_MATHFN (BUILT_IN_INF)
1905 CASE_MATHFN (BUILT_IN_IRINT)
1906 CASE_MATHFN (BUILT_IN_IROUND)
1907 CASE_MATHFN (BUILT_IN_ISINF)
1908 CASE_MATHFN (BUILT_IN_J0)
1909 CASE_MATHFN (BUILT_IN_J1)
1910 CASE_MATHFN (BUILT_IN_JN)
1911 CASE_MATHFN (BUILT_IN_LCEIL)
1912 CASE_MATHFN (BUILT_IN_LDEXP)
1913 CASE_MATHFN (BUILT_IN_LFLOOR)
1914 CASE_MATHFN (BUILT_IN_LGAMMA)
1915 CASE_MATHFN_REENT (BUILT_IN_LGAMMA) /* LGAMMA_R */
1916 CASE_MATHFN (BUILT_IN_LLCEIL)
1917 CASE_MATHFN (BUILT_IN_LLFLOOR)
1918 CASE_MATHFN (BUILT_IN_LLRINT)
1919 CASE_MATHFN (BUILT_IN_LLROUND)
1920 CASE_MATHFN (BUILT_IN_LOG)
1921 CASE_MATHFN (BUILT_IN_LOG10)
1922 CASE_MATHFN (BUILT_IN_LOG1P)
1923 CASE_MATHFN (BUILT_IN_LOG2)
1924 CASE_MATHFN (BUILT_IN_LOGB)
1925 CASE_MATHFN (BUILT_IN_LRINT)
1926 CASE_MATHFN (BUILT_IN_LROUND)
1927 CASE_MATHFN (BUILT_IN_MODF)
1928 CASE_MATHFN (BUILT_IN_NAN)
1929 CASE_MATHFN (BUILT_IN_NANS)
1930 CASE_MATHFN (BUILT_IN_NEARBYINT)
1931 CASE_MATHFN (BUILT_IN_NEXTAFTER)
1932 CASE_MATHFN (BUILT_IN_NEXTTOWARD)
1933 CASE_MATHFN (BUILT_IN_POW)
1934 CASE_MATHFN (BUILT_IN_POWI)
1935 CASE_MATHFN (BUILT_IN_POW10)
1936 CASE_MATHFN (BUILT_IN_REMAINDER)
1937 CASE_MATHFN (BUILT_IN_REMQUO)
1938 CASE_MATHFN (BUILT_IN_RINT)
1939 CASE_MATHFN (BUILT_IN_ROUND)
1940 CASE_MATHFN (BUILT_IN_SCALB)
1941 CASE_MATHFN (BUILT_IN_SCALBLN)
1942 CASE_MATHFN (BUILT_IN_SCALBN)
1943 CASE_MATHFN (BUILT_IN_SIGNBIT)
1944 CASE_MATHFN (BUILT_IN_SIGNIFICAND)
1945 CASE_MATHFN (BUILT_IN_SIN)
1946 CASE_MATHFN (BUILT_IN_SINCOS)
1947 CASE_MATHFN (BUILT_IN_SINH)
1948 CASE_MATHFN (BUILT_IN_SQRT)
1949 CASE_MATHFN (BUILT_IN_TAN)
1950 CASE_MATHFN (BUILT_IN_TANH)
1951 CASE_MATHFN (BUILT_IN_TGAMMA)
1952 CASE_MATHFN (BUILT_IN_TRUNC)
1953 CASE_MATHFN (BUILT_IN_Y0)
1954 CASE_MATHFN (BUILT_IN_Y1)
1955 CASE_MATHFN (BUILT_IN_YN)
1956
1957 default:
1958 return NULL_TREE;
1959 }
1960
1961 if (TYPE_MAIN_VARIANT (type) == double_type_node)
1962 fcode2 = fcode;
1963 else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1964 fcode2 = fcodef;
1965 else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1966 fcode2 = fcodel;
1967 else
1968 return NULL_TREE;
1969
1970 if (implicit_p && !builtin_decl_implicit_p (fcode2))
1971 return NULL_TREE;
1972
1973 return builtin_decl_explicit (fcode2);
1974 }
1975
1976 /* Like mathfn_built_in_1(), but always use the implicit builtin declarations. */
1977
1978 tree
1979 mathfn_built_in (tree type, enum built_in_function fn)
1980 {
1981 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1982 }
1983
1984 /* If errno must be maintained, expand the RTL to check if the result,
1985 TARGET, of a built-in function call, EXP, is NaN, and if so set
1986 errno to EDOM. */
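
/* For example, with -fmath-errno a call such as sqrt (-1.0) yields NaN,
   and the check emitted here is what stores EDOM in errno (or re-issues
   the library call so that it can do so). The test relies on the fact
   that a NaN compares unequal to itself. */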
1987
1988 static void
1989 expand_errno_check (tree exp, rtx target)
1990 {
1991 rtx lab = gen_label_rtx ();
1992
1993 /* Test the result; if it is NaN, set errno=EDOM because
1994 the argument was not in the domain. */
1995 do_compare_rtx_and_jump (target, target, EQ, 0, GET_MODE (target),
1996 NULL_RTX, NULL_RTX, lab,
1997 /* The jump is very likely. */
1998 REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1));
1999
2000 #ifdef TARGET_EDOM
2001 /* If this built-in doesn't throw an exception, set errno directly. */
2002 if (TREE_NOTHROW (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
2003 {
2004 #ifdef GEN_ERRNO_RTX
2005 rtx errno_rtx = GEN_ERRNO_RTX;
2006 #else
2007 rtx errno_rtx
2008 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
2009 #endif
2010 emit_move_insn (errno_rtx,
2011 gen_int_mode (TARGET_EDOM, GET_MODE (errno_rtx)));
2012 emit_label (lab);
2013 return;
2014 }
2015 #endif
2016
2017 /* Make sure the library call isn't expanded as a tail call. */
2018 CALL_EXPR_TAILCALL (exp) = 0;
2019
2020 /* We can't set errno=EDOM directly; let the library call do it.
2021 Pop the arguments right away in case the call gets deleted. */
2022 NO_DEFER_POP;
2023 expand_call (exp, target, 0);
2024 OK_DEFER_POP;
2025 emit_label (lab);
2026 }
2027
2028 /* Expand a call to one of the builtin math functions (sqrt, exp, or log).
2029 Return NULL_RTX if a normal call should be emitted rather than expanding
2030 the function in-line. EXP is the expression that is a call to the builtin
2031 function; if convenient, the result should be placed in TARGET.
2032 SUBTARGET may be used as the target for computing one of EXP's operands. */
2033
2034 static rtx
2035 expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
2036 {
2037 optab builtin_optab;
2038 rtx op0, insns;
2039 tree fndecl = get_callee_fndecl (exp);
2040 enum machine_mode mode;
2041 bool errno_set = false;
2042 bool try_widening = false;
2043 tree arg;
2044
2045 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2046 return NULL_RTX;
2047
2048 arg = CALL_EXPR_ARG (exp, 0);
2049
2050 switch (DECL_FUNCTION_CODE (fndecl))
2051 {
2052 CASE_FLT_FN (BUILT_IN_SQRT):
2053 errno_set = ! tree_expr_nonnegative_p (arg);
2054 try_widening = true;
2055 builtin_optab = sqrt_optab;
2056 break;
2057 CASE_FLT_FN (BUILT_IN_EXP):
2058 errno_set = true; builtin_optab = exp_optab; break;
2059 CASE_FLT_FN (BUILT_IN_EXP10):
2060 CASE_FLT_FN (BUILT_IN_POW10):
2061 errno_set = true; builtin_optab = exp10_optab; break;
2062 CASE_FLT_FN (BUILT_IN_EXP2):
2063 errno_set = true; builtin_optab = exp2_optab; break;
2064 CASE_FLT_FN (BUILT_IN_EXPM1):
2065 errno_set = true; builtin_optab = expm1_optab; break;
2066 CASE_FLT_FN (BUILT_IN_LOGB):
2067 errno_set = true; builtin_optab = logb_optab; break;
2068 CASE_FLT_FN (BUILT_IN_LOG):
2069 errno_set = true; builtin_optab = log_optab; break;
2070 CASE_FLT_FN (BUILT_IN_LOG10):
2071 errno_set = true; builtin_optab = log10_optab; break;
2072 CASE_FLT_FN (BUILT_IN_LOG2):
2073 errno_set = true; builtin_optab = log2_optab; break;
2074 CASE_FLT_FN (BUILT_IN_LOG1P):
2075 errno_set = true; builtin_optab = log1p_optab; break;
2076 CASE_FLT_FN (BUILT_IN_ASIN):
2077 builtin_optab = asin_optab; break;
2078 CASE_FLT_FN (BUILT_IN_ACOS):
2079 builtin_optab = acos_optab; break;
2080 CASE_FLT_FN (BUILT_IN_TAN):
2081 builtin_optab = tan_optab; break;
2082 CASE_FLT_FN (BUILT_IN_ATAN):
2083 builtin_optab = atan_optab; break;
2084 CASE_FLT_FN (BUILT_IN_FLOOR):
2085 builtin_optab = floor_optab; break;
2086 CASE_FLT_FN (BUILT_IN_CEIL):
2087 builtin_optab = ceil_optab; break;
2088 CASE_FLT_FN (BUILT_IN_TRUNC):
2089 builtin_optab = btrunc_optab; break;
2090 CASE_FLT_FN (BUILT_IN_ROUND):
2091 builtin_optab = round_optab; break;
2092 CASE_FLT_FN (BUILT_IN_NEARBYINT):
2093 builtin_optab = nearbyint_optab;
2094 if (flag_trapping_math)
2095 break;
2096 /* Else fallthrough and expand as rint. */
2097 CASE_FLT_FN (BUILT_IN_RINT):
2098 builtin_optab = rint_optab; break;
2099 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
2100 builtin_optab = significand_optab; break;
2101 default:
2102 gcc_unreachable ();
2103 }
2104
2105 /* Make a suitable register to place result in. */
2106 mode = TYPE_MODE (TREE_TYPE (exp));
2107
2108 if (! flag_errno_math || ! HONOR_NANS (mode))
2109 errno_set = false;
2110
2111 /* Before working hard, check whether the instruction is available, but try
2112 to widen the mode for specific operations. */
2113 if ((optab_handler (builtin_optab, mode) != CODE_FOR_nothing
2114 || (try_widening && !excess_precision_type (TREE_TYPE (exp))))
2115 && (!errno_set || !optimize_insn_for_size_p ()))
2116 {
2117 rtx result = gen_reg_rtx (mode);
2118
2119 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2120 need to expand the argument again. This way, we will not perform
2121 side-effects more than once. */
2122 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2123
2124 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2125
2126 start_sequence ();
2127
2128 /* Compute into RESULT.
2129 Set RESULT to wherever the result comes back. */
2130 result = expand_unop (mode, builtin_optab, op0, result, 0);
2131
2132 if (result != 0)
2133 {
2134 if (errno_set)
2135 expand_errno_check (exp, result);
2136
2137 /* Output the entire sequence. */
2138 insns = get_insns ();
2139 end_sequence ();
2140 emit_insn (insns);
2141 return result;
2142 }
2143
2144 /* If we were unable to expand via the builtin, stop the sequence
2145 (without outputting the insns) and call the library function
2146 with the stabilized argument list. */
2147 end_sequence ();
2148 }
2149
2150 return expand_call (exp, target, target == const0_rtx);
2151 }
2152
2153 /* Expand a call to the builtin binary math functions (pow and atan2).
2154 Return NULL_RTX if a normal call should be emitted rather than expanding the
2155 function in-line. EXP is the expression that is a call to the builtin
2156 function; if convenient, the result should be placed in TARGET.
2157 SUBTARGET may be used as the target for computing one of EXP's
2158 operands. */
2159
2160 static rtx
2161 expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
2162 {
2163 optab builtin_optab;
2164 rtx op0, op1, insns, result;
2165 int op1_type = REAL_TYPE;
2166 tree fndecl = get_callee_fndecl (exp);
2167 tree arg0, arg1;
2168 enum machine_mode mode;
2169 bool errno_set = true;
2170
2171 switch (DECL_FUNCTION_CODE (fndecl))
2172 {
2173 CASE_FLT_FN (BUILT_IN_SCALBN):
2174 CASE_FLT_FN (BUILT_IN_SCALBLN):
2175 CASE_FLT_FN (BUILT_IN_LDEXP):
2176 op1_type = INTEGER_TYPE;
2177 default:
2178 break;
2179 }
2180
2181 if (!validate_arglist (exp, REAL_TYPE, op1_type, VOID_TYPE))
2182 return NULL_RTX;
2183
2184 arg0 = CALL_EXPR_ARG (exp, 0);
2185 arg1 = CALL_EXPR_ARG (exp, 1);
2186
2187 switch (DECL_FUNCTION_CODE (fndecl))
2188 {
2189 CASE_FLT_FN (BUILT_IN_POW):
2190 builtin_optab = pow_optab; break;
2191 CASE_FLT_FN (BUILT_IN_ATAN2):
2192 builtin_optab = atan2_optab; break;
2193 CASE_FLT_FN (BUILT_IN_SCALB):
2194 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2195 return 0;
2196 builtin_optab = scalb_optab; break;
2197 CASE_FLT_FN (BUILT_IN_SCALBN):
2198 CASE_FLT_FN (BUILT_IN_SCALBLN):
2199 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (exp)))->b != 2)
2200 return 0;
2201 /* Fall through... */
2202 CASE_FLT_FN (BUILT_IN_LDEXP):
2203 builtin_optab = ldexp_optab; break;
2204 CASE_FLT_FN (BUILT_IN_FMOD):
2205 builtin_optab = fmod_optab; break;
2206 CASE_FLT_FN (BUILT_IN_REMAINDER):
2207 CASE_FLT_FN (BUILT_IN_DREM):
2208 builtin_optab = remainder_optab; break;
2209 default:
2210 gcc_unreachable ();
2211 }
2212
2213 /* Make a suitable register to place result in. */
2214 mode = TYPE_MODE (TREE_TYPE (exp));
2215
2216 /* Before working hard, check whether the instruction is available. */
2217 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2218 return NULL_RTX;
2219
2220 result = gen_reg_rtx (mode);
2221
2222 if (! flag_errno_math || ! HONOR_NANS (mode))
2223 errno_set = false;
2224
2225 if (errno_set && optimize_insn_for_size_p ())
2226 return 0;
2227
2228 /* Always stabilize the argument list. */
2229 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2230 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2231
2232 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2233 op1 = expand_normal (arg1);
2234
2235 start_sequence ();
2236
2237 /* Compute into RESULT.
2238 Set RESULT to wherever the result comes back. */
2239 result = expand_binop (mode, builtin_optab, op0, op1,
2240 result, 0, OPTAB_DIRECT);
2241
2242 /* If we were unable to expand via the builtin, stop the sequence
2243 (without outputting the insns) and call the library function
2244 with the stabilized argument list. */
2245 if (result == 0)
2246 {
2247 end_sequence ();
2248 return expand_call (exp, target, target == const0_rtx);
2249 }
2250
2251 if (errno_set)
2252 expand_errno_check (exp, result);
2253
2254 /* Output the entire sequence. */
2255 insns = get_insns ();
2256 end_sequence ();
2257 emit_insn (insns);
2258
2259 return result;
2260 }
2261
2262 /* Expand a call to the builtin ternary math functions (fma).
2263 Return NULL_RTX if a normal call should be emitted rather than expanding the
2264 function in-line. EXP is the expression that is a call to the builtin
2265 function; if convenient, the result should be placed in TARGET.
2266 SUBTARGET may be used as the target for computing one of EXP's
2267 operands. */
2268
2269 static rtx
2270 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2271 {
2272 optab builtin_optab;
2273 rtx op0, op1, op2, insns, result;
2274 tree fndecl = get_callee_fndecl (exp);
2275 tree arg0, arg1, arg2;
2276 enum machine_mode mode;
2277
2278 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2279 return NULL_RTX;
2280
2281 arg0 = CALL_EXPR_ARG (exp, 0);
2282 arg1 = CALL_EXPR_ARG (exp, 1);
2283 arg2 = CALL_EXPR_ARG (exp, 2);
2284
2285 switch (DECL_FUNCTION_CODE (fndecl))
2286 {
2287 CASE_FLT_FN (BUILT_IN_FMA):
2288 builtin_optab = fma_optab; break;
2289 default:
2290 gcc_unreachable ();
2291 }
2292
2293 /* Make a suitable register to place result in. */
2294 mode = TYPE_MODE (TREE_TYPE (exp));
2295
2296 /* Before working hard, check whether the instruction is available. */
2297 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2298 return NULL_RTX;
2299
2300 result = gen_reg_rtx (mode);
2301
2302 /* Always stabilize the argument list. */
2303 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2304 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2305 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2306
2307 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2308 op1 = expand_normal (arg1);
2309 op2 = expand_normal (arg2);
2310
2311 start_sequence ();
2312
2313 /* Compute into RESULT.
2314 Set RESULT to wherever the result comes back. */
2315 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2316 result, 0);
2317
2318 /* If we were unable to expand via the builtin, stop the sequence
2319 (without outputting the insns) and call the library function
2320 with the stabilized argument list. */
2321 if (result == 0)
2322 {
2323 end_sequence ();
2324 return expand_call (exp, target, target == const0_rtx);
2325 }
2326
2327 /* Output the entire sequence. */
2328 insns = get_insns ();
2329 end_sequence ();
2330 emit_insn (insns);
2331
2332 return result;
2333 }
2334
2335 /* Expand a call to the builtin sin and cos math functions.
2336 Return NULL_RTX if a normal call should be emitted rather than expanding the
2337 function in-line. EXP is the expression that is a call to the builtin
2338 function; if convenient, the result should be placed in TARGET.
2339 SUBTARGET may be used as the target for computing one of EXP's
2340 operands. */
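
/* When only a sincos instruction is available, sin and cos are still
   expanded below via the sincos pattern by requesting just the one
   output that is needed; the other output register goes unused. */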
2341
2342 static rtx
2343 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2344 {
2345 optab builtin_optab;
2346 rtx op0, insns;
2347 tree fndecl = get_callee_fndecl (exp);
2348 enum machine_mode mode;
2349 tree arg;
2350
2351 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2352 return NULL_RTX;
2353
2354 arg = CALL_EXPR_ARG (exp, 0);
2355
2356 switch (DECL_FUNCTION_CODE (fndecl))
2357 {
2358 CASE_FLT_FN (BUILT_IN_SIN):
2359 CASE_FLT_FN (BUILT_IN_COS):
2360 builtin_optab = sincos_optab; break;
2361 default:
2362 gcc_unreachable ();
2363 }
2364
2365 /* Make a suitable register to place result in. */
2366 mode = TYPE_MODE (TREE_TYPE (exp));
2367
2368 /* Check if the sincos insn is available; otherwise fall back
2369 to the sin or cos insn. */
2370 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2371 switch (DECL_FUNCTION_CODE (fndecl))
2372 {
2373 CASE_FLT_FN (BUILT_IN_SIN):
2374 builtin_optab = sin_optab; break;
2375 CASE_FLT_FN (BUILT_IN_COS):
2376 builtin_optab = cos_optab; break;
2377 default:
2378 gcc_unreachable ();
2379 }
2380
2381 /* Before working hard, check whether the instruction is available. */
2382 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2383 {
2384 rtx result = gen_reg_rtx (mode);
2385
2386 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2387 need to expand the argument again. This way, we will not perform
2388 side-effects more than once. */
2389 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2390
2391 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2392
2393 start_sequence ();
2394
2395 /* Compute into RESULT.
2396 Set RESULT to wherever the result comes back. */
2397 if (builtin_optab == sincos_optab)
2398 {
2399 int ok;
2400
2401 switch (DECL_FUNCTION_CODE (fndecl))
2402 {
2403 CASE_FLT_FN (BUILT_IN_SIN):
2404 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2405 break;
2406 CASE_FLT_FN (BUILT_IN_COS):
2407 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2408 break;
2409 default:
2410 gcc_unreachable ();
2411 }
2412 gcc_assert (ok);
2413 }
2414 else
2415 result = expand_unop (mode, builtin_optab, op0, result, 0);
2416
2417 if (result != 0)
2418 {
2419 /* Output the entire sequence. */
2420 insns = get_insns ();
2421 end_sequence ();
2422 emit_insn (insns);
2423 return result;
2424 }
2425
2426 /* If we were unable to expand via the builtin, stop the sequence
2427 (without outputting the insns) and call the library function
2428 with the stabilized argument list. */
2429 end_sequence ();
2430 }
2431
2432 return expand_call (exp, target, target == const0_rtx);
2433 }
2434
2435 /* Given an interclass math builtin decl FNDECL and its argument ARG
2436 return an RTL instruction code that implements the functionality.
2437 If that isn't possible or available return CODE_FOR_nothing. */
2438
2439 static enum insn_code
2440 interclass_mathfn_icode (tree arg, tree fndecl)
2441 {
2442 bool errno_set = false;
2443 optab builtin_optab = unknown_optab;
2444 enum machine_mode mode;
2445
2446 switch (DECL_FUNCTION_CODE (fndecl))
2447 {
2448 CASE_FLT_FN (BUILT_IN_ILOGB):
2449 errno_set = true; builtin_optab = ilogb_optab; break;
2450 CASE_FLT_FN (BUILT_IN_ISINF):
2451 builtin_optab = isinf_optab; break;
2452 case BUILT_IN_ISNORMAL:
2453 case BUILT_IN_ISFINITE:
2454 CASE_FLT_FN (BUILT_IN_FINITE):
2455 case BUILT_IN_FINITED32:
2456 case BUILT_IN_FINITED64:
2457 case BUILT_IN_FINITED128:
2458 case BUILT_IN_ISINFD32:
2459 case BUILT_IN_ISINFD64:
2460 case BUILT_IN_ISINFD128:
2461 /* These builtins have no optabs (yet). */
2462 break;
2463 default:
2464 gcc_unreachable ();
2465 }
2466
2467 /* There's no easy way to detect the case we need to set EDOM. */
2468 if (flag_errno_math && errno_set)
2469 return CODE_FOR_nothing;
2470
2471 /* Optab mode depends on the mode of the input argument. */
2472 mode = TYPE_MODE (TREE_TYPE (arg));
2473
2474 if (builtin_optab)
2475 return optab_handler (builtin_optab, mode);
2476 return CODE_FOR_nothing;
2477 }
2478
2479 /* Expand a call to one of the builtin math functions that operate on
2480 a floating point argument and output an integer result (ilogb, isinf,
2481 isnan, etc.).
2482 Return 0 if a normal call should be emitted rather than expanding the
2483 function in-line. EXP is the expression that is a call to the builtin
2484 function; if convenient, the result should be placed in TARGET. */
2485
2486 static rtx
2487 expand_builtin_interclass_mathfn (tree exp, rtx target)
2488 {
2489 enum insn_code icode = CODE_FOR_nothing;
2490 rtx op0;
2491 tree fndecl = get_callee_fndecl (exp);
2492 enum machine_mode mode;
2493 tree arg;
2494
2495 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2496 return NULL_RTX;
2497
2498 arg = CALL_EXPR_ARG (exp, 0);
2499 icode = interclass_mathfn_icode (arg, fndecl);
2500 mode = TYPE_MODE (TREE_TYPE (arg));
2501
2502 if (icode != CODE_FOR_nothing)
2503 {
2504 struct expand_operand ops[1];
2505 rtx last = get_last_insn ();
2506 tree orig_arg = arg;
2507
2508 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2509 need to expand the argument again. This way, we will not perform
2510 side-effects more than once. */
2511 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2512
2513 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2514
2515 if (mode != GET_MODE (op0))
2516 op0 = convert_to_mode (mode, op0, 0);
2517
2518 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2519 if (maybe_legitimize_operands (icode, 0, 1, ops)
2520 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2521 return ops[0].value;
2522
2523 delete_insns_since (last);
2524 CALL_EXPR_ARG (exp, 0) = orig_arg;
2525 }
2526
2527 return NULL_RTX;
2528 }
2529
2530 /* Expand a call to the builtin sincos math function.
2531 Return NULL_RTX if a normal call should be emitted rather than expanding the
2532 function in-line. EXP is the expression that is a call to the builtin
2533 function. */
2534
2535 static rtx
2536 expand_builtin_sincos (tree exp)
2537 {
2538 rtx op0, op1, op2, target1, target2;
2539 enum machine_mode mode;
2540 tree arg, sinp, cosp;
2541 int result;
2542 location_t loc = EXPR_LOCATION (exp);
2543 tree alias_type, alias_off;
2544
2545 if (!validate_arglist (exp, REAL_TYPE,
2546 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2547 return NULL_RTX;
2548
2549 arg = CALL_EXPR_ARG (exp, 0);
2550 sinp = CALL_EXPR_ARG (exp, 1);
2551 cosp = CALL_EXPR_ARG (exp, 2);
2552
2553 /* Make a suitable register to place result in. */
2554 mode = TYPE_MODE (TREE_TYPE (arg));
2555
2556 /* Check if sincos insn is available, otherwise emit the call. */
2557 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2558 return NULL_RTX;
2559
2560 target1 = gen_reg_rtx (mode);
2561 target2 = gen_reg_rtx (mode);
2562
2563 op0 = expand_normal (arg);
2564 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2565 alias_off = build_int_cst (alias_type, 0);
2566 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2567 sinp, alias_off));
2568 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2569 cosp, alias_off));
2570
2571 /* Compute the sine into target1 and the cosine into target2. */
2573 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2574 gcc_assert (result);
2575
2576 /* Move target1 and target2 to the memory locations indicated
2577 by op1 and op2. */
2578 emit_move_insn (op1, target1);
2579 emit_move_insn (op2, target2);
2580
2581 return const0_rtx;
2582 }
2583
2584 /* Expand a call to the internal cexpi builtin to the sincos math function.
2585 EXP is the expression that is a call to the builtin function; if convenient,
2586 the result should be placed in TARGET. */
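
/* Mathematically, cexpi (x) == cos (x) + i*sin (x), so the expansion only
   needs some way of computing the sin/cos pair: the sincos optab, a
   sincos libcall, or a call to cexp (0 + x*i) as a last resort. */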
2587
2588 static rtx
2589 expand_builtin_cexpi (tree exp, rtx target)
2590 {
2591 tree fndecl = get_callee_fndecl (exp);
2592 tree arg, type;
2593 enum machine_mode mode;
2594 rtx op0, op1, op2;
2595 location_t loc = EXPR_LOCATION (exp);
2596
2597 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2598 return NULL_RTX;
2599
2600 arg = CALL_EXPR_ARG (exp, 0);
2601 type = TREE_TYPE (arg);
2602 mode = TYPE_MODE (TREE_TYPE (arg));
2603
2604 /* Try expanding via a sincos optab; fall back to emitting a libcall
2605 to sincos or cexp. We are sure to have sincos or cexp because cexpi
2606 is only generated from sincos or cexp, or when we know we have either. */
2607 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2608 {
2609 op1 = gen_reg_rtx (mode);
2610 op2 = gen_reg_rtx (mode);
2611
2612 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2613
2614 /* Compute into op1 and op2. */
2615 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2616 }
2617 else if (targetm.libc_has_function (function_sincos))
2618 {
2619 tree call, fn = NULL_TREE;
2620 tree top1, top2;
2621 rtx op1a, op2a;
2622
2623 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2624 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2625 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2626 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2627 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2628 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2629 else
2630 gcc_unreachable ();
2631
2632 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2633 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2634 op1a = copy_addr_to_reg (XEXP (op1, 0));
2635 op2a = copy_addr_to_reg (XEXP (op2, 0));
2636 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2637 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2638
2639 /* Make sure not to fold the sincos call again. */
2640 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2641 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2642 call, 3, arg, top1, top2));
2643 }
2644 else
2645 {
2646 tree call, fn = NULL_TREE, narg;
2647 tree ctype = build_complex_type (type);
2648
2649 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2650 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2651 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2652 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2653 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2654 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2655 else
2656 gcc_unreachable ();
2657
2658 /* If we don't have a decl for cexp, create one. This is the
2659 friendliest fallback if the user calls __builtin_cexpi
2660 on a target without full C99 function support. */
2661 if (fn == NULL_TREE)
2662 {
2663 tree fntype;
2664 const char *name = NULL;
2665
2666 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2667 name = "cexpf";
2668 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2669 name = "cexp";
2670 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2671 name = "cexpl";
2672
2673 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2674 fn = build_fn_decl (name, fntype);
2675 }
2676
2677 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2678 build_real (type, dconst0), arg);
2679
2680 /* Make sure not to fold the cexp call again. */
2681 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2682 return expand_expr (build_call_nary (ctype, call, 1, narg),
2683 target, VOIDmode, EXPAND_NORMAL);
2684 }
2685
2686 /* Now build the proper return type. */
2687 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2688 make_tree (TREE_TYPE (arg), op2),
2689 make_tree (TREE_TYPE (arg), op1)),
2690 target, VOIDmode, EXPAND_NORMAL);
2691 }
2692
2693 /* Conveniently construct a function call expression. FNDECL names the
2694 function to be called, N is the number of arguments, and the "..."
2695 parameters are the argument expressions. Unlike build_call_expr
2696 this doesn't fold the call, hence it will always return a CALL_EXPR. */
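
/* A typical use, taken from the lfloor/lceil fallback path later in
   this file:

     exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl,
                                  1, arg);
*/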
2697
2698 static tree
2699 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2700 {
2701 va_list ap;
2702 tree fntype = TREE_TYPE (fndecl);
2703 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2704
2705 va_start (ap, n);
2706 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2707 va_end (ap);
2708 SET_EXPR_LOCATION (fn, loc);
2709 return fn;
2710 }
2711
2712 /* Expand a call to one of the builtin rounding functions gcc defines
2713 as an extension (lfloor and lceil). As these are gcc extensions we
2714 do not need to worry about setting errno to EDOM.
2715 If expanding via the optab fails, lower the expression to (int) floor (x).
2716 EXP is the expression that is a call to the builtin function;
2717 if convenient, the result should be placed in TARGET. */
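
/* E.g. __builtin_lfloor (x) is effectively lowered to (long) floor (x),
   expanded via expand_fix below, whenever no lfloor instruction pattern
   exists for the mode of X. */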
2718
2719 static rtx
2720 expand_builtin_int_roundingfn (tree exp, rtx target)
2721 {
2722 convert_optab builtin_optab;
2723 rtx op0, insns, tmp;
2724 tree fndecl = get_callee_fndecl (exp);
2725 enum built_in_function fallback_fn;
2726 tree fallback_fndecl;
2727 enum machine_mode mode;
2728 tree arg;
2729
2730 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2731 gcc_unreachable ();
2732
2733 arg = CALL_EXPR_ARG (exp, 0);
2734
2735 switch (DECL_FUNCTION_CODE (fndecl))
2736 {
2737 CASE_FLT_FN (BUILT_IN_ICEIL):
2738 CASE_FLT_FN (BUILT_IN_LCEIL):
2739 CASE_FLT_FN (BUILT_IN_LLCEIL):
2740 builtin_optab = lceil_optab;
2741 fallback_fn = BUILT_IN_CEIL;
2742 break;
2743
2744 CASE_FLT_FN (BUILT_IN_IFLOOR):
2745 CASE_FLT_FN (BUILT_IN_LFLOOR):
2746 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2747 builtin_optab = lfloor_optab;
2748 fallback_fn = BUILT_IN_FLOOR;
2749 break;
2750
2751 default:
2752 gcc_unreachable ();
2753 }
2754
2755 /* Make a suitable register to place result in. */
2756 mode = TYPE_MODE (TREE_TYPE (exp));
2757
2758 target = gen_reg_rtx (mode);
2759
2760 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2761 need to expand the argument again. This way, we will not perform
2762 side-effects more than once. */
2763 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2764
2765 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2766
2767 start_sequence ();
2768
2769 /* Compute into TARGET. */
2770 if (expand_sfix_optab (target, op0, builtin_optab))
2771 {
2772 /* Output the entire sequence. */
2773 insns = get_insns ();
2774 end_sequence ();
2775 emit_insn (insns);
2776 return target;
2777 }
2778
2779 /* If we were unable to expand via the builtin, stop the sequence
2780 (without outputting the insns). */
2781 end_sequence ();
2782
2783 /* Fall back to floating point rounding optab. */
2784 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2785
2786 /* For non-C99 targets we may end up without a fallback fndecl here
2787 if the user called __builtin_lfloor directly. In this case emit
2788 a call to the floor/ceil variants nevertheless. This should result
2789 in the best user experience for targets without full C99 support. */
2790 if (fallback_fndecl == NULL_TREE)
2791 {
2792 tree fntype;
2793 const char *name = NULL;
2794
2795 switch (DECL_FUNCTION_CODE (fndecl))
2796 {
2797 case BUILT_IN_ICEIL:
2798 case BUILT_IN_LCEIL:
2799 case BUILT_IN_LLCEIL:
2800 name = "ceil";
2801 break;
2802 case BUILT_IN_ICEILF:
2803 case BUILT_IN_LCEILF:
2804 case BUILT_IN_LLCEILF:
2805 name = "ceilf";
2806 break;
2807 case BUILT_IN_ICEILL:
2808 case BUILT_IN_LCEILL:
2809 case BUILT_IN_LLCEILL:
2810 name = "ceill";
2811 break;
2812 case BUILT_IN_IFLOOR:
2813 case BUILT_IN_LFLOOR:
2814 case BUILT_IN_LLFLOOR:
2815 name = "floor";
2816 break;
2817 case BUILT_IN_IFLOORF:
2818 case BUILT_IN_LFLOORF:
2819 case BUILT_IN_LLFLOORF:
2820 name = "floorf";
2821 break;
2822 case BUILT_IN_IFLOORL:
2823 case BUILT_IN_LFLOORL:
2824 case BUILT_IN_LLFLOORL:
2825 name = "floorl";
2826 break;
2827 default:
2828 gcc_unreachable ();
2829 }
2830
2831 fntype = build_function_type_list (TREE_TYPE (arg),
2832 TREE_TYPE (arg), NULL_TREE);
2833 fallback_fndecl = build_fn_decl (name, fntype);
2834 }
2835
2836 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2837
2838 tmp = expand_normal (exp);
2839 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2840
2841 /* Truncate the result of the floating point optab to integer
2842 via expand_fix (). */
2843 target = gen_reg_rtx (mode);
2844 expand_fix (target, tmp, 0);
2845
2846 return target;
2847 }
2848
2849 /* Expand a call to one of the builtin math functions doing integer
2850 conversion (lrint).
2851 Return 0 if a normal call should be emitted rather than expanding the
2852 function in-line. EXP is the expression that is a call to the builtin
2853 function; if convenient, the result should be placed in TARGET. */
2854
2855 static rtx
2856 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2857 {
2858 convert_optab builtin_optab;
2859 rtx op0, insns;
2860 tree fndecl = get_callee_fndecl (exp);
2861 tree arg;
2862 enum machine_mode mode;
2863 enum built_in_function fallback_fn = BUILT_IN_NONE;
2864
2865 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2866 gcc_unreachable ();
2867
2868 arg = CALL_EXPR_ARG (exp, 0);
2869
2870 switch (DECL_FUNCTION_CODE (fndecl))
2871 {
2872 CASE_FLT_FN (BUILT_IN_IRINT):
2873 fallback_fn = BUILT_IN_LRINT;
2874 /* FALLTHRU */
2875 CASE_FLT_FN (BUILT_IN_LRINT):
2876 CASE_FLT_FN (BUILT_IN_LLRINT):
2877 builtin_optab = lrint_optab;
2878 break;
2879
2880 CASE_FLT_FN (BUILT_IN_IROUND):
2881 fallback_fn = BUILT_IN_LROUND;
2882 /* FALLTHRU */
2883 CASE_FLT_FN (BUILT_IN_LROUND):
2884 CASE_FLT_FN (BUILT_IN_LLROUND):
2885 builtin_optab = lround_optab;
2886 break;
2887
2888 default:
2889 gcc_unreachable ();
2890 }
2891
2892 /* There's no easy way to detect the case we need to set EDOM. */
2893 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2894 return NULL_RTX;
2895
2896 /* Make a suitable register to place result in. */
2897 mode = TYPE_MODE (TREE_TYPE (exp));
2898
2899 /* Try expanding the operation inline; that is only valid when errno does not need to be set. */
2900 if (!flag_errno_math)
2901 {
2902 rtx result = gen_reg_rtx (mode);
2903
2904 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2905 need to expand the argument again. This way, we will not perform
2906 side-effects more than once. */
2907 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2908
2909 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2910
2911 start_sequence ();
2912
2913 if (expand_sfix_optab (result, op0, builtin_optab))
2914 {
2915 /* Output the entire sequence. */
2916 insns = get_insns ();
2917 end_sequence ();
2918 emit_insn (insns);
2919 return result;
2920 }
2921
2922 /* If we were unable to expand via the builtin, stop the sequence
2923 (without outputting the insns) and call to the library function
2924 with the stabilized argument list. */
2925 end_sequence ();
2926 }
2927
2928 if (fallback_fn != BUILT_IN_NONE)
2929 {
2930 /* Fall back to rounding to long int. Use implicit_p 0: for non-C99
2931 targets, (int) round (x) should never be transformed into
2932 BUILT_IN_IROUND, and if __builtin_iround is called directly, emit
2933 a call to lround in the hope that the target provides at least some
2934 C99 functions. This should result in the best user experience for
2935 targets without full C99 support. */
2936 tree fallback_fndecl = mathfn_built_in_1 (TREE_TYPE (arg),
2937 fallback_fn, 0);
2938
2939 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2940 fallback_fndecl, 1, arg);
2941
2942 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2943 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2944 return convert_to_mode (mode, target, 0);
2945 }
2946
2947 return expand_call (exp, target, target == const0_rtx);
2948 }
2949
2950 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2951 a normal call should be emitted rather than expanding the function
2952 in-line. EXP is the expression that is a call to the builtin
2953 function; if convenient, the result should be placed in TARGET. */
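
/* There is no inline expansion here: e.g. __builtin_powi (x, n) with
   double X simply becomes a call to libgcc's powi routine (__powidf2 on
   typical targets), as emitted below via emit_library_call_value. */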
2954
2955 static rtx
2956 expand_builtin_powi (tree exp, rtx target)
2957 {
2958 tree arg0, arg1;
2959 rtx op0, op1;
2960 enum machine_mode mode;
2961 enum machine_mode mode2;
2962
2963 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2964 return NULL_RTX;
2965
2966 arg0 = CALL_EXPR_ARG (exp, 0);
2967 arg1 = CALL_EXPR_ARG (exp, 1);
2968 mode = TYPE_MODE (TREE_TYPE (exp));
2969
2970 /* Emit a libcall to libgcc. */
2971
2972 /* Mode of the 2nd argument must match that of an int. */
2973 mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2974
2975 if (target == NULL_RTX)
2976 target = gen_reg_rtx (mode);
2977
2978 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2979 if (GET_MODE (op0) != mode)
2980 op0 = convert_to_mode (mode, op0, 0);
2981 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2982 if (GET_MODE (op1) != mode2)
2983 op1 = convert_to_mode (mode2, op1, 0);
2984
2985 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2986 target, LCT_CONST, mode, 2,
2987 op0, mode, op1, mode2);
2988
2989 return target;
2990 }
2991
2992 /* Expand expression EXP which is a call to the strlen builtin. Return
2993 NULL_RTX if we failed; the caller should emit a normal call. Otherwise
2994 try to get the result in TARGET, if convenient. */
2995
2996 static rtx
2997 expand_builtin_strlen (tree exp, rtx target,
2998 enum machine_mode target_mode)
2999 {
3000 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3001 return NULL_RTX;
3002 else
3003 {
3004 struct expand_operand ops[4];
3005 rtx pat;
3006 tree len;
3007 tree src = CALL_EXPR_ARG (exp, 0);
3008 rtx src_reg, before_strlen;
3009 enum machine_mode insn_mode = target_mode;
3010 enum insn_code icode = CODE_FOR_nothing;
3011 unsigned int align;
3012
3013 /* If the length can be computed at compile-time, return it. */
3014 len = c_strlen (src, 0);
3015 if (len)
3016 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3017
3018 /* If the length can be computed at compile-time and is a constant
3019 integer, but there are side-effects in src, evaluate
3020 src for side-effects, then return len.
3021 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3022 can be optimized into: i++; x = 3; */
3023 len = c_strlen (src, 1);
3024 if (len && TREE_CODE (len) == INTEGER_CST)
3025 {
3026 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3027 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3028 }
3029
3030 align = get_pointer_alignment (src) / BITS_PER_UNIT;
3031
3032 /* If SRC is not a pointer type, don't do this operation inline. */
3033 if (align == 0)
3034 return NULL_RTX;
3035
3036 /* Bail out if we can't compute strlen in the right mode. */
3037 while (insn_mode != VOIDmode)
3038 {
3039 icode = optab_handler (strlen_optab, insn_mode);
3040 if (icode != CODE_FOR_nothing)
3041 break;
3042
3043 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
3044 }
3045 if (insn_mode == VOIDmode)
3046 return NULL_RTX;
3047
3048 /* Make a place to hold the source address. We will not expand
3049 the actual source until we are sure that the expansion will
3050 not fail -- there are trees that cannot be expanded twice. */
3051 src_reg = gen_reg_rtx (Pmode);
3052
3053 /* Mark the beginning of the strlen sequence so we can emit the
3054 source operand later. */
3055 before_strlen = get_last_insn ();
3056
3057 create_output_operand (&ops[0], target, insn_mode);
3058 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3059 create_integer_operand (&ops[2], 0);
3060 create_integer_operand (&ops[3], align);
3061 if (!maybe_expand_insn (icode, 4, ops))
3062 return NULL_RTX;
3063
3064 /* Now that we are assured of success, expand the source. */
3065 start_sequence ();
3066 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3067 if (pat != src_reg)
3068 {
3069 #ifdef POINTERS_EXTEND_UNSIGNED
3070 if (GET_MODE (pat) != Pmode)
3071 pat = convert_to_mode (Pmode, pat,
3072 POINTERS_EXTEND_UNSIGNED);
3073 #endif
3074 emit_move_insn (src_reg, pat);
3075 }
3076 pat = get_insns ();
3077 end_sequence ();
3078
3079 if (before_strlen)
3080 emit_insn_after (pat, before_strlen);
3081 else
3082 emit_insn_before (pat, get_insns ());
3083
3084 /* Return the value in the proper mode for this function. */
3085 if (GET_MODE (ops[0].value) == target_mode)
3086 target = ops[0].value;
3087 else if (target != 0)
3088 convert_move (target, ops[0].value, 0);
3089 else
3090 target = convert_to_mode (target_mode, ops[0].value, 0);
3091
3092 return target;
3093 }
3094 }
3095
3096 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3097 bytes from constant string DATA + OFFSET and return it as target
3098 constant. */
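
/* store_by_pieces invokes this callback once per machine-mode-sized
   chunk; e.g. a copy of "hello" might be requested as 4 bytes at offset
   0 followed by 2 bytes at offset 4, each chunk returned here as an
   immediate constant via c_readstr. */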
3099
3100 static rtx
3101 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3102 enum machine_mode mode)
3103 {
3104 const char *str = (const char *) data;
3105
3106 gcc_assert (offset >= 0
3107 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3108 <= strlen (str) + 1));
3109
3110 return c_readstr (str + offset, mode);
3111 }
3112
3113 /* LEN specifies the length of the block of a memcpy/memset operation.
3114 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3115 In some cases we can make a very likely guess on the max size, which
3116 we then store in PROBABLE_MAX_SIZE. */
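
/* E.g. for memcpy (a, b, 32) all three outputs are simply 32; for a
   variable length the bounds start from LEN's type and, when LEN is an
   SSA_NAME, are narrowed by recorded value-range information. */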
3117
3118 static void
3119 determine_block_size (tree len, rtx len_rtx,
3120 unsigned HOST_WIDE_INT *min_size,
3121 unsigned HOST_WIDE_INT *max_size,
3122 unsigned HOST_WIDE_INT *probable_max_size)
3123 {
3124 if (CONST_INT_P (len_rtx))
3125 {
3126 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3127 return;
3128 }
3129 else
3130 {
3131 double_int min, max;
3132 enum value_range_type range_type = VR_UNDEFINED;
3133
3134 /* Determine bounds from the type. */
3135 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3136 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3137 else
3138 *min_size = 0;
3139 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3140 *probable_max_size = *max_size
3141 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3142 else
3143 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3144
3145 if (TREE_CODE (len) == SSA_NAME)
3146 range_type = get_range_info (len, &min, &max);
3147 if (range_type == VR_RANGE)
3148 {
3149 if (min.fits_uhwi () && *min_size < min.to_uhwi ())
3150 *min_size = min.to_uhwi ();
3151 if (max.fits_uhwi () && *max_size > max.to_uhwi ())
3152 *probable_max_size = *max_size = max.to_uhwi ();
3153 }
3154 else if (range_type == VR_ANTI_RANGE)
3155 {
3156 /* An anti range 0...N lets us determine that the minimal size is N+1. */
3157 if (min.is_zero ())
3158 {
3159 if ((max + double_int_one).fits_uhwi ())
3160 *min_size = (max + double_int_one).to_uhwi ();
3161 }
3162 /* Code like
3163
3164 int n;
3165 if (n < 100)
3166 memcpy (a, b, n);
3167
3168 produces an anti range allowing negative values of N. We can
3169 still use that information to guess that N is not negative.
3170 */
3171 else if (!max.ule (double_int_one.lshift (30))
3172 && min.fits_uhwi ())
3173 *probable_max_size = min.to_uhwi () - 1;
3174 }
3175 }
3176 gcc_checking_assert (*max_size <=
3177 (unsigned HOST_WIDE_INT)
3178 GET_MODE_MASK (GET_MODE (len_rtx)));
3179 }
3180
3181 /* Expand a call EXP to the memcpy builtin.
3182 Return NULL_RTX if we failed; the caller should emit a normal call.
3183 Otherwise try to get the result in TARGET, if convenient. */
3185
3186 static rtx
3187 expand_builtin_memcpy (tree exp, rtx target)
3188 {
3189 if (!validate_arglist (exp,
3190 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3191 return NULL_RTX;
3192 else
3193 {
3194 tree dest = CALL_EXPR_ARG (exp, 0);
3195 tree src = CALL_EXPR_ARG (exp, 1);
3196 tree len = CALL_EXPR_ARG (exp, 2);
3197 const char *src_str;
3198 unsigned int src_align = get_pointer_alignment (src);
3199 unsigned int dest_align = get_pointer_alignment (dest);
3200 rtx dest_mem, src_mem, dest_addr, len_rtx;
3201 HOST_WIDE_INT expected_size = -1;
3202 unsigned int expected_align = 0;
3203 unsigned HOST_WIDE_INT min_size;
3204 unsigned HOST_WIDE_INT max_size;
3205 unsigned HOST_WIDE_INT probable_max_size;
3206
3207 /* If DEST is not a pointer type, call the normal function. */
3208 if (dest_align == 0)
3209 return NULL_RTX;
3210
3211 /* If SRC is not a pointer type, don't do this
3212 operation in-line. */
3213 if (src_align == 0)
3214 return NULL_RTX;
3215
3216 if (currently_expanding_gimple_stmt)
3217 stringop_block_profile (currently_expanding_gimple_stmt,
3218 &expected_align, &expected_size);
3219
3220 if (expected_align < dest_align)
3221 expected_align = dest_align;
3222 dest_mem = get_memory_rtx (dest, len);
3223 set_mem_align (dest_mem, dest_align);
3224 len_rtx = expand_normal (len);
3225 determine_block_size (len, len_rtx, &min_size, &max_size,
3226 &probable_max_size);
3227 src_str = c_getstr (src);
3228
3229 /* If SRC is a string constant and the block move would be done
3230 by pieces, we can avoid loading the string from memory
3231 and only store the computed constants. */
3232 if (src_str
3233 && CONST_INT_P (len_rtx)
3234 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3235 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3236 CONST_CAST (char *, src_str),
3237 dest_align, false))
3238 {
3239 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3240 builtin_memcpy_read_str,
3241 CONST_CAST (char *, src_str),
3242 dest_align, false, 0);
3243 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3244 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3245 return dest_mem;
3246 }
3247
3248 src_mem = get_memory_rtx (src, len);
3249 set_mem_align (src_mem, src_align);
3250
3251 /* Copy word part most expediently. */
3252 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
3253 CALL_EXPR_TAILCALL (exp)
3254 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3255 expected_align, expected_size,
3256 min_size, max_size, probable_max_size);
3257
3258 if (dest_addr == 0)
3259 {
3260 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3261 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3262 }
3263 return dest_addr;
3264 }
3265 }
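
/* For example, given

     memcpy (buf, "abc", 4);

   the store_by_pieces branch above writes the four constant bytes
   (including the NUL) directly, instead of loading them from the
   string literal's memory at run time. */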
3266
3267 /* Expand a call EXP to the mempcpy builtin.
3268 Return NULL_RTX if we failed; the caller should emit a normal call,
3269 otherwise try to get the result in TARGET, if convenient (and in
3270 mode MODE if that's convenient). If ENDP is 0 return the
3271 destination pointer, if ENDP is 1 return the end pointer ala
3272 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3273 stpcpy. */
3274
3275 static rtx
3276 expand_builtin_mempcpy (tree exp, rtx target, enum machine_mode mode)
3277 {
3278 if (!validate_arglist (exp,
3279 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3280 return NULL_RTX;
3281 else
3282 {
3283 tree dest = CALL_EXPR_ARG (exp, 0);
3284 tree src = CALL_EXPR_ARG (exp, 1);
3285 tree len = CALL_EXPR_ARG (exp, 2);
3286 return expand_builtin_mempcpy_args (dest, src, len,
3287 target, mode, /*endp=*/ 1);
3288 }
3289 }
3290
3291 /* Helper function to do the actual work for expand_builtin_mempcpy. The
3292 arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3293 so that this can also be called without constructing an actual CALL_EXPR.
3294 The other arguments and return value are the same as for
3295 expand_builtin_mempcpy. */
3296
3297 static rtx
3298 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3299 rtx target, enum machine_mode mode, int endp)
3300 {
3301 /* If return value is ignored, transform mempcpy into memcpy. */
3302 if (target == const0_rtx && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3303 {
3304 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3305 tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3306 dest, src, len);
3307 return expand_expr (result, target, mode, EXPAND_NORMAL);
3308 }
3309 else
3310 {
3311 const char *src_str;
3312 unsigned int src_align = get_pointer_alignment (src);
3313 unsigned int dest_align = get_pointer_alignment (dest);
3314 rtx dest_mem, src_mem, len_rtx;
3315
3316 /* If either SRC or DEST is not a pointer type, don't do this
3317 operation in-line. */
3318 if (dest_align == 0 || src_align == 0)
3319 return NULL_RTX;
3320
3321 /* If LEN is not constant, call the normal function. */
3322 if (! tree_fits_uhwi_p (len))
3323 return NULL_RTX;
3324
3325 len_rtx = expand_normal (len);
3326 src_str = c_getstr (src);
3327
3328 /* If SRC is a string constant and block move would be done
3329 by pieces, we can avoid loading the string from memory
3330 and only store the computed constants. */
3331 if (src_str
3332 && CONST_INT_P (len_rtx)
3333 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3334 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3335 CONST_CAST (char *, src_str),
3336 dest_align, false))
3337 {
3338 dest_mem = get_memory_rtx (dest, len);
3339 set_mem_align (dest_mem, dest_align);
3340 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3341 builtin_memcpy_read_str,
3342 CONST_CAST (char *, src_str),
3343 dest_align, false, endp);
3344 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3345 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3346 return dest_mem;
3347 }
3348
3349 if (CONST_INT_P (len_rtx)
3350 && can_move_by_pieces (INTVAL (len_rtx),
3351 MIN (dest_align, src_align)))
3352 {
3353 dest_mem = get_memory_rtx (dest, len);
3354 set_mem_align (dest_mem, dest_align);
3355 src_mem = get_memory_rtx (src, len);
3356 set_mem_align (src_mem, src_align);
3357 dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3358 MIN (dest_align, src_align), endp);
3359 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3360 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3361 return dest_mem;
3362 }
3363
3364 return NULL_RTX;
3365 }
3366 }
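
/* Illustration of the ENDP encodings used above: after copying N
   bytes to D, the expansion returns D for ENDP == 0 (memcpy),
   D + N for ENDP == 1 (mempcpy), and D + N - 1 for ENDP == 2
   (stpcpy, which points at the stored NUL terminator). */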
3367
3368 #ifndef HAVE_movstr
3369 # define HAVE_movstr 0
3370 # define CODE_FOR_movstr CODE_FOR_nothing
3371 #endif
3372
3373 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3374 we failed; the caller should emit a normal call, otherwise try to
3375 get the result in TARGET, if convenient. If ENDP is 0 return the
3376 destination pointer, if ENDP is 1 return the end pointer ala
3377 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3378 stpcpy. */
3379
3380 static rtx
3381 expand_movstr (tree dest, tree src, rtx target, int endp)
3382 {
3383 struct expand_operand ops[3];
3384 rtx dest_mem;
3385 rtx src_mem;
3386
3387 if (!HAVE_movstr)
3388 return NULL_RTX;
3389
3390 dest_mem = get_memory_rtx (dest, NULL);
3391 src_mem = get_memory_rtx (src, NULL);
3392 if (!endp)
3393 {
3394 target = force_reg (Pmode, XEXP (dest_mem, 0));
3395 dest_mem = replace_equiv_address (dest_mem, target);
3396 }
3397
3398 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3399 create_fixed_operand (&ops[1], dest_mem);
3400 create_fixed_operand (&ops[2], src_mem);
3401 if (!maybe_expand_insn (CODE_FOR_movstr, 3, ops))
3402 return NULL_RTX;
3403
3404 if (endp && target != const0_rtx)
3405 {
3406 target = ops[0].value;
3407 /* movstr is supposed to set end to the address of the NUL
3408 terminator. If the caller requested a mempcpy-like return value,
3409 adjust it. */
3410 if (endp == 1)
3411 {
3412 rtx tem = plus_constant (GET_MODE (target),
3413 gen_lowpart (GET_MODE (target), target), 1);
3414 emit_move_insn (target, force_operand (tem, NULL_RTX));
3415 }
3416 }
3417 return target;
3418 }
3419
3420 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3421 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3422 try to get the result in TARGET, if convenient (and in mode MODE if that's
3423 convenient). */
3424
3425 static rtx
3426 expand_builtin_strcpy (tree exp, rtx target)
3427 {
3428 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3429 {
3430 tree dest = CALL_EXPR_ARG (exp, 0);
3431 tree src = CALL_EXPR_ARG (exp, 1);
3432 return expand_builtin_strcpy_args (dest, src, target);
3433 }
3434 return NULL_RTX;
3435 }
3436
3437 /* Helper function to do the actual work for expand_builtin_strcpy. The
3438 arguments to the builtin_strcpy call DEST and SRC are broken out
3439 so that this can also be called without constructing an actual CALL_EXPR.
3440 The other arguments and return value are the same as for
3441 expand_builtin_strcpy. */
3442
3443 static rtx
3444 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3445 {
3446 return expand_movstr (dest, src, target, /*endp=*/0);
3447 }
3448
3449 /* Expand a call EXP to the stpcpy builtin.
3450 Return NULL_RTX if we failed; the caller should emit a normal call,
3451 otherwise try to get the result in TARGET, if convenient (and in
3452 mode MODE if that's convenient). */
3453
3454 static rtx
3455 expand_builtin_stpcpy (tree exp, rtx target, enum machine_mode mode)
3456 {
3457 tree dst, src;
3458 location_t loc = EXPR_LOCATION (exp);
3459
3460 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3461 return NULL_RTX;
3462
3463 dst = CALL_EXPR_ARG (exp, 0);
3464 src = CALL_EXPR_ARG (exp, 1);
3465
3466 /* If return value is ignored, transform stpcpy into strcpy. */
3467 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3468 {
3469 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3470 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3471 return expand_expr (result, target, mode, EXPAND_NORMAL);
3472 }
3473 else
3474 {
3475 tree len, lenp1;
3476 rtx ret;
3477
3478 /* Ensure we get an actual string whose length can be evaluated at
3479 compile-time, not an expression containing a string. This is
3480 because the latter will potentially produce pessimized code
3481 when used to produce the return value. */
3482 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3483 return expand_movstr (dst, src, target, /*endp=*/2);
3484
3485 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3486 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3487 target, mode, /*endp=*/2);
3488
3489 if (ret)
3490 return ret;
3491
3492 if (TREE_CODE (len) == INTEGER_CST)
3493 {
3494 rtx len_rtx = expand_normal (len);
3495
3496 if (CONST_INT_P (len_rtx))
3497 {
3498 ret = expand_builtin_strcpy_args (dst, src, target);
3499
3500 if (ret)
3501 {
3502 if (! target)
3503 {
3504 if (mode != VOIDmode)
3505 target = gen_reg_rtx (mode);
3506 else
3507 target = gen_reg_rtx (GET_MODE (ret));
3508 }
3509 if (GET_MODE (target) != GET_MODE (ret))
3510 ret = gen_lowpart (GET_MODE (target), ret);
3511
3512 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3513 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3514 gcc_assert (ret);
3515
3516 return target;
3517 }
3518 }
3519 }
3520
3521 return expand_movstr (dst, src, target, /*endp=*/2);
3522 }
3523 }
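
/* For example, with a constant source

     char *p = stpcpy (buf, "hi");

   the code above expands a mempcpy of strlen ("hi") + 1 == 3 bytes
   with ENDP == 2, so p ends up as buf + 2, the address of the
   stored NUL. */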
3524
3525 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3526 bytes from constant string DATA + OFFSET and return it as target
3527 constant. */
3528
3529 rtx
3530 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3531 enum machine_mode mode)
3532 {
3533 const char *str = (const char *) data;
3534
3535 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3536 return const0_rtx;
3537
3538 return c_readstr (str + offset, mode);
3539 }
3540
3541 /* Expand expression EXP, which is a call to the strncpy builtin. Return
3542 NULL_RTX if we failed; the caller should emit a normal call. */
3543
3544 static rtx
3545 expand_builtin_strncpy (tree exp, rtx target)
3546 {
3547 location_t loc = EXPR_LOCATION (exp);
3548
3549 if (validate_arglist (exp,
3550 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3551 {
3552 tree dest = CALL_EXPR_ARG (exp, 0);
3553 tree src = CALL_EXPR_ARG (exp, 1);
3554 tree len = CALL_EXPR_ARG (exp, 2);
3555 tree slen = c_strlen (src, 1);
3556
3557 /* We must be passed a constant len and src parameter. */
3558 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3559 return NULL_RTX;
3560
3561 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3562
3563 /* We're required to pad with trailing zeros if the requested
3564 len is greater than strlen(s2)+1. In that case try to
3565 use store_by_pieces; if it fails, punt. */
3566 if (tree_int_cst_lt (slen, len))
3567 {
3568 unsigned int dest_align = get_pointer_alignment (dest);
3569 const char *p = c_getstr (src);
3570 rtx dest_mem;
3571
3572 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3573 || !can_store_by_pieces (tree_to_uhwi (len),
3574 builtin_strncpy_read_str,
3575 CONST_CAST (char *, p),
3576 dest_align, false))
3577 return NULL_RTX;
3578
3579 dest_mem = get_memory_rtx (dest, len);
3580 store_by_pieces (dest_mem, tree_to_uhwi (len),
3581 builtin_strncpy_read_str,
3582 CONST_CAST (char *, p), dest_align, false, 0);
3583 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3584 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3585 return dest_mem;
3586 }
3587 }
3588 return NULL_RTX;
3589 }
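
/* For example, given

     strncpy (buf, "ab", 8);

   len (8) exceeds strlen (src) + 1 (3), so the path above emits all
   eight bytes by pieces: 'a', 'b', and six NUL padding bytes, with
   builtin_strncpy_read_str supplying zeros for offsets past the end
   of the string. */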
3590
3591 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3592 bytes from constant string DATA + OFFSET and return it as target
3593 constant. */
3594
3595 rtx
3596 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3597 enum machine_mode mode)
3598 {
3599 const char *c = (const char *) data;
3600 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3601
3602 memset (p, *c, GET_MODE_SIZE (mode));
3603
3604 return c_readstr (p, mode);
3605 }
3606
3607 /* Callback routine for store_by_pieces. Return the RTL of a register
3608 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3609 char value given in the RTL register data. For example, if mode is
3610 4 bytes wide, return the RTL for 0x01010101*data. */
3611
3612 static rtx
3613 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3614 enum machine_mode mode)
3615 {
3616 rtx target, coeff;
3617 size_t size;
3618 char *p;
3619
3620 size = GET_MODE_SIZE (mode);
3621 if (size == 1)
3622 return (rtx) data;
3623
3624 p = XALLOCAVEC (char, size);
3625 memset (p, 1, size);
3626 coeff = c_readstr (p, mode);
3627
3628 target = convert_to_mode (mode, (rtx) data, 1);
3629 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3630 return force_reg (mode, target);
3631 }
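
/* The multiplication above is the usual byte-splat idiom.  A minimal
   C sketch of the same computation for a 4-byte mode (so 0xAB
   becomes 0xABABABAB), assuming <stdint.h>:

     uint32_t splat4 (uint8_t c)
     {
       return (uint32_t) c * 0x01010101u;
     }
*/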
3632
3633 /* Expand expression EXP, which is a call to the memset builtin. Return
3634 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3635 try to get the result in TARGET, if convenient (and in mode MODE if that's
3636 convenient). */
3637
3638 static rtx
3639 expand_builtin_memset (tree exp, rtx target, enum machine_mode mode)
3640 {
3641 if (!validate_arglist (exp,
3642 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3643 return NULL_RTX;
3644 else
3645 {
3646 tree dest = CALL_EXPR_ARG (exp, 0);
3647 tree val = CALL_EXPR_ARG (exp, 1);
3648 tree len = CALL_EXPR_ARG (exp, 2);
3649 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3650 }
3651 }
3652
3653 /* Helper function to do the actual work for expand_builtin_memset. The
3654 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3655 so that this can also be called without constructing an actual CALL_EXPR.
3656 The other arguments and return value are the same as for
3657 expand_builtin_memset. */
3658
3659 static rtx
3660 expand_builtin_memset_args (tree dest, tree val, tree len,
3661 rtx target, enum machine_mode mode, tree orig_exp)
3662 {
3663 tree fndecl, fn;
3664 enum built_in_function fcode;
3665 enum machine_mode val_mode;
3666 char c;
3667 unsigned int dest_align;
3668 rtx dest_mem, dest_addr, len_rtx;
3669 HOST_WIDE_INT expected_size = -1;
3670 unsigned int expected_align = 0;
3671 unsigned HOST_WIDE_INT min_size;
3672 unsigned HOST_WIDE_INT max_size;
3673 unsigned HOST_WIDE_INT probable_max_size;
3674
3675 dest_align = get_pointer_alignment (dest);
3676
3677 /* If DEST is not a pointer type, don't do this operation in-line. */
3678 if (dest_align == 0)
3679 return NULL_RTX;
3680
3681 if (currently_expanding_gimple_stmt)
3682 stringop_block_profile (currently_expanding_gimple_stmt,
3683 &expected_align, &expected_size);
3684
3685 if (expected_align < dest_align)
3686 expected_align = dest_align;
3687
3688 /* If the LEN parameter is zero, return DEST. */
3689 if (integer_zerop (len))
3690 {
3691 /* Evaluate and ignore VAL in case it has side-effects. */
3692 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3693 return expand_expr (dest, target, mode, EXPAND_NORMAL);
3694 }
3695
3696 /* Stabilize the arguments in case we fail. */
3697 dest = builtin_save_expr (dest);
3698 val = builtin_save_expr (val);
3699 len = builtin_save_expr (len);
3700
3701 len_rtx = expand_normal (len);
3702 determine_block_size (len, len_rtx, &min_size, &max_size,
3703 &probable_max_size);
3704 dest_mem = get_memory_rtx (dest, len);
3705 val_mode = TYPE_MODE (unsigned_char_type_node);
3706
3707 if (TREE_CODE (val) != INTEGER_CST)
3708 {
3709 rtx val_rtx;
3710
3711 val_rtx = expand_normal (val);
3712 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3713
3714 /* Assume that we can memset by pieces if we can store
3715 the coefficients by pieces (in the required modes).
3716 We can't pass builtin_memset_gen_str as that emits RTL. */
3717 c = 1;
3718 if (tree_fits_uhwi_p (len)
3719 && can_store_by_pieces (tree_to_uhwi (len),
3720 builtin_memset_read_str, &c, dest_align,
3721 true))
3722 {
3723 val_rtx = force_reg (val_mode, val_rtx);
3724 store_by_pieces (dest_mem, tree_to_uhwi (len),
3725 builtin_memset_gen_str, val_rtx, dest_align,
3726 true, 0);
3727 }
3728 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3729 dest_align, expected_align,
3730 expected_size, min_size, max_size,
3731 probable_max_size))
3732 goto do_libcall;
3733
3734 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3735 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3736 return dest_mem;
3737 }
3738
3739 if (target_char_cast (val, &c))
3740 goto do_libcall;
3741
3742 if (c)
3743 {
3744 if (tree_fits_uhwi_p (len)
3745 && can_store_by_pieces (tree_to_uhwi (len),
3746 builtin_memset_read_str, &c, dest_align,
3747 true))
3748 store_by_pieces (dest_mem, tree_to_uhwi (len),
3749 builtin_memset_read_str, &c, dest_align, true, 0);
3750 else if (!set_storage_via_setmem (dest_mem, len_rtx,
3751 gen_int_mode (c, val_mode),
3752 dest_align, expected_align,
3753 expected_size, min_size, max_size,
3754 probable_max_size))
3755 goto do_libcall;
3756
3757 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3758 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3759 return dest_mem;
3760 }
3761
3762 set_mem_align (dest_mem, dest_align);
3763 dest_addr = clear_storage_hints (dest_mem, len_rtx,
3764 CALL_EXPR_TAILCALL (orig_exp)
3765 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3766 expected_align, expected_size,
3767 min_size, max_size,
3768 probable_max_size);
3769
3770 if (dest_addr == 0)
3771 {
3772 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3773 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3774 }
3775
3776 return dest_addr;
3777
3778 do_libcall:
3779 fndecl = get_callee_fndecl (orig_exp);
3780 fcode = DECL_FUNCTION_CODE (fndecl);
3781 if (fcode == BUILT_IN_MEMSET)
3782 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3783 dest, val, len);
3784 else if (fcode == BUILT_IN_BZERO)
3785 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3786 dest, len);
3787 else
3788 gcc_unreachable ();
3789 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3790 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3791 return expand_call (fn, target, target == const0_rtx);
3792 }
3793
3794 /* Expand expression EXP, which is a call to the bzero builtin. Return
3795 NULL_RTX if we failed; the caller should emit a normal call. */
3796
3797 static rtx
3798 expand_builtin_bzero (tree exp)
3799 {
3800 tree dest, size;
3801 location_t loc = EXPR_LOCATION (exp);
3802
3803 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3804 return NULL_RTX;
3805
3806 dest = CALL_EXPR_ARG (exp, 0);
3807 size = CALL_EXPR_ARG (exp, 1);
3808
3809 /* New argument list transforming bzero(ptr x, int y) to
3810 memset(ptr x, int 0, size_t y). This is done this way
3811 so that if it isn't expanded inline, we fall back to
3812 calling bzero instead of memset. */
3813
3814 return expand_builtin_memset_args (dest, integer_zero_node,
3815 fold_convert_loc (loc,
3816 size_type_node, size),
3817 const0_rtx, VOIDmode, exp);
3818 }
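
/* So bzero (p, n) is expanded exactly as memset (p, 0, (size_t) n)
   would be, except that on failure the emitted library call is to
   bzero itself rather than to memset. */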
3819
3820 /* Expand expression EXP, which is a call to the memcmp built-in function.
3821 Return NULL_RTX if we failed and the caller should emit a normal call,
3822 otherwise try to get the result in TARGET, if convenient (and in mode
3823 MODE, if that's convenient). */
3824
3825 static rtx
3826 expand_builtin_memcmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3827 ATTRIBUTE_UNUSED enum machine_mode mode)
3828 {
3829 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3830
3831 if (!validate_arglist (exp,
3832 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3833 return NULL_RTX;
3834
3835 /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3836 implementing memcmp because it will stop if it encounters two
3837 zero bytes. */
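/* For example, memcmp ("a\0x", "a\0y", 3) must return nonzero,
   whereas a string compare would stop at the embedded NUL and
   wrongly report equality. */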
3838 #if defined HAVE_cmpmemsi
3839 {
3840 rtx arg1_rtx, arg2_rtx, arg3_rtx;
3841 rtx result;
3842 rtx insn;
3843 tree arg1 = CALL_EXPR_ARG (exp, 0);
3844 tree arg2 = CALL_EXPR_ARG (exp, 1);
3845 tree len = CALL_EXPR_ARG (exp, 2);
3846
3847 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3848 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3849 enum machine_mode insn_mode;
3850
3851 if (HAVE_cmpmemsi)
3852 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
3853 else
3854 return NULL_RTX;
3855
3856 /* If we don't have POINTER_TYPE, call the function. */
3857 if (arg1_align == 0 || arg2_align == 0)
3858 return NULL_RTX;
3859
3860 /* Make a place to write the result of the instruction. */
3861 result = target;
3862 if (! (result != 0
3863 && REG_P (result) && GET_MODE (result) == insn_mode
3864 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3865 result = gen_reg_rtx (insn_mode);
3866
3867 arg1_rtx = get_memory_rtx (arg1, len);
3868 arg2_rtx = get_memory_rtx (arg2, len);
3869 arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3870
3871 /* Set MEM_SIZE as appropriate. */
3872 if (CONST_INT_P (arg3_rtx))
3873 {
3874 set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3875 set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3876 }
3877
3878 if (HAVE_cmpmemsi)
3879 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
3880 GEN_INT (MIN (arg1_align, arg2_align)));
3881 else
3882 gcc_unreachable ();
3883
3884 if (insn)
3885 emit_insn (insn);
3886 else
3887 emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3888 TYPE_MODE (integer_type_node), 3,
3889 XEXP (arg1_rtx, 0), Pmode,
3890 XEXP (arg2_rtx, 0), Pmode,
3891 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3892 TYPE_UNSIGNED (sizetype)),
3893 TYPE_MODE (sizetype));
3894
3895 /* Return the value in the proper mode for this function. */
3896 mode = TYPE_MODE (TREE_TYPE (exp));
3897 if (GET_MODE (result) == mode)
3898 return result;
3899 else if (target != 0)
3900 {
3901 convert_move (target, result, 0);
3902 return target;
3903 }
3904 else
3905 return convert_to_mode (mode, result, 0);
3906 }
3907 #endif /* HAVE_cmpmemsi. */
3908
3909 return NULL_RTX;
3910 }
3911
3912 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
3913 if we failed; the caller should emit a normal call, otherwise try to get
3914 the result in TARGET, if convenient. */
3915
3916 static rtx
3917 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3918 {
3919 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3920 return NULL_RTX;
3921
3922 #if defined HAVE_cmpstrsi || defined HAVE_cmpstrnsi
3923 if (direct_optab_handler (cmpstr_optab, SImode) != CODE_FOR_nothing
3924 || direct_optab_handler (cmpstrn_optab, SImode) != CODE_FOR_nothing)
3925 {
3926 rtx arg1_rtx, arg2_rtx;
3927 rtx result, insn = NULL_RTX;
3928 tree fndecl, fn;
3929 tree arg1 = CALL_EXPR_ARG (exp, 0);
3930 tree arg2 = CALL_EXPR_ARG (exp, 1);
3931
3932 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3933 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3934
3935 /* If we don't have POINTER_TYPE, call the function. */
3936 if (arg1_align == 0 || arg2_align == 0)
3937 return NULL_RTX;
3938
3939 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
3940 arg1 = builtin_save_expr (arg1);
3941 arg2 = builtin_save_expr (arg2);
3942
3943 arg1_rtx = get_memory_rtx (arg1, NULL);
3944 arg2_rtx = get_memory_rtx (arg2, NULL);
3945
3946 #ifdef HAVE_cmpstrsi
3947 /* Try to call cmpstrsi. */
3948 if (HAVE_cmpstrsi)
3949 {
3950 enum machine_mode insn_mode
3951 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
3952
3953 /* Make a place to write the result of the instruction. */
3954 result = target;
3955 if (! (result != 0
3956 && REG_P (result) && GET_MODE (result) == insn_mode
3957 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3958 result = gen_reg_rtx (insn_mode);
3959
3960 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx,
3961 GEN_INT (MIN (arg1_align, arg2_align)));
3962 }
3963 #endif
3964 #ifdef HAVE_cmpstrnsi
3965 /* Try to determine at least one length and call cmpstrnsi. */
3966 if (!insn && HAVE_cmpstrnsi)
3967 {
3968 tree len;
3969 rtx arg3_rtx;
3970
3971 enum machine_mode insn_mode
3972 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
3973 tree len1 = c_strlen (arg1, 1);
3974 tree len2 = c_strlen (arg2, 1);
3975
3976 if (len1)
3977 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3978 if (len2)
3979 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3980
3981 /* If we don't have a constant length for the first, use the length
3982 of the second, if we know it. We don't require a constant for
3983 this case; some cost analysis could be done if both are available
3984 but neither is constant. For now, assume they're equally cheap,
3985 unless one has side effects. If both strings have constant lengths,
3986 use the smaller. */
3987
3988 if (!len1)
3989 len = len2;
3990 else if (!len2)
3991 len = len1;
3992 else if (TREE_SIDE_EFFECTS (len1))
3993 len = len2;
3994 else if (TREE_SIDE_EFFECTS (len2))
3995 len = len1;
3996 else if (TREE_CODE (len1) != INTEGER_CST)
3997 len = len2;
3998 else if (TREE_CODE (len2) != INTEGER_CST)
3999 len = len1;
4000 else if (tree_int_cst_lt (len1, len2))
4001 len = len1;
4002 else
4003 len = len2;
4004
4005 /* If both arguments have side effects, we cannot optimize. */
4006 if (!len || TREE_SIDE_EFFECTS (len))
4007 goto do_libcall;
4008
4009 arg3_rtx = expand_normal (len);
4010
4011 /* Make a place to write the result of the instruction. */
4012 result = target;
4013 if (! (result != 0
4014 && REG_P (result) && GET_MODE (result) == insn_mode
4015 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4016 result = gen_reg_rtx (insn_mode);
4017
4018 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4019 GEN_INT (MIN (arg1_align, arg2_align)));
4020 }
4021 #endif
4022
4023 if (insn)
4024 {
4025 enum machine_mode mode;
4026 emit_insn (insn);
4027
4028 /* Return the value in the proper mode for this function. */
4029 mode = TYPE_MODE (TREE_TYPE (exp));
4030 if (GET_MODE (result) == mode)
4031 return result;
4032 if (target == 0)
4033 return convert_to_mode (mode, result, 0);
4034 convert_move (target, result, 0);
4035 return target;
4036 }
4037
4038 /* Expand the library call ourselves using a stabilized argument
4039 list to avoid re-evaluating the function's arguments twice. */
4040 #ifdef HAVE_cmpstrnsi
4041 do_libcall:
4042 #endif
4043 fndecl = get_callee_fndecl (exp);
4044 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4045 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4046 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4047 return expand_call (fn, target, target == const0_rtx);
4048 }
4049 #endif
4050 return NULL_RTX;
4051 }
4052
4053 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4054 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4055 the result in TARGET, if convenient. */
4056
4057 static rtx
4058 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4059 ATTRIBUTE_UNUSED enum machine_mode mode)
4060 {
4061 location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
4062
4063 if (!validate_arglist (exp,
4064 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4065 return NULL_RTX;
4066
4067 /* If c_strlen can determine an expression for one of the string
4068 lengths, and it doesn't have side effects, then emit cmpstrnsi
4069 using length MIN(strlen(string)+1, arg3). */
4070 #ifdef HAVE_cmpstrnsi
4071 if (HAVE_cmpstrnsi)
4072 {
4073 tree len, len1, len2;
4074 rtx arg1_rtx, arg2_rtx, arg3_rtx;
4075 rtx result, insn;
4076 tree fndecl, fn;
4077 tree arg1 = CALL_EXPR_ARG (exp, 0);
4078 tree arg2 = CALL_EXPR_ARG (exp, 1);
4079 tree arg3 = CALL_EXPR_ARG (exp, 2);
4080
4081 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4082 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4083 enum machine_mode insn_mode
4084 = insn_data[(int) CODE_FOR_cmpstrnsi].operand[0].mode;
4085
4086 len1 = c_strlen (arg1, 1);
4087 len2 = c_strlen (arg2, 1);
4088
4089 if (len1)
4090 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4091 if (len2)
4092 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4093
4094 /* If we don't have a constant length for the first, use the length
4095 of the second, if we know it. We don't require a constant for
4096 this case; some cost analysis could be done if both are available
4097 but neither is constant. For now, assume they're equally cheap,
4098 unless one has side effects. If both strings have constant lengths,
4099 use the smaller. */
4100
4101 if (!len1)
4102 len = len2;
4103 else if (!len2)
4104 len = len1;
4105 else if (TREE_SIDE_EFFECTS (len1))
4106 len = len2;
4107 else if (TREE_SIDE_EFFECTS (len2))
4108 len = len1;
4109 else if (TREE_CODE (len1) != INTEGER_CST)
4110 len = len2;
4111 else if (TREE_CODE (len2) != INTEGER_CST)
4112 len = len1;
4113 else if (tree_int_cst_lt (len1, len2))
4114 len = len1;
4115 else
4116 len = len2;
4117
4118 /* If both arguments have side effects, we cannot optimize. */
4119 if (!len || TREE_SIDE_EFFECTS (len))
4120 return NULL_RTX;
4121
4122 /* The actual new length parameter is MIN(len,arg3). */
4123 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
4124 fold_convert_loc (loc, TREE_TYPE (len), arg3));
4125
4126 /* If we don't have POINTER_TYPE, call the function. */
4127 if (arg1_align == 0 || arg2_align == 0)
4128 return NULL_RTX;
4129
4130 /* Make a place to write the result of the instruction. */
4131 result = target;
4132 if (! (result != 0
4133 && REG_P (result) && GET_MODE (result) == insn_mode
4134 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
4135 result = gen_reg_rtx (insn_mode);
4136
4137 /* Stabilize the arguments in case gen_cmpstrnsi fails. */
4138 arg1 = builtin_save_expr (arg1);
4139 arg2 = builtin_save_expr (arg2);
4140 len = builtin_save_expr (len);
4141
4142 arg1_rtx = get_memory_rtx (arg1, len);
4143 arg2_rtx = get_memory_rtx (arg2, len);
4144 arg3_rtx = expand_normal (len);
4145 insn = gen_cmpstrnsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
4146 GEN_INT (MIN (arg1_align, arg2_align)));
4147 if (insn)
4148 {
4149 emit_insn (insn);
4150
4151 /* Return the value in the proper mode for this function. */
4152 mode = TYPE_MODE (TREE_TYPE (exp));
4153 if (GET_MODE (result) == mode)
4154 return result;
4155 if (target == 0)
4156 return convert_to_mode (mode, result, 0);
4157 convert_move (target, result, 0);
4158 return target;
4159 }
4160
4161 /* Expand the library call ourselves using a stabilized argument
4162 list to avoid re-evaluating the function's arguments twice. */
4163 fndecl = get_callee_fndecl (exp);
4164 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
4165 arg1, arg2, len);
4166 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4167 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4168 return expand_call (fn, target, target == const0_rtx);
4169 }
4170 #endif
4171 return NULL_RTX;
4172 }
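
/* For example, for strncmp (s, "abc", 100) the result is decided at
   or before the terminating NUL of "abc", so the expansion above
   compares only MIN (strlen ("abc") + 1, 100) == 4 bytes. */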
4173
4174 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4175 if that's convenient. */
4176
4177 rtx
4178 expand_builtin_saveregs (void)
4179 {
4180 rtx val, seq;
4181
4182 /* Don't do __builtin_saveregs more than once in a function.
4183 Save the result of the first call and reuse it. */
4184 if (saveregs_value != 0)
4185 return saveregs_value;
4186
4187 /* When this function is called, it means that registers must be
4188 saved on entry to this function. So we migrate the call to the
4189 first insn of this function. */
4190
4191 start_sequence ();
4192
4193 /* Do whatever the machine needs done in this case. */
4194 val = targetm.calls.expand_builtin_saveregs ();
4195
4196 seq = get_insns ();
4197 end_sequence ();
4198
4199 saveregs_value = val;
4200
4201 /* Put the insns after the NOTE that starts the function. If this
4202 is inside a start_sequence, make the outer-level insn chain current, so
4203 the code is placed at the start of the function. */
4204 push_topmost_sequence ();
4205 emit_insn_after (seq, entry_of_function ());
4206 pop_topmost_sequence ();
4207
4208 return val;
4209 }
4210
4211 /* Expand a call to __builtin_next_arg. */
4212
4213 static rtx
4214 expand_builtin_next_arg (void)
4215 {
4216 /* Checking arguments is already done in fold_builtin_next_arg
4217 that must be called before this function. */
4218 return expand_binop (ptr_mode, add_optab,
4219 crtl->args.internal_arg_pointer,
4220 crtl->args.arg_offset_rtx,
4221 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4222 }
4223
4224 /* Make it easier for the backends by protecting the valist argument
4225 from multiple evaluations. */
4226
4227 static tree
4228 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4229 {
4230 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4231
4232 /* The current way of determining the type of valist is completely
4233 bogus. We should have the information on the va builtin instead. */
4234 if (!vatype)
4235 vatype = targetm.fn_abi_va_list (cfun->decl);
4236
4237 if (TREE_CODE (vatype) == ARRAY_TYPE)
4238 {
4239 if (TREE_SIDE_EFFECTS (valist))
4240 valist = save_expr (valist);
4241
4242 /* For this case, the backends will be expecting a pointer to
4243 vatype, but it's possible we've actually been given an array
4244 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4245 So fix it. */
4246 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4247 {
4248 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4249 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4250 }
4251 }
4252 else
4253 {
4254 tree pt = build_pointer_type (vatype);
4255
4256 if (! needs_lvalue)
4257 {
4258 if (! TREE_SIDE_EFFECTS (valist))
4259 return valist;
4260
4261 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4262 TREE_SIDE_EFFECTS (valist) = 1;
4263 }
4264
4265 if (TREE_SIDE_EFFECTS (valist))
4266 valist = save_expr (valist);
4267 valist = fold_build2_loc (loc, MEM_REF,
4268 vatype, valist, build_int_cst (pt, 0));
4269 }
4270
4271 return valist;
4272 }
4273
4274 /* The "standard" definition of va_list is void*. */
4275
4276 tree
4277 std_build_builtin_va_list (void)
4278 {
4279 return ptr_type_node;
4280 }
4281
4282 /* The "standard" abi va_list is va_list_type_node. */
4283
4284 tree
4285 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4286 {
4287 return va_list_type_node;
4288 }
4289
4290 /* The "standard" type of va_list is va_list_type_node. */
4291
4292 tree
4293 std_canonical_va_list_type (tree type)
4294 {
4295 tree wtype, htype;
4296
4297 if (INDIRECT_REF_P (type))
4298 type = TREE_TYPE (type);
4299 else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4300 type = TREE_TYPE (type);
4301 wtype = va_list_type_node;
4302 htype = type;
4303 /* Treat structure va_list types. */
4304 if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4305 htype = TREE_TYPE (htype);
4306 else if (TREE_CODE (wtype) == ARRAY_TYPE)
4307 {
4308 /* If va_list is an array type, the argument may have decayed
4309 to a pointer type, e.g. by being passed to another function.
4310 In that case, unwrap both types so that we can compare the
4311 underlying records. */
4312 if (TREE_CODE (htype) == ARRAY_TYPE
4313 || POINTER_TYPE_P (htype))
4314 {
4315 wtype = TREE_TYPE (wtype);
4316 htype = TREE_TYPE (htype);
4317 }
4318 }
4319 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4320 return va_list_type_node;
4321
4322 return NULL_TREE;
4323 }
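
/* Example of the decay handled above: if the target's va_list is an
   array type such as 'struct tag[1]', a va_list object passed on to
   another function arrives as 'struct tag *'; both sides are
   unwrapped so the underlying RECORD_TYPEs compare equal. */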
4324
4325 /* The "standard" implementation of va_start: just assign `nextarg' to
4326 the variable. */
4327
4328 void
4329 std_expand_builtin_va_start (tree valist, rtx nextarg)
4330 {
4331 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4332 convert_move (va_r, nextarg, 0);
4333 }
4334
4335 /* Expand EXP, a call to __builtin_va_start. */
4336
4337 static rtx
4338 expand_builtin_va_start (tree exp)
4339 {
4340 rtx nextarg;
4341 tree valist;
4342 location_t loc = EXPR_LOCATION (exp);
4343
4344 if (call_expr_nargs (exp) < 2)
4345 {
4346 error_at (loc, "too few arguments to function %<va_start%>");
4347 return const0_rtx;
4348 }
4349
4350 if (fold_builtin_next_arg (exp, true))
4351 return const0_rtx;
4352
4353 nextarg = expand_builtin_next_arg ();
4354 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4355
4356 if (targetm.expand_builtin_va_start)
4357 targetm.expand_builtin_va_start (valist, nextarg);
4358 else
4359 std_expand_builtin_va_start (valist, nextarg);
4360
4361 return const0_rtx;
4362 }
4363
4364 /* Expand EXP, a call to __builtin_va_end. */
4365
4366 static rtx
4367 expand_builtin_va_end (tree exp)
4368 {
4369 tree valist = CALL_EXPR_ARG (exp, 0);
4370
4371 /* Evaluate for side effects, if needed. I hate macros that don't
4372 do that. */
4373 if (TREE_SIDE_EFFECTS (valist))
4374 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4375
4376 return const0_rtx;
4377 }
4378
4379 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4380 builtin rather than just as an assignment in stdarg.h because of the
4381 nastiness of array-type va_list types. */
4382
4383 static rtx
4384 expand_builtin_va_copy (tree exp)
4385 {
4386 tree dst, src, t;
4387 location_t loc = EXPR_LOCATION (exp);
4388
4389 dst = CALL_EXPR_ARG (exp, 0);
4390 src = CALL_EXPR_ARG (exp, 1);
4391
4392 dst = stabilize_va_list_loc (loc, dst, 1);
4393 src = stabilize_va_list_loc (loc, src, 0);
4394
4395 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4396
4397 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4398 {
4399 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4400 TREE_SIDE_EFFECTS (t) = 1;
4401 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4402 }
4403 else
4404 {
4405 rtx dstb, srcb, size;
4406
4407 /* Evaluate to pointers. */
4408 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4409 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4410 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4411 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4412
4413 dstb = convert_memory_address (Pmode, dstb);
4414 srcb = convert_memory_address (Pmode, srcb);
4415
4416 /* "Dereference" to BLKmode memories. */
4417 dstb = gen_rtx_MEM (BLKmode, dstb);
4418 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4419 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4420 srcb = gen_rtx_MEM (BLKmode, srcb);
4421 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4422 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4423
4424 /* Copy. */
4425 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4426 }
4427
4428 return const0_rtx;
4429 }
4430
4431 /* Expand a call to one of the builtin functions __builtin_frame_address or
4432 __builtin_return_address. */
4433
4434 static rtx
4435 expand_builtin_frame_address (tree fndecl, tree exp)
4436 {
4437 /* The argument must be a nonnegative integer constant.
4438 It counts the number of frames to scan up the stack.
4439 The value is the return address saved in that frame. */
4440 if (call_expr_nargs (exp) == 0)
4441 /* Warning about missing arg was already issued. */
4442 return const0_rtx;
4443 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4444 {
4445 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4446 error ("invalid argument to %<__builtin_frame_address%>");
4447 else
4448 error ("invalid argument to %<__builtin_return_address%>");
4449 return const0_rtx;
4450 }
4451 else
4452 {
4453 rtx tem
4454 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
4455 tree_to_uhwi (CALL_EXPR_ARG (exp, 0)));
4456
4457 /* Some ports cannot access arbitrary stack frames. */
4458 if (tem == NULL)
4459 {
4460 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4461 warning (0, "unsupported argument to %<__builtin_frame_address%>");
4462 else
4463 warning (0, "unsupported argument to %<__builtin_return_address%>");
4464 return const0_rtx;
4465 }
4466
4467 /* For __builtin_frame_address, return what we've got. */
4468 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4469 return tem;
4470
4471 if (!REG_P (tem)
4472 && ! CONSTANT_P (tem))
4473 tem = copy_addr_to_reg (tem);
4474 return tem;
4475 }
4476 }
4477
4478 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
4479 failed and the caller should emit a normal call. CANNOT_ACCUMULATE
4480 is the same as for allocate_dynamic_stack_space. */
4481
4482 static rtx
4483 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4484 {
4485 rtx op0;
4486 rtx result;
4487 bool valid_arglist;
4488 unsigned int align;
4489 bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4490 == BUILT_IN_ALLOCA_WITH_ALIGN);
4491
4492 valid_arglist
4493 = (alloca_with_align
4494 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4495 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4496
4497 if (!valid_arglist)
4498 return NULL_RTX;
4499
4500 /* Compute the argument. */
4501 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4502
4503 /* Compute the alignment. */
4504 align = (alloca_with_align
4505 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4506 : BIGGEST_ALIGNMENT);
4507
4508 /* Allocate the desired space. */
4509 result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4510 result = convert_memory_address (ptr_mode, result);
4511
4512 return result;
4513 }
4514
4515 /* Expand a call to bswap builtin in EXP.
4516 Return NULL_RTX if a normal call should be emitted rather than expanding the
4517 function in-line. If convenient, the result should be placed in TARGET.
4518 SUBTARGET may be used as the target for computing one of EXP's operands. */
4519
4520 static rtx
4521 expand_builtin_bswap (enum machine_mode target_mode, tree exp, rtx target,
4522 rtx subtarget)
4523 {
4524 tree arg;
4525 rtx op0;
4526
4527 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4528 return NULL_RTX;
4529
4530 arg = CALL_EXPR_ARG (exp, 0);
4531 op0 = expand_expr (arg,
4532 subtarget && GET_MODE (subtarget) == target_mode
4533 ? subtarget : NULL_RTX,
4534 target_mode, EXPAND_NORMAL);
4535 if (GET_MODE (op0) != target_mode)
4536 op0 = convert_to_mode (target_mode, op0, 1);
4537
4538 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4539
4540 gcc_assert (target);
4541
4542 return convert_to_mode (target_mode, target, 1);
4543 }
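
/* For example, __builtin_bswap32 (0x12345678) expands through
   bswap_optab to an insn producing 0x78563412. */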
4544
4545 /* Expand a call to a unary builtin in EXP.
4546 Return NULL_RTX if a normal call should be emitted rather than expanding the
4547 function in-line. If convenient, the result should be placed in TARGET.
4548 SUBTARGET may be used as the target for computing one of EXP's operands. */
4549
4550 static rtx
4551 expand_builtin_unop (enum machine_mode target_mode, tree exp, rtx target,
4552 rtx subtarget, optab op_optab)
4553 {
4554 rtx op0;
4555
4556 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4557 return NULL_RTX;
4558
4559 /* Compute the argument. */
4560 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4561 (subtarget
4562 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4563 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4564 VOIDmode, EXPAND_NORMAL);
4565 /* Compute op, into TARGET if possible.
4566 Set TARGET to wherever the result comes back. */
4567 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4568 op_optab, op0, target, op_optab != clrsb_optab);
4569 gcc_assert (target);
4570
4571 return convert_to_mode (target_mode, target, 0);
4572 }
4573
4574 /* Expand a call to __builtin_expect. We just return our argument
4575 as the builtin_expect semantics should already have been applied by
4576 the tree branch prediction pass. */
4577
4578 static rtx
4579 expand_builtin_expect (tree exp, rtx target)
4580 {
4581 tree arg;
4582
4583 if (call_expr_nargs (exp) < 2)
4584 return const0_rtx;
4585 arg = CALL_EXPR_ARG (exp, 0);
4586
4587 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4588 /* When guessing was done, the hints should be already stripped away. */
4589 gcc_assert (!flag_guess_branch_prob
4590 || optimize == 0 || seen_error ());
4591 return target;
4592 }
4593
4594 /* Expand a call to __builtin_assume_aligned. We just return our first
4595 argument as the builtin_assume_aligned semantics should already have
4596 been applied by CCP. */
4597
4598 static rtx
4599 expand_builtin_assume_aligned (tree exp, rtx target)
4600 {
4601 if (call_expr_nargs (exp) < 2)
4602 return const0_rtx;
4603 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4604 EXPAND_NORMAL);
4605 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4606 && (call_expr_nargs (exp) < 3
4607 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4608 return target;
4609 }
4610
4611 void
4612 expand_builtin_trap (void)
4613 {
4614 #ifdef HAVE_trap
4615 if (HAVE_trap)
4616 {
4617 rtx insn = emit_insn (gen_trap ());
4618 /* For trap insns when not accumulating outgoing args force
4619 REG_ARGS_SIZE note to prevent crossjumping of calls with
4620 different args sizes. */
4621 if (!ACCUMULATE_OUTGOING_ARGS)
4622 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4623 }
4624 else
4625 #endif
4626 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4627 emit_barrier ();
4628 }
4629
4630 /* Expand a call to __builtin_unreachable. We do nothing except emit
4631 a barrier saying that control flow will not pass here.
4632
4633 It is the responsibility of the program being compiled to ensure
4634 that control flow never reaches __builtin_unreachable. */
4635 static void
4636 expand_builtin_unreachable (void)
4637 {
4638 emit_barrier ();
4639 }
4640
4641 /* Expand EXP, a call to fabs, fabsf or fabsl.
4642 Return NULL_RTX if a normal call should be emitted rather than expanding
4643 the function inline. If convenient, the result should be placed
4644 in TARGET. SUBTARGET may be used as the target for computing
4645 the operand. */
4646
4647 static rtx
4648 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4649 {
4650 enum machine_mode mode;
4651 tree arg;
4652 rtx op0;
4653
4654 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4655 return NULL_RTX;
4656
4657 arg = CALL_EXPR_ARG (exp, 0);
4658 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4659 mode = TYPE_MODE (TREE_TYPE (arg));
4660 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4661 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4662 }
4663
4664 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4665 Return NULL if a normal call should be emitted rather than expanding the
4666 function inline. If convenient, the result should be placed in TARGET.
4667 SUBTARGET may be used as the target for computing the operand. */
4668
4669 static rtx
4670 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4671 {
4672 rtx op0, op1;
4673 tree arg;
4674
4675 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4676 return NULL_RTX;
4677
4678 arg = CALL_EXPR_ARG (exp, 0);
4679 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4680
4681 arg = CALL_EXPR_ARG (exp, 1);
4682 op1 = expand_normal (arg);
4683
4684 return expand_copysign (op0, op1, target);
4685 }
4686
4687 /* Create a new constant string literal and return a char* pointer to it.
4688 The STRING_CST value is the LEN characters at STR. */
4689 tree
4690 build_string_literal (int len, const char *str)
4691 {
4692 tree t, elem, index, type;
4693
4694 t = build_string (len, str);
4695 elem = build_type_variant (char_type_node, 1, 0);
4696 index = build_index_type (size_int (len - 1));
4697 type = build_array_type (elem, index);
4698 TREE_TYPE (t) = type;
4699 TREE_CONSTANT (t) = 1;
4700 TREE_READONLY (t) = 1;
4701 TREE_STATIC (t) = 1;
4702
4703 type = build_pointer_type (elem);
4704 t = build1 (ADDR_EXPR, type,
4705 build4 (ARRAY_REF, elem,
4706 t, integer_zero_node, NULL_TREE, NULL_TREE));
4707 return t;
4708 }
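
/* Example use: build_string_literal (4, "abc") yields a pointer of
   type 'const char *' to the constant "abc"; callers typically pass
   strlen (str) + 1 as LEN so the terminating NUL is included. */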
4709
4710 /* Expand a call to __builtin___clear_cache. */
4711
4712 static rtx
4713 expand_builtin___clear_cache (tree exp ATTRIBUTE_UNUSED)
4714 {
4715 #ifndef HAVE_clear_cache
4716 #ifdef CLEAR_INSN_CACHE
4717 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4718 does something. Just do the default expansion to a call to
4719 __clear_cache(). */
4720 return NULL_RTX;
4721 #else
4722 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4723 does nothing. There is no need to call it. Do nothing. */
4724 return const0_rtx;
4725 #endif /* CLEAR_INSN_CACHE */
4726 #else
4727 /* We have a "clear_cache" insn, and it will handle everything. */
4728 tree begin, end;
4729 rtx begin_rtx, end_rtx;
4730
4731 /* We must not expand to a library call. If we did, any
4732 fallback library function in libgcc that might contain a call to
4733 __builtin___clear_cache() would recurse infinitely. */
4734 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4735 {
4736 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4737 return const0_rtx;
4738 }
4739
4740 if (HAVE_clear_cache)
4741 {
4742 struct expand_operand ops[2];
4743
4744 begin = CALL_EXPR_ARG (exp, 0);
4745 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4746
4747 end = CALL_EXPR_ARG (exp, 1);
4748 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4749
4750 create_address_operand (&ops[0], begin_rtx);
4751 create_address_operand (&ops[1], end_rtx);
4752 if (maybe_expand_insn (CODE_FOR_clear_cache, 2, ops))
4753 return const0_rtx;
4754 }
4755 return const0_rtx;
4756 #endif /* HAVE_clear_cache */
4757 }
4758
4759 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
4760
4761 static rtx
4762 round_trampoline_addr (rtx tramp)
4763 {
4764 rtx temp, addend, mask;
4765
4766 /* If we don't need too much alignment, we'll have been guaranteed
4767 proper alignment by get_trampoline_type. */
4768 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4769 return tramp;
4770
4771 /* Round address up to desired boundary. */
4772 temp = gen_reg_rtx (Pmode);
4773 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4774 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
4775
4776 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
4777 temp, 0, OPTAB_LIB_WIDEN);
4778 tramp = expand_simple_binop (Pmode, AND, temp, mask,
4779 temp, 0, OPTAB_LIB_WIDEN);
4780
4781 return tramp;
4782 }
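
/* The two binops above form the usual round-up idiom
   (tramp + align - 1) & -align.  For example, with an 8-byte
   alignment, address 0x1003 becomes (0x1003 + 7) & ~7 == 0x1008. */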
4783
4784 static rtx
4785 expand_builtin_init_trampoline (tree exp, bool onstack)
4786 {
4787 tree t_tramp, t_func, t_chain;
4788 rtx m_tramp, r_tramp, r_chain, tmp;
4789
4790 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4791 POINTER_TYPE, VOID_TYPE))
4792 return NULL_RTX;
4793
4794 t_tramp = CALL_EXPR_ARG (exp, 0);
4795 t_func = CALL_EXPR_ARG (exp, 1);
4796 t_chain = CALL_EXPR_ARG (exp, 2);
4797
4798 r_tramp = expand_normal (t_tramp);
4799 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4800 MEM_NOTRAP_P (m_tramp) = 1;
4801
4802 /* If ONSTACK, the TRAMP argument should be the address of a field
4803 within the local function's FRAME decl. Either way, let's see if
4804 we can fill in the MEM_ATTRs for this memory. */
4805 if (TREE_CODE (t_tramp) == ADDR_EXPR)
4806 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4807
4808 /* Creator of a heap trampoline is responsible for making sure the
4809 address is aligned to at least STACK_BOUNDARY. Normally malloc
4810 will ensure this anyhow. */
4811 tmp = round_trampoline_addr (r_tramp);
4812 if (tmp != r_tramp)
4813 {
4814 m_tramp = change_address (m_tramp, BLKmode, tmp);
4815 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4816 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4817 }
4818
4819 /* The FUNC argument should be the address of the nested function.
4820 Extract the actual function decl to pass to the hook. */
4821 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4822 t_func = TREE_OPERAND (t_func, 0);
4823 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4824
4825 r_chain = expand_normal (t_chain);
4826
4827 /* Generate insns to initialize the trampoline. */
4828 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4829
4830 if (onstack)
4831 {
4832 trampolines_created = 1;
4833
4834 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4835 "trampoline generated for nested function %qD", t_func);
4836 }
4837
4838 return const0_rtx;
4839 }
4840
4841 static rtx
4842 expand_builtin_adjust_trampoline (tree exp)
4843 {
4844 rtx tramp;
4845
4846 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4847 return NULL_RTX;
4848
4849 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4850 tramp = round_trampoline_addr (tramp);
4851 if (targetm.calls.trampoline_adjust_address)
4852 tramp = targetm.calls.trampoline_adjust_address (tramp);
4853
4854 return tramp;
4855 }
4856
4857 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4858 function. The function first checks whether the back end provides
4859 an insn to implement signbit for the respective mode. If not, it
4860 checks whether the floating point format of the value is such that
4861 the sign bit can be extracted. If that is not the case, the
4862 function returns NULL_RTX to indicate that a normal call should be
4863 emitted rather than expanding the function in-line. EXP is the
4864 expression that is a call to the builtin function; if convenient,
4865 the result should be placed in TARGET. */
4866 static rtx
4867 expand_builtin_signbit (tree exp, rtx target)
4868 {
4869 const struct real_format *fmt;
4870 enum machine_mode fmode, imode, rmode;
4871 tree arg;
4872 int word, bitpos;
4873 enum insn_code icode;
4874 rtx temp;
4875 location_t loc = EXPR_LOCATION (exp);
4876
4877 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4878 return NULL_RTX;
4879
4880 arg = CALL_EXPR_ARG (exp, 0);
4881 fmode = TYPE_MODE (TREE_TYPE (arg));
4882 rmode = TYPE_MODE (TREE_TYPE (exp));
4883 fmt = REAL_MODE_FORMAT (fmode);
4884
4885 arg = builtin_save_expr (arg);
4886
4887 /* Expand the argument yielding an RTX expression. */
4888 temp = expand_normal (arg);
4889
4890 /* Check if the back end provides an insn that handles signbit for the
4891 argument's mode. */
4892 icode = optab_handler (signbit_optab, fmode);
4893 if (icode != CODE_FOR_nothing)
4894 {
4895 rtx last = get_last_insn ();
4896 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4897 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4898 return target;
4899 delete_insns_since (last);
4900 }
4901
4902 /* For floating point formats without a sign bit, implement signbit
4903 as "ARG < 0.0". */
4904 bitpos = fmt->signbit_ro;
4905 if (bitpos < 0)
4906 {
4907 /* But we can't do this if the format supports signed zero. */
4908 if (fmt->has_signed_zero && HONOR_SIGNED_ZEROS (fmode))
4909 return NULL_RTX;
4910
4911 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4912 build_real (TREE_TYPE (arg), dconst0));
4913 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4914 }
4915
4916 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4917 {
4918 imode = int_mode_for_mode (fmode);
4919 if (imode == BLKmode)
4920 return NULL_RTX;
4921 temp = gen_lowpart (imode, temp);
4922 }
4923 else
4924 {
4925 imode = word_mode;
4926 /* Handle targets with different FP word orders. */
4927 if (FLOAT_WORDS_BIG_ENDIAN)
4928 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4929 else
4930 word = bitpos / BITS_PER_WORD;
4931 temp = operand_subword_force (temp, word, fmode);
4932 bitpos = bitpos % BITS_PER_WORD;
4933 }
4934
4935 /* Force the intermediate word_mode (or narrower) result into a
4936 register. This avoids attempting to create paradoxical SUBREGs
4937 of floating point modes below. */
4938 temp = force_reg (imode, temp);
4939
4940 /* If the bitpos is within the "result mode" lowpart, the operation
4941 can be implemented with a single bitwise AND. Otherwise, we need
4942 a right shift and an AND. */
4943
4944 if (bitpos < GET_MODE_BITSIZE (rmode))
4945 {
4946 double_int mask = double_int_zero.set_bit (bitpos);
4947
4948 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
4949 temp = gen_lowpart (rmode, temp);
4950 temp = expand_binop (rmode, and_optab, temp,
4951 immed_double_int_const (mask, rmode),
4952 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4953 }
4954 else
4955 {
4956 /* Perform a logical right shift to place the signbit in the least
4957 significant bit, then truncate the result to the desired mode
4958 and mask just this bit. */
4959 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
4960 temp = gen_lowpart (rmode, temp);
4961 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
4962 NULL_RTX, 1, OPTAB_LIB_WIDEN);
4963 }
4964
4965 return temp;
4966 }
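
/* A minimal user-level sketch of the fallback path above (assumptions:
   32-bit IEEE single precision with the sign in bit 31; not part of
   GCC): copy the bits to an integer, then shift and mask, mirroring
   the gen_lowpart / RSHIFT_EXPR / AND sequence.  */
#if 0
#include <stdint.h>
#include <string.h>

static int
signbit_float (float x)
{
  uint32_t bits;
  memcpy (&bits, &x, sizeof bits);   /* the gen_lowpart analogue */
  return (bits >> 31) & 1;           /* logical shift, then mask bit 0 */
}
#endif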
4967
4968 /* Expand fork or exec calls. TARGET is the desired target of the
4969 call. EXP is the call. FN is the
4970 identifier of the actual function. IGNORE is nonzero if the
4971 value is to be ignored. */
4972
4973 static rtx
4974 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
4975 {
4976 tree id, decl;
4977 tree call;
4978
4979 /* If we are not profiling, just call the function. */
4980 if (!profile_arc_flag)
4981 return NULL_RTX;
4982
4983 /* Otherwise call the wrapper. This should be equivalent for the rest of
4984 the compiler, so the code does not diverge, and the wrapper may run the
4985 code necessary for keeping the profiling sane. */
4986
4987 switch (DECL_FUNCTION_CODE (fn))
4988 {
4989 case BUILT_IN_FORK:
4990 id = get_identifier ("__gcov_fork");
4991 break;
4992
4993 case BUILT_IN_EXECL:
4994 id = get_identifier ("__gcov_execl");
4995 break;
4996
4997 case BUILT_IN_EXECV:
4998 id = get_identifier ("__gcov_execv");
4999 break;
5000
5001 case BUILT_IN_EXECLP:
5002 id = get_identifier ("__gcov_execlp");
5003 break;
5004
5005 case BUILT_IN_EXECLE:
5006 id = get_identifier ("__gcov_execle");
5007 break;
5008
5009 case BUILT_IN_EXECVP:
5010 id = get_identifier ("__gcov_execvp");
5011 break;
5012
5013 case BUILT_IN_EXECVE:
5014 id = get_identifier ("__gcov_execve");
5015 break;
5016
5017 default:
5018 gcc_unreachable ();
5019 }
5020
5021 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5022 FUNCTION_DECL, id, TREE_TYPE (fn));
5023 DECL_EXTERNAL (decl) = 1;
5024 TREE_PUBLIC (decl) = 1;
5025 DECL_ARTIFICIAL (decl) = 1;
5026 TREE_NOTHROW (decl) = 1;
5027 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5028 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5029 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5030 return expand_call (call, target, ignore);
5031 }
5032
5033
5034
5035 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5036 the pointer in these functions is void*, the tree optimizers may remove
5037 casts. The mode computed in expand_builtin isn't reliable either, due
5038 to __sync_bool_compare_and_swap.
5039
5040 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5041 group of builtins. This gives us log2 of the mode size. */
5042
5043 static inline enum machine_mode
5044 get_builtin_sync_mode (int fcode_diff)
5045 {
5046 /* The size is not negotiable, so ask not to get BLKmode in return
5047 if the target indicates that a smaller size would be better. */
5048 return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
5049 }
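
/* For example (assuming BITS_PER_UNIT == 8), BUILT_IN_SYNC_FETCH_AND_ADD_4
   is two entries past the FOO_1 code, so FCODE_DIFF is 2 and the request
   is for an 8 << 2 == 32-bit integer mode, i.e. SImode on most targets.
   A hypothetical sketch of the mapping:  */
#if 0
static int
sync_builtin_size_in_bits (int fcode_diff)
{
  return 8 << fcode_diff;   /* 0->8, 1->16, 2->32, 3->64, 4->128 */
}
#endif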
5050
5051 /* Expand the memory expression LOC and return the appropriate memory operand
5052 for the builtin_sync operations. */
5053
5054 static rtx
5055 get_builtin_sync_mem (tree loc, enum machine_mode mode)
5056 {
5057 rtx addr, mem;
5058
5059 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5060 addr = convert_memory_address (Pmode, addr);
5061
5062 /* Note that we explicitly do not want any alias information for this
5063 memory, so that we kill all other live memories. Otherwise we don't
5064 satisfy the full barrier semantics of the intrinsic. */
5065 mem = validize_mem (gen_rtx_MEM (mode, addr));
5066
5067 /* The alignment needs to be at least that of the mode. */
5068 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5069 get_pointer_alignment (loc)));
5070 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5071 MEM_VOLATILE_P (mem) = 1;
5072
5073 return mem;
5074 }
5075
5076 /* Make sure an argument is in the right mode.
5077 EXP is the tree argument.
5078 MODE is the mode it should be in. */
5079
5080 static rtx
5081 expand_expr_force_mode (tree exp, enum machine_mode mode)
5082 {
5083 rtx val;
5084 enum machine_mode old_mode;
5085
5086 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5087 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5088 of CONST_INTs, where we know the old_mode only from the call argument. */
5089
5090 old_mode = GET_MODE (val);
5091 if (old_mode == VOIDmode)
5092 old_mode = TYPE_MODE (TREE_TYPE (exp));
5093 val = convert_modes (mode, old_mode, val, 1);
5094 return val;
5095 }
5096
5097
5098 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5099 EXP is the CALL_EXPR. CODE is the rtx code
5100 that corresponds to the arithmetic or logical operation from the name;
5101 an exception here is that NOT actually means NAND. TARGET is an optional
5102 place for us to store the results; AFTER is true if this is the
5103 fetch_and_xxx form. */
5104
5105 static rtx
5106 expand_builtin_sync_operation (enum machine_mode mode, tree exp,
5107 enum rtx_code code, bool after,
5108 rtx target)
5109 {
5110 rtx val, mem;
5111 location_t loc = EXPR_LOCATION (exp);
5112
5113 if (code == NOT && warn_sync_nand)
5114 {
5115 tree fndecl = get_callee_fndecl (exp);
5116 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5117
5118 static bool warned_f_a_n, warned_n_a_f;
5119
5120 switch (fcode)
5121 {
5122 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5123 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5124 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5125 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5126 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5127 if (warned_f_a_n)
5128 break;
5129
5130 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5131 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5132 warned_f_a_n = true;
5133 break;
5134
5135 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5136 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5137 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5138 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5139 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5140 if (warned_n_a_f)
5141 break;
5142
5143 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5144 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5145 warned_n_a_f = true;
5146 break;
5147
5148 default:
5149 gcc_unreachable ();
5150 }
5151 }
5152
5153 /* Expand the operands. */
5154 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5155 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5156
5157 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SEQ_CST,
5158 after);
5159 }
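
/* Sketch of the post-4.4 NAND semantics that the warning above refers
   to (illustrative user-level equivalent, not part of GCC; the real
   builtin performs the whole sequence atomically):  */
#if 0
static int
fetch_and_nand_equiv (int *ptr, int val)
{
  int old = *ptr;
  *ptr = ~(old & val);   /* since GCC 4.4: NAND of the whole word */
  return old;            /* the fetch_and_xxx form returns the old value */
}
#endif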
5160
5161 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5162 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5163 true if this is the boolean form. TARGET is a place for us to store the
5164 results; this is NOT optional if IS_BOOL is true. */
5165
5166 static rtx
5167 expand_builtin_compare_and_swap (enum machine_mode mode, tree exp,
5168 bool is_bool, rtx target)
5169 {
5170 rtx old_val, new_val, mem;
5171 rtx *pbool, *poval;
5172
5173 /* Expand the operands. */
5174 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5175 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5176 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5177
5178 pbool = poval = NULL;
5179 if (target != const0_rtx)
5180 {
5181 if (is_bool)
5182 pbool = &target;
5183 else
5184 poval = &target;
5185 }
5186 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5187 false, MEMMODEL_SEQ_CST,
5188 MEMMODEL_SEQ_CST))
5189 return NULL_RTX;
5190
5191 return target;
5192 }
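
/* The two user-visible forms this implements, as an illustrative
   user-level sketch (not part of GCC):  */
#if 0
static void
cas_forms (int *p)
{
  /* IS_BOOL true: only success or failure is wanted.  */
  _Bool ok = __sync_bool_compare_and_swap (p, 0, 1);
  /* IS_BOOL false: the prior value is wanted.  */
  int old = __sync_val_compare_and_swap (p, 0, 1);
  (void) ok;
  (void) old;
}
#endif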
5193
5194 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5195 general form is actually an atomic exchange, and some targets only
5196 support a reduced form with the second argument being a constant 1.
5197 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5198 the results. */
5199
5200 static rtx
5201 expand_builtin_sync_lock_test_and_set (enum machine_mode mode, tree exp,
5202 rtx target)
5203 {
5204 rtx val, mem;
5205
5206 /* Expand the operands. */
5207 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5208 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5209
5210 return expand_sync_lock_test_and_set (target, mem, val);
5211 }
5212
5213 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5214
5215 static void
5216 expand_builtin_sync_lock_release (enum machine_mode mode, tree exp)
5217 {
5218 rtx mem;
5219
5220 /* Expand the operands. */
5221 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5222
5223 expand_atomic_store (mem, const0_rtx, MEMMODEL_RELEASE, true);
5224 }
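
/* The intended pairing, sketched as user-level code (illustrative, not
   part of GCC): a test-and-set spinlock released by the RELEASE-ordered
   store of zero emitted above.  */
#if 0
static int lock;

static void
spin_lock (void)
{
  while (__sync_lock_test_and_set (&lock, 1))
    ;   /* spin until the previous value was 0 */
}

static void
spin_unlock (void)
{
  __sync_lock_release (&lock);   /* store 0 with release semantics */
}
#endif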
5225
5226 /* Given an integer representing an ``enum memmodel'', verify its
5227 correctness and return the memory model enum. */
5228
5229 static enum memmodel
5230 get_memmodel (tree exp)
5231 {
5232 rtx op;
5233 unsigned HOST_WIDE_INT val;
5234
5235 /* If the parameter is not a constant, it's a run time value so we'll just
5236 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5237 if (TREE_CODE (exp) != INTEGER_CST)
5238 return MEMMODEL_SEQ_CST;
5239
5240 op = expand_normal (exp);
5241
5242 val = INTVAL (op);
5243 if (targetm.memmodel_check)
5244 val = targetm.memmodel_check (val);
5245 else if (val & ~MEMMODEL_MASK)
5246 {
5247 warning (OPT_Winvalid_memory_model,
5248 "Unknown architecture specifier in memory model to builtin.");
5249 return MEMMODEL_SEQ_CST;
5250 }
5251
5252 if ((INTVAL (op) & MEMMODEL_MASK) >= MEMMODEL_LAST)
5253 {
5254 warning (OPT_Winvalid_memory_model,
5255 "invalid memory model argument to builtin");
5256 return MEMMODEL_SEQ_CST;
5257 }
5258
5259 return (enum memmodel) val;
5260 }
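
/* Only compile-time constant models are validated here; an illustrative
   user-level sketch (not part of GCC):  */
#if 0
static int
load_examples (int *p, int runtime_model)
{
  int a = __atomic_load_n (p, __ATOMIC_ACQUIRE);   /* constant: checked */
  int b = __atomic_load_n (p, runtime_model);      /* variable: SEQ_CST */
  return a + b;
}
#endif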
5261
5262 /* Expand the __atomic_exchange intrinsic:
5263 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5264 EXP is the CALL_EXPR.
5265 TARGET is an optional place for us to store the results. */
5266
5267 static rtx
5268 expand_builtin_atomic_exchange (enum machine_mode mode, tree exp, rtx target)
5269 {
5270 rtx val, mem;
5271 enum memmodel model;
5272
5273 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5274 if ((model & MEMMODEL_MASK) == MEMMODEL_CONSUME)
5275 {
5276 error ("invalid memory model for %<__atomic_exchange%>");
5277 return NULL_RTX;
5278 }
5279
5280 if (!flag_inline_atomics)
5281 return NULL_RTX;
5282
5283 /* Expand the operands. */
5284 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5285 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5286
5287 return expand_atomic_exchange (target, mem, val, model);
5288 }
5289
5290 /* Expand the __atomic_compare_exchange intrinsic:
5291 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5292 TYPE desired, BOOL weak,
5293 enum memmodel success,
5294 enum memmodel failure)
5295 EXP is the CALL_EXPR.
5296 TARGET is an optional place for us to store the results. */
5297
5298 static rtx
5299 expand_builtin_atomic_compare_exchange (enum machine_mode mode, tree exp,
5300 rtx target)
5301 {
5302 rtx expect, desired, mem, oldval, label;
5303 enum memmodel success, failure;
5304 tree weak;
5305 bool is_weak;
5306
5307 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5308 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5309
5310 if ((failure & MEMMODEL_MASK) == MEMMODEL_RELEASE
5311 || (failure & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5312 {
5313 error ("invalid failure memory model for %<__atomic_compare_exchange%>");
5314 return NULL_RTX;
5315 }
5316
5317 if (failure > success)
5318 {
5319 error ("failure memory model cannot be stronger than success "
5320 "memory model for %<__atomic_compare_exchange%>");
5321 return NULL_RTX;
5322 }
5323
5324 if (!flag_inline_atomics)
5325 return NULL_RTX;
5326
5327 /* Expand the operands. */
5328 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5329
5330 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5331 expect = convert_memory_address (Pmode, expect);
5332 expect = gen_rtx_MEM (mode, expect);
5333 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5334
5335 weak = CALL_EXPR_ARG (exp, 3);
5336 is_weak = false;
5337 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5338 is_weak = true;
5339
5340 if (target == const0_rtx)
5341 target = NULL;
5342
5343 /* Lest the rtl backend create a race condition with an improper store
5344 to memory, always create a new pseudo for OLDVAL. */
5345 oldval = NULL;
5346
5347 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5348 is_weak, success, failure))
5349 return NULL_RTX;
5350
5351 /* Conditionally store back to EXPECT, lest we create a race condition
5352 with an improper store to memory. */
5353 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5354 the normal case where EXPECT is totally private, i.e. a register. At
5355 which point the store can be unconditional. */
5356 label = gen_label_rtx ();
5357 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL, VOIDmode, 1, label);
5358 emit_move_insn (expect, oldval);
5359 emit_label (label);
5360
5361 return target;
5362 }
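
/* The conditional store back to EXPECT above is what makes the usual
   user-level retry loop work; an illustrative sketch (not part of
   GCC):  */
#if 0
static void
atomic_increment (int *p)
{
  int expected = __atomic_load_n (p, __ATOMIC_RELAXED);
  /* On failure the current value is written back into EXPECTED,
     which is exactly the emit_move_insn (expect, oldval) path.  */
  while (!__atomic_compare_exchange_n (p, &expected, expected + 1,
                                       /*weak=*/1, __ATOMIC_SEQ_CST,
                                       __ATOMIC_RELAXED))
    ;
}
#endif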
5363
5364 /* Expand the __atomic_load intrinsic:
5365 TYPE __atomic_load (TYPE *object, enum memmodel)
5366 EXP is the CALL_EXPR.
5367 TARGET is an optional place for us to store the results. */
5368
5369 static rtx
5370 expand_builtin_atomic_load (enum machine_mode mode, tree exp, rtx target)
5371 {
5372 rtx mem;
5373 enum memmodel model;
5374
5375 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5376 if ((model & MEMMODEL_MASK) == MEMMODEL_RELEASE
5377 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5378 {
5379 error ("invalid memory model for %<__atomic_load%>");
5380 return NULL_RTX;
5381 }
5382
5383 if (!flag_inline_atomics)
5384 return NULL_RTX;
5385
5386 /* Expand the operand. */
5387 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5388
5389 return expand_atomic_load (target, mem, model);
5390 }
5391
5392
5393 /* Expand the __atomic_store intrinsic:
5394 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5395 EXP is the CALL_EXPR.
5396 TARGET is an optional place for us to store the results. */
5397
5398 static rtx
5399 expand_builtin_atomic_store (enum machine_mode mode, tree exp)
5400 {
5401 rtx mem, val;
5402 enum memmodel model;
5403
5404 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5405 if ((model & MEMMODEL_MASK) != MEMMODEL_RELAXED
5406 && (model & MEMMODEL_MASK) != MEMMODEL_SEQ_CST
5407 && (model & MEMMODEL_MASK) != MEMMODEL_RELEASE)
5408 {
5409 error ("invalid memory model for %<__atomic_store%>");
5410 return NULL_RTX;
5411 }
5412
5413 if (!flag_inline_atomics)
5414 return NULL_RTX;
5415
5416 /* Expand the operands. */
5417 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5418 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5419
5420 return expand_atomic_store (mem, val, model, false);
5421 }
5422
5423 /* Expand the __atomic_fetch_XXX intrinsic:
5424 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5425 EXP is the CALL_EXPR.
5426 TARGET is an optional place for us to store the results.
5427 CODE is the operation: PLUS, MINUS, AND, XOR, or IOR (NOT means NAND, as above).
5428 FETCH_AFTER is true if returning the result of the operation.
5429 FETCH_AFTER is false if returning the value before the operation.
5430 IGNORE is true if the result is not used.
5431 EXT_CALL is the correct builtin for an external call if this cannot be
5432 resolved to an instruction sequence. */
5433
5434 static rtx
5435 expand_builtin_atomic_fetch_op (enum machine_mode mode, tree exp, rtx target,
5436 enum rtx_code code, bool fetch_after,
5437 bool ignore, enum built_in_function ext_call)
5438 {
5439 rtx val, mem, ret;
5440 enum memmodel model;
5441 tree fndecl;
5442 tree addr;
5443
5444 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5445
5446 /* Expand the operands. */
5447 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5448 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5449
5450 /* Only try generating instructions if inlining is turned on. */
5451 if (flag_inline_atomics)
5452 {
5453 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5454 if (ret)
5455 return ret;
5456 }
5457
5458 /* If no alternate library routine is needed, let a normal call be emitted. */
5459 if (ext_call == BUILT_IN_NONE)
5460 return NULL_RTX;
5461
5462 /* Change the call to the specified function. */
5463 fndecl = get_callee_fndecl (exp);
5464 addr = CALL_EXPR_FN (exp);
5465 STRIP_NOPS (addr);
5466
5467 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5468 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5469
5470 /* Expand the call here so we can emit trailing code. */
5471 ret = expand_call (exp, target, ignore);
5472
5473 /* Replace the original function just in case it matters. */
5474 TREE_OPERAND (addr, 0) = fndecl;
5475
5476 /* Then issue the arithmetic correction to return the right result. */
5477 if (!ignore)
5478 {
5479 if (code == NOT)
5480 {
5481 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5482 OPTAB_LIB_WIDEN);
5483 ret = expand_simple_unop (mode, NOT, ret, target, true);
5484 }
5485 else
5486 ret = expand_simple_binop (mode, code, ret, val, target, true,
5487 OPTAB_LIB_WIDEN);
5488 }
5489 return ret;
5490 }
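
/* The trailing correction above reconstructs an xxx_and_fetch result
   from a fetch_and_xxx library call; a sketch of the arithmetic
   (illustrative, not part of GCC):  */
#if 0
static int
add_fetch_via_fetch_add (int *p, int val)
{
  int old = __atomic_fetch_add (p, val, __ATOMIC_SEQ_CST);
  return old + val;        /* the CODE correction, here PLUS */
}

static int
nand_fetch_via_fetch_nand (int *p, int val)
{
  int old = __atomic_fetch_nand (p, val, __ATOMIC_SEQ_CST);
  return ~(old & val);     /* NOT means NAND: AND first, then NOT */
}
#endif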
5491
5492
5493 #ifndef HAVE_atomic_clear
5494 # define HAVE_atomic_clear 0
5495 # define gen_atomic_clear(x,y) (gcc_unreachable (), NULL_RTX)
5496 #endif
5497
5498 /* Expand an atomic clear operation.
5499 void _atomic_clear (BOOL *obj, enum memmodel)
5500 EXP is the call expression. */
5501
5502 static rtx
5503 expand_builtin_atomic_clear (tree exp)
5504 {
5505 enum machine_mode mode;
5506 rtx mem, ret;
5507 enum memmodel model;
5508
5509 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5510 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5511 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5512
5513 if ((model & MEMMODEL_MASK) == MEMMODEL_ACQUIRE
5514 || (model & MEMMODEL_MASK) == MEMMODEL_ACQ_REL)
5515 {
5516 error ("invalid memory model for %<__atomic_store%>");
5517 return const0_rtx;
5518 }
5519
5520 if (HAVE_atomic_clear)
5521 {
5522 emit_insn (gen_atomic_clear (mem, model));
5523 return const0_rtx;
5524 }
5525
5526 /* Try issuing an __atomic_store, allowing a fallback to the
5527 __sync_lock_release libcall. The only way this can fail is if the
5528 bool type is larger than a word size. Unlikely, but handle it anyway
5529 for completeness. Assume a single threaded model since there is no
5530 atomic support in this case, and no barriers are required. */
5531 ret = expand_atomic_store (mem, const0_rtx, model, true);
5532 if (!ret)
5533 emit_move_insn (mem, const0_rtx);
5534 return const0_rtx;
5535 }
5536
5537 /* Expand an atomic test_and_set operation.
5538 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
5539 EXP is the call expression. */
5540
5541 static rtx
5542 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5543 {
5544 rtx mem;
5545 enum memmodel model;
5546 enum machine_mode mode;
5547
5548 mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5549 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5550 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5551
5552 return expand_atomic_test_and_set (target, mem, model);
5553 }
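
/* Together with __atomic_clear above, this is what the C11 atomic_flag
   operations lower to; an illustrative user-level sketch (not part of
   GCC):  */
#if 0
static _Bool flag_storage;

static _Bool
try_acquire (void)
{
  return !__atomic_test_and_set (&flag_storage, __ATOMIC_ACQUIRE);
}

static void
release (void)
{
  __atomic_clear (&flag_storage, __ATOMIC_RELEASE);
}
#endif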
5554
5555
5556 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5557 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
5558
5559 static tree
5560 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5561 {
5562 int size;
5563 enum machine_mode mode;
5564 unsigned int mode_align, type_align;
5565
5566 if (TREE_CODE (arg0) != INTEGER_CST)
5567 return NULL_TREE;
5568
5569 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5570 mode = mode_for_size (size, MODE_INT, 0);
5571 mode_align = GET_MODE_ALIGNMENT (mode);
5572
5573 if (TREE_CODE (arg1) == INTEGER_CST && INTVAL (expand_normal (arg1)) == 0)
5574 type_align = mode_align;
5575 else
5576 {
5577 tree ttype = TREE_TYPE (arg1);
5578
5579 /* This function is usually invoked and folded immediately by the front
5580 end before anything else has a chance to look at it. The pointer
5581 parameter at this point is usually cast to a void *, so check for that
5582 and look past the cast. */
5583 if (TREE_CODE (arg1) == NOP_EXPR && POINTER_TYPE_P (ttype)
5584 && VOID_TYPE_P (TREE_TYPE (ttype)))
5585 arg1 = TREE_OPERAND (arg1, 0);
5586
5587 ttype = TREE_TYPE (arg1);
5588 gcc_assert (POINTER_TYPE_P (ttype));
5589
5590 /* Get the underlying type of the object. */
5591 ttype = TREE_TYPE (ttype);
5592 type_align = TYPE_ALIGN (ttype);
5593 }
5594
5595 /* If the object has smaller alignment, the lock free routines cannot
5596 be used. */
5597 if (type_align < mode_align)
5598 return boolean_false_node;
5599
5600 /* Check if a compare_and_swap pattern exists for the mode which represents
5601 the required size. The pattern is not allowed to fail, so the existence
5602 of the pattern indicates support is present. */
5603 if (can_compare_and_swap_p (mode, true))
5604 return boolean_true_node;
5605 else
5606 return boolean_false_node;
5607 }
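
/* With a null object pointer the answer depends only on the typical
   alignment for the size, so the call can fold to a constant; an
   illustrative user-level use (assumes int is lock free on the
   target):  */
#if 0
_Static_assert (__atomic_always_lock_free (sizeof (int), 0),
                "int is expected to be lock free on this target");
#endif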
5608
5609 /* Return true if the parameters to call EXP represent an object which will
5610 always generate lock free instructions. The first argument represents the
5611 size of the object, and the second parameter is a pointer to the object
5612 itself. If NULL is passed for the object, then the result is based on
5613 typical alignment for an object of the specified size. Otherwise return
5614 false. */
5615
5616 static rtx
5617 expand_builtin_atomic_always_lock_free (tree exp)
5618 {
5619 tree size;
5620 tree arg0 = CALL_EXPR_ARG (exp, 0);
5621 tree arg1 = CALL_EXPR_ARG (exp, 1);
5622
5623 if (TREE_CODE (arg0) != INTEGER_CST)
5624 {
5625 error ("non-constant argument 1 to __atomic_always_lock_free");
5626 return const0_rtx;
5627 }
5628
5629 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5630 if (size == boolean_true_node)
5631 return const1_rtx;
5632 return const0_rtx;
5633 }
5634
5635 /* Return a one or zero if it can be determined that object ARG1 of size ARG0
5636 is lock free on this architecture. */
5637
5638 static tree
5639 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5640 {
5641 if (!flag_inline_atomics)
5642 return NULL_TREE;
5643
5644 /* If it isn't always lock free, don't generate a result. */
5645 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5646 return boolean_true_node;
5647
5648 return NULL_TREE;
5649 }
5650
5651 /* Return true if the parameters to call EXP represent an object which will
5652 generate lock free instructions. The first argument represents the
5653 size of the object, and the second parameter is a pointer to the object
5654 itself. If NULL is passed for the object, then the result is based on
5655 typical alignment for an object of the specified size. Otherwise return
5656 NULL. */
5657
5658 static rtx
5659 expand_builtin_atomic_is_lock_free (tree exp)
5660 {
5661 tree size;
5662 tree arg0 = CALL_EXPR_ARG (exp, 0);
5663 tree arg1 = CALL_EXPR_ARG (exp, 1);
5664
5665 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5666 {
5667 error ("non-integer argument 1 to __atomic_is_lock_free");
5668 return NULL_RTX;
5669 }
5670
5671 if (!flag_inline_atomics)
5672 return NULL_RTX;
5673
5674 /* If the value is known at compile time, return the RTX for it. */
5675 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5676 if (size == boolean_true_node)
5677 return const1_rtx;
5678
5679 return NULL_RTX;
5680 }
5681
5682 /* Expand the __atomic_thread_fence intrinsic:
5683 void __atomic_thread_fence (enum memmodel)
5684 EXP is the CALL_EXPR. */
5685
5686 static void
5687 expand_builtin_atomic_thread_fence (tree exp)
5688 {
5689 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5690 expand_mem_thread_fence (model);
5691 }
5692
5693 /* Expand the __atomic_signal_fence intrinsic:
5694 void __atomic_signal_fence (enum memmodel)
5695 EXP is the CALL_EXPR. */
5696
5697 static void
5698 expand_builtin_atomic_signal_fence (tree exp)
5699 {
5700 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5701 expand_mem_signal_fence (model);
5702 }
5703
5704 /* Expand the __sync_synchronize intrinsic. */
5705
5706 static void
5707 expand_builtin_sync_synchronize (void)
5708 {
5709 expand_mem_thread_fence (MEMMODEL_SEQ_CST);
5710 }
5711
5712 static rtx
5713 expand_builtin_thread_pointer (tree exp, rtx target)
5714 {
5715 enum insn_code icode;
5716 if (!validate_arglist (exp, VOID_TYPE))
5717 return const0_rtx;
5718 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5719 if (icode != CODE_FOR_nothing)
5720 {
5721 struct expand_operand op;
5722 /* If the target is not suitable then create a new target. */
5723 if (target == NULL_RTX
5724 || !REG_P (target)
5725 || GET_MODE (target) != Pmode)
5726 target = gen_reg_rtx (Pmode);
5727 create_output_operand (&op, target, Pmode);
5728 expand_insn (icode, 1, &op);
5729 return target;
5730 }
5731 error ("__builtin_thread_pointer is not supported on this target");
5732 return const0_rtx;
5733 }
5734
5735 static void
5736 expand_builtin_set_thread_pointer (tree exp)
5737 {
5738 enum insn_code icode;
5739 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5740 return;
5741 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5742 if (icode != CODE_FOR_nothing)
5743 {
5744 struct expand_operand op;
5745 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5746 Pmode, EXPAND_NORMAL);
5747 create_input_operand (&op, val, Pmode);
5748 expand_insn (icode, 1, &op);
5749 return;
5750 }
5751 error ("__builtin_set_thread_pointer is not supported on this target");
5752 }
5753
5754
5755 /* Emit code to restore the current value of the stack pointer. */
5756
5757 static void
5758 expand_stack_restore (tree var)
5759 {
5760 rtx prev, sa = expand_normal (var);
5761
5762 sa = convert_memory_address (Pmode, sa);
5763
5764 prev = get_last_insn ();
5765 emit_stack_restore (SAVE_BLOCK, sa);
5766 fixup_args_size_notes (prev, get_last_insn (), 0);
5767 }
5768
5769
5770 /* Emit code to save the current value of the stack pointer. */
5771
5772 static rtx
5773 expand_stack_save (void)
5774 {
5775 rtx ret = NULL_RTX;
5776
5777 do_pending_stack_adjust ();
5778 emit_stack_save (SAVE_BLOCK, &ret);
5779 return ret;
5780 }
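
/* The compiler pairs these two around variable-sized allocations; a
   sketch of the kind of user code (illustrative, not part of GCC) for
   which a stack save is emitted on entry to the block containing the
   VLA and a restore at its exit:  */
#if 0
static void
scratch (int n)
{
  {
    char buf[n];   /* stack save emitted before the allocation...  */
    buf[0] = 0;
  }                /* ...stack restore emitted here */
}
#endif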
5781
5782 /* Expand an expression EXP that calls a built-in function,
5783 with result going to TARGET if that's convenient
5784 (and in mode MODE if that's convenient).
5785 SUBTARGET may be used as the target for computing one of EXP's operands.
5786 IGNORE is nonzero if the value is to be ignored. */
5787
5788 rtx
5789 expand_builtin (tree exp, rtx target, rtx subtarget, enum machine_mode mode,
5790 int ignore)
5791 {
5792 tree fndecl = get_callee_fndecl (exp);
5793 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5794 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5795 int flags;
5796
5797 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5798 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5799
5800 /* When not optimizing, generate calls to library functions for a certain
5801 set of builtins. */
5802 if (!optimize
5803 && !called_as_built_in (fndecl)
5804 && fcode != BUILT_IN_FORK
5805 && fcode != BUILT_IN_EXECL
5806 && fcode != BUILT_IN_EXECV
5807 && fcode != BUILT_IN_EXECLP
5808 && fcode != BUILT_IN_EXECLE
5809 && fcode != BUILT_IN_EXECVP
5810 && fcode != BUILT_IN_EXECVE
5811 && fcode != BUILT_IN_ALLOCA
5812 && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5813 && fcode != BUILT_IN_FREE)
5814 return expand_call (exp, target, ignore);
5815
5816 /* The built-in function expanders test for target == const0_rtx
5817 to determine whether the function's result will be ignored. */
5818 if (ignore)
5819 target = const0_rtx;
5820
5821 /* If the result of a pure or const built-in function is ignored, and
5822 none of its arguments are volatile, we can avoid expanding the
5823 built-in call and just evaluate the arguments for side-effects. */
5824 if (target == const0_rtx
5825 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5826 && !(flags & ECF_LOOPING_CONST_OR_PURE))
5827 {
5828 bool volatilep = false;
5829 tree arg;
5830 call_expr_arg_iterator iter;
5831
5832 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5833 if (TREE_THIS_VOLATILE (arg))
5834 {
5835 volatilep = true;
5836 break;
5837 }
5838
5839 if (! volatilep)
5840 {
5841 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5842 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5843 return const0_rtx;
5844 }
5845 }
5846
5847 switch (fcode)
5848 {
5849 CASE_FLT_FN (BUILT_IN_FABS):
5850 case BUILT_IN_FABSD32:
5851 case BUILT_IN_FABSD64:
5852 case BUILT_IN_FABSD128:
5853 target = expand_builtin_fabs (exp, target, subtarget);
5854 if (target)
5855 return target;
5856 break;
5857
5858 CASE_FLT_FN (BUILT_IN_COPYSIGN):
5859 target = expand_builtin_copysign (exp, target, subtarget);
5860 if (target)
5861 return target;
5862 break;
5863
5864 /* Just do a normal library call if we were unable to fold
5865 the values. */
5866 CASE_FLT_FN (BUILT_IN_CABS):
5867 break;
5868
5869 CASE_FLT_FN (BUILT_IN_EXP):
5870 CASE_FLT_FN (BUILT_IN_EXP10):
5871 CASE_FLT_FN (BUILT_IN_POW10):
5872 CASE_FLT_FN (BUILT_IN_EXP2):
5873 CASE_FLT_FN (BUILT_IN_EXPM1):
5874 CASE_FLT_FN (BUILT_IN_LOGB):
5875 CASE_FLT_FN (BUILT_IN_LOG):
5876 CASE_FLT_FN (BUILT_IN_LOG10):
5877 CASE_FLT_FN (BUILT_IN_LOG2):
5878 CASE_FLT_FN (BUILT_IN_LOG1P):
5879 CASE_FLT_FN (BUILT_IN_TAN):
5880 CASE_FLT_FN (BUILT_IN_ASIN):
5881 CASE_FLT_FN (BUILT_IN_ACOS):
5882 CASE_FLT_FN (BUILT_IN_ATAN):
5883 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
5884 /* Treat these like sqrt only if unsafe math optimizations are allowed,
5885 because of possible accuracy problems. */
5886 if (! flag_unsafe_math_optimizations)
5887 break;
5888 CASE_FLT_FN (BUILT_IN_SQRT):
5889 CASE_FLT_FN (BUILT_IN_FLOOR):
5890 CASE_FLT_FN (BUILT_IN_CEIL):
5891 CASE_FLT_FN (BUILT_IN_TRUNC):
5892 CASE_FLT_FN (BUILT_IN_ROUND):
5893 CASE_FLT_FN (BUILT_IN_NEARBYINT):
5894 CASE_FLT_FN (BUILT_IN_RINT):
5895 target = expand_builtin_mathfn (exp, target, subtarget);
5896 if (target)
5897 return target;
5898 break;
5899
5900 CASE_FLT_FN (BUILT_IN_FMA):
5901 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5902 if (target)
5903 return target;
5904 break;
5905
5906 CASE_FLT_FN (BUILT_IN_ILOGB):
5907 if (! flag_unsafe_math_optimizations)
5908 break;
5909 CASE_FLT_FN (BUILT_IN_ISINF):
5910 CASE_FLT_FN (BUILT_IN_FINITE):
5911 case BUILT_IN_ISFINITE:
5912 case BUILT_IN_ISNORMAL:
5913 target = expand_builtin_interclass_mathfn (exp, target);
5914 if (target)
5915 return target;
5916 break;
5917
5918 CASE_FLT_FN (BUILT_IN_ICEIL):
5919 CASE_FLT_FN (BUILT_IN_LCEIL):
5920 CASE_FLT_FN (BUILT_IN_LLCEIL):
5921 CASE_FLT_FN (BUILT_IN_LFLOOR):
5922 CASE_FLT_FN (BUILT_IN_IFLOOR):
5923 CASE_FLT_FN (BUILT_IN_LLFLOOR):
5924 target = expand_builtin_int_roundingfn (exp, target);
5925 if (target)
5926 return target;
5927 break;
5928
5929 CASE_FLT_FN (BUILT_IN_IRINT):
5930 CASE_FLT_FN (BUILT_IN_LRINT):
5931 CASE_FLT_FN (BUILT_IN_LLRINT):
5932 CASE_FLT_FN (BUILT_IN_IROUND):
5933 CASE_FLT_FN (BUILT_IN_LROUND):
5934 CASE_FLT_FN (BUILT_IN_LLROUND):
5935 target = expand_builtin_int_roundingfn_2 (exp, target);
5936 if (target)
5937 return target;
5938 break;
5939
5940 CASE_FLT_FN (BUILT_IN_POWI):
5941 target = expand_builtin_powi (exp, target);
5942 if (target)
5943 return target;
5944 break;
5945
5946 CASE_FLT_FN (BUILT_IN_ATAN2):
5947 CASE_FLT_FN (BUILT_IN_LDEXP):
5948 CASE_FLT_FN (BUILT_IN_SCALB):
5949 CASE_FLT_FN (BUILT_IN_SCALBN):
5950 CASE_FLT_FN (BUILT_IN_SCALBLN):
5951 if (! flag_unsafe_math_optimizations)
5952 break;
5953
5954 CASE_FLT_FN (BUILT_IN_FMOD):
5955 CASE_FLT_FN (BUILT_IN_REMAINDER):
5956 CASE_FLT_FN (BUILT_IN_DREM):
5957 CASE_FLT_FN (BUILT_IN_POW):
5958 target = expand_builtin_mathfn_2 (exp, target, subtarget);
5959 if (target)
5960 return target;
5961 break;
5962
5963 CASE_FLT_FN (BUILT_IN_CEXPI):
5964 target = expand_builtin_cexpi (exp, target);
5965 gcc_assert (target);
5966 return target;
5967
5968 CASE_FLT_FN (BUILT_IN_SIN):
5969 CASE_FLT_FN (BUILT_IN_COS):
5970 if (! flag_unsafe_math_optimizations)
5971 break;
5972 target = expand_builtin_mathfn_3 (exp, target, subtarget);
5973 if (target)
5974 return target;
5975 break;
5976
5977 CASE_FLT_FN (BUILT_IN_SINCOS):
5978 if (! flag_unsafe_math_optimizations)
5979 break;
5980 target = expand_builtin_sincos (exp);
5981 if (target)
5982 return target;
5983 break;
5984
5985 case BUILT_IN_APPLY_ARGS:
5986 return expand_builtin_apply_args ();
5987
5988 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5989 FUNCTION with a copy of the parameters described by
5990 ARGUMENTS, and ARGSIZE. It returns a block of memory
5991 allocated on the stack into which is stored all the registers
5992 that might possibly be used for returning the result of a
5993 function. ARGUMENTS is the value returned by
5994 __builtin_apply_args. ARGSIZE is the number of bytes of
5995 arguments that must be copied. ??? How should this value be
5996 computed? We'll also need a safe worst case value for varargs
5997 functions. */
5998 case BUILT_IN_APPLY:
5999 if (!validate_arglist (exp, POINTER_TYPE,
6000 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6001 && !validate_arglist (exp, REFERENCE_TYPE,
6002 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6003 return const0_rtx;
6004 else
6005 {
6006 rtx ops[3];
6007
6008 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6009 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6010 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6011
6012 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6013 }
6014
6015 /* __builtin_return (RESULT) causes the function to return the
6016 value described by RESULT. RESULT is address of the block of
6017 memory returned by __builtin_apply. */
6018 case BUILT_IN_RETURN:
6019 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6020 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6021 return const0_rtx;
6022
6023 case BUILT_IN_SAVEREGS:
6024 return expand_builtin_saveregs ();
6025
6026 case BUILT_IN_VA_ARG_PACK:
6027 /* All valid uses of __builtin_va_arg_pack () are removed during
6028 inlining. */
6029 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6030 return const0_rtx;
6031
6032 case BUILT_IN_VA_ARG_PACK_LEN:
6033 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6034 inlining. */
6035 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6036 return const0_rtx;
6037
6038 /* Return the address of the first anonymous stack arg. */
6039 case BUILT_IN_NEXT_ARG:
6040 if (fold_builtin_next_arg (exp, false))
6041 return const0_rtx;
6042 return expand_builtin_next_arg ();
6043
6044 case BUILT_IN_CLEAR_CACHE:
6045 target = expand_builtin___clear_cache (exp);
6046 if (target)
6047 return target;
6048 break;
6049
6050 case BUILT_IN_CLASSIFY_TYPE:
6051 return expand_builtin_classify_type (exp);
6052
6053 case BUILT_IN_CONSTANT_P:
6054 return const0_rtx;
6055
6056 case BUILT_IN_FRAME_ADDRESS:
6057 case BUILT_IN_RETURN_ADDRESS:
6058 return expand_builtin_frame_address (fndecl, exp);
6059
6060 /* Returns the address of the area where the structure is returned.
6061 0 otherwise. */
6062 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6063 if (call_expr_nargs (exp) != 0
6064 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6065 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6066 return const0_rtx;
6067 else
6068 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6069
6070 case BUILT_IN_ALLOCA:
6071 case BUILT_IN_ALLOCA_WITH_ALIGN:
6072 /* If the allocation stems from the declaration of a variable-sized
6073 object, it cannot accumulate. */
6074 target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
6075 if (target)
6076 return target;
6077 break;
6078
6079 case BUILT_IN_STACK_SAVE:
6080 return expand_stack_save ();
6081
6082 case BUILT_IN_STACK_RESTORE:
6083 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6084 return const0_rtx;
6085
6086 case BUILT_IN_BSWAP16:
6087 case BUILT_IN_BSWAP32:
6088 case BUILT_IN_BSWAP64:
6089 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6090 if (target)
6091 return target;
6092 break;
6093
6094 CASE_INT_FN (BUILT_IN_FFS):
6095 target = expand_builtin_unop (target_mode, exp, target,
6096 subtarget, ffs_optab);
6097 if (target)
6098 return target;
6099 break;
6100
6101 CASE_INT_FN (BUILT_IN_CLZ):
6102 target = expand_builtin_unop (target_mode, exp, target,
6103 subtarget, clz_optab);
6104 if (target)
6105 return target;
6106 break;
6107
6108 CASE_INT_FN (BUILT_IN_CTZ):
6109 target = expand_builtin_unop (target_mode, exp, target,
6110 subtarget, ctz_optab);
6111 if (target)
6112 return target;
6113 break;
6114
6115 CASE_INT_FN (BUILT_IN_CLRSB):
6116 target = expand_builtin_unop (target_mode, exp, target,
6117 subtarget, clrsb_optab);
6118 if (target)
6119 return target;
6120 break;
6121
6122 CASE_INT_FN (BUILT_IN_POPCOUNT):
6123 target = expand_builtin_unop (target_mode, exp, target,
6124 subtarget, popcount_optab);
6125 if (target)
6126 return target;
6127 break;
6128
6129 CASE_INT_FN (BUILT_IN_PARITY):
6130 target = expand_builtin_unop (target_mode, exp, target,
6131 subtarget, parity_optab);
6132 if (target)
6133 return target;
6134 break;
6135
6136 case BUILT_IN_STRLEN:
6137 target = expand_builtin_strlen (exp, target, target_mode);
6138 if (target)
6139 return target;
6140 break;
6141
6142 case BUILT_IN_STRCPY:
6143 target = expand_builtin_strcpy (exp, target);
6144 if (target)
6145 return target;
6146 break;
6147
6148 case BUILT_IN_STRNCPY:
6149 target = expand_builtin_strncpy (exp, target);
6150 if (target)
6151 return target;
6152 break;
6153
6154 case BUILT_IN_STPCPY:
6155 target = expand_builtin_stpcpy (exp, target, mode);
6156 if (target)
6157 return target;
6158 break;
6159
6160 case BUILT_IN_MEMCPY:
6161 target = expand_builtin_memcpy (exp, target);
6162 if (target)
6163 return target;
6164 break;
6165
6166 case BUILT_IN_MEMPCPY:
6167 target = expand_builtin_mempcpy (exp, target, mode);
6168 if (target)
6169 return target;
6170 break;
6171
6172 case BUILT_IN_MEMSET:
6173 target = expand_builtin_memset (exp, target, mode);
6174 if (target)
6175 return target;
6176 break;
6177
6178 case BUILT_IN_BZERO:
6179 target = expand_builtin_bzero (exp);
6180 if (target)
6181 return target;
6182 break;
6183
6184 case BUILT_IN_STRCMP:
6185 target = expand_builtin_strcmp (exp, target);
6186 if (target)
6187 return target;
6188 break;
6189
6190 case BUILT_IN_STRNCMP:
6191 target = expand_builtin_strncmp (exp, target, mode);
6192 if (target)
6193 return target;
6194 break;
6195
6196 case BUILT_IN_BCMP:
6197 case BUILT_IN_MEMCMP:
6198 target = expand_builtin_memcmp (exp, target, mode);
6199 if (target)
6200 return target;
6201 break;
6202
6203 case BUILT_IN_SETJMP:
6204 /* This should have been lowered to the builtins below. */
6205 gcc_unreachable ();
6206
6207 case BUILT_IN_SETJMP_SETUP:
6208 /* __builtin_setjmp_setup is passed a pointer to an array of five words
6209 and the receiver label. */
6210 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6211 {
6212 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6213 VOIDmode, EXPAND_NORMAL);
6214 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6215 rtx label_r = label_rtx (label);
6216
6217 /* This is copied from the handling of non-local gotos. */
6218 expand_builtin_setjmp_setup (buf_addr, label_r);
6219 nonlocal_goto_handler_labels
6220 = gen_rtx_EXPR_LIST (VOIDmode, label_r,
6221 nonlocal_goto_handler_labels);
6222 /* ??? Do not let expand_label treat us as such since we would
6223 not want to be both on the list of non-local labels and on
6224 the list of forced labels. */
6225 FORCED_LABEL (label) = 0;
6226 return const0_rtx;
6227 }
6228 break;
6229
6230 case BUILT_IN_SETJMP_RECEIVER:
6231 /* __builtin_setjmp_receiver is passed the receiver label. */
6232 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6233 {
6234 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6235 rtx label_r = label_rtx (label);
6236
6237 expand_builtin_setjmp_receiver (label_r);
6238 return const0_rtx;
6239 }
6240 break;
6241
6242 /* __builtin_longjmp is passed a pointer to an array of five words.
6243 It's similar to the C library longjmp function but works with
6244 __builtin_setjmp above. */
6245 case BUILT_IN_LONGJMP:
6246 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6247 {
6248 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6249 VOIDmode, EXPAND_NORMAL);
6250 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6251
6252 if (value != const1_rtx)
6253 {
6254 error ("%<__builtin_longjmp%> second argument must be 1");
6255 return const0_rtx;
6256 }
6257
6258 expand_builtin_longjmp (buf_addr, value);
6259 return const0_rtx;
6260 }
6261 break;
6262
6263 case BUILT_IN_NONLOCAL_GOTO:
6264 target = expand_builtin_nonlocal_goto (exp);
6265 if (target)
6266 return target;
6267 break;
6268
6269 /* This updates the setjmp buffer that is its argument with the value
6270 of the current stack pointer. */
6271 case BUILT_IN_UPDATE_SETJMP_BUF:
6272 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6273 {
6274 rtx buf_addr
6275 = expand_normal (CALL_EXPR_ARG (exp, 0));
6276
6277 expand_builtin_update_setjmp_buf (buf_addr);
6278 return const0_rtx;
6279 }
6280 break;
6281
6282 case BUILT_IN_TRAP:
6283 expand_builtin_trap ();
6284 return const0_rtx;
6285
6286 case BUILT_IN_UNREACHABLE:
6287 expand_builtin_unreachable ();
6288 return const0_rtx;
6289
6290 CASE_FLT_FN (BUILT_IN_SIGNBIT):
6291 case BUILT_IN_SIGNBITD32:
6292 case BUILT_IN_SIGNBITD64:
6293 case BUILT_IN_SIGNBITD128:
6294 target = expand_builtin_signbit (exp, target);
6295 if (target)
6296 return target;
6297 break;
6298
6299 /* Various hooks for the DWARF 2 __throw routine. */
6300 case BUILT_IN_UNWIND_INIT:
6301 expand_builtin_unwind_init ();
6302 return const0_rtx;
6303 case BUILT_IN_DWARF_CFA:
6304 return virtual_cfa_rtx;
6305 #ifdef DWARF2_UNWIND_INFO
6306 case BUILT_IN_DWARF_SP_COLUMN:
6307 return expand_builtin_dwarf_sp_column ();
6308 case BUILT_IN_INIT_DWARF_REG_SIZES:
6309 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6310 return const0_rtx;
6311 #endif
6312 case BUILT_IN_FROB_RETURN_ADDR:
6313 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6314 case BUILT_IN_EXTRACT_RETURN_ADDR:
6315 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6316 case BUILT_IN_EH_RETURN:
6317 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6318 CALL_EXPR_ARG (exp, 1));
6319 return const0_rtx;
6320 #ifdef EH_RETURN_DATA_REGNO
6321 case BUILT_IN_EH_RETURN_DATA_REGNO:
6322 return expand_builtin_eh_return_data_regno (exp);
6323 #endif
6324 case BUILT_IN_EXTEND_POINTER:
6325 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6326 case BUILT_IN_EH_POINTER:
6327 return expand_builtin_eh_pointer (exp);
6328 case BUILT_IN_EH_FILTER:
6329 return expand_builtin_eh_filter (exp);
6330 case BUILT_IN_EH_COPY_VALUES:
6331 return expand_builtin_eh_copy_values (exp);
6332
6333 case BUILT_IN_VA_START:
6334 return expand_builtin_va_start (exp);
6335 case BUILT_IN_VA_END:
6336 return expand_builtin_va_end (exp);
6337 case BUILT_IN_VA_COPY:
6338 return expand_builtin_va_copy (exp);
6339 case BUILT_IN_EXPECT:
6340 return expand_builtin_expect (exp, target);
6341 case BUILT_IN_ASSUME_ALIGNED:
6342 return expand_builtin_assume_aligned (exp, target);
6343 case BUILT_IN_PREFETCH:
6344 expand_builtin_prefetch (exp);
6345 return const0_rtx;
6346
6347 case BUILT_IN_INIT_TRAMPOLINE:
6348 return expand_builtin_init_trampoline (exp, true);
6349 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6350 return expand_builtin_init_trampoline (exp, false);
6351 case BUILT_IN_ADJUST_TRAMPOLINE:
6352 return expand_builtin_adjust_trampoline (exp);
6353
6354 case BUILT_IN_FORK:
6355 case BUILT_IN_EXECL:
6356 case BUILT_IN_EXECV:
6357 case BUILT_IN_EXECLP:
6358 case BUILT_IN_EXECLE:
6359 case BUILT_IN_EXECVP:
6360 case BUILT_IN_EXECVE:
6361 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6362 if (target)
6363 return target;
6364 break;
6365
6366 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6367 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6368 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6369 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6370 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6371 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6372 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6373 if (target)
6374 return target;
6375 break;
6376
6377 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6378 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6379 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6380 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6381 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6382 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6383 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6384 if (target)
6385 return target;
6386 break;
6387
6388 case BUILT_IN_SYNC_FETCH_AND_OR_1:
6389 case BUILT_IN_SYNC_FETCH_AND_OR_2:
6390 case BUILT_IN_SYNC_FETCH_AND_OR_4:
6391 case BUILT_IN_SYNC_FETCH_AND_OR_8:
6392 case BUILT_IN_SYNC_FETCH_AND_OR_16:
6393 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6394 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6395 if (target)
6396 return target;
6397 break;
6398
6399 case BUILT_IN_SYNC_FETCH_AND_AND_1:
6400 case BUILT_IN_SYNC_FETCH_AND_AND_2:
6401 case BUILT_IN_SYNC_FETCH_AND_AND_4:
6402 case BUILT_IN_SYNC_FETCH_AND_AND_8:
6403 case BUILT_IN_SYNC_FETCH_AND_AND_16:
6404 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6405 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6406 if (target)
6407 return target;
6408 break;
6409
6410 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6411 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6412 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6413 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6414 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6415 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6416 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6417 if (target)
6418 return target;
6419 break;
6420
6421 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6422 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6423 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6424 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6425 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6426 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6427 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6428 if (target)
6429 return target;
6430 break;
6431
6432 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6433 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6434 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6435 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6436 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6437 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6438 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6439 if (target)
6440 return target;
6441 break;
6442
6443 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6444 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6445 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6446 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6447 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6448 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6449 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6450 if (target)
6451 return target;
6452 break;
6453
6454 case BUILT_IN_SYNC_OR_AND_FETCH_1:
6455 case BUILT_IN_SYNC_OR_AND_FETCH_2:
6456 case BUILT_IN_SYNC_OR_AND_FETCH_4:
6457 case BUILT_IN_SYNC_OR_AND_FETCH_8:
6458 case BUILT_IN_SYNC_OR_AND_FETCH_16:
6459 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6460 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6461 if (target)
6462 return target;
6463 break;
6464
6465 case BUILT_IN_SYNC_AND_AND_FETCH_1:
6466 case BUILT_IN_SYNC_AND_AND_FETCH_2:
6467 case BUILT_IN_SYNC_AND_AND_FETCH_4:
6468 case BUILT_IN_SYNC_AND_AND_FETCH_8:
6469 case BUILT_IN_SYNC_AND_AND_FETCH_16:
6470 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6471 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6472 if (target)
6473 return target;
6474 break;
6475
6476 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6477 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6478 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6479 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6480 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6481 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6482 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6483 if (target)
6484 return target;
6485 break;
6486
6487 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6488 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6489 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6490 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6491 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6492 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6493 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6494 if (target)
6495 return target;
6496 break;
6497
6498 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6499 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6500 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6501 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6502 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6503 if (mode == VOIDmode)
6504 mode = TYPE_MODE (boolean_type_node);
6505 if (!target || !register_operand (target, mode))
6506 target = gen_reg_rtx (mode);
6507
6508 mode = get_builtin_sync_mode
6509 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6510 target = expand_builtin_compare_and_swap (mode, exp, true, target);
6511 if (target)
6512 return target;
6513 break;
6514
6515 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6516 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6517 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6518 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6519 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6520 mode = get_builtin_sync_mode
6521 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6522 target = expand_builtin_compare_and_swap (mode, exp, false, target);
6523 if (target)
6524 return target;
6525 break;
6526
6527 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6528 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6529 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6530 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6531 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6532 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6533 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6534 if (target)
6535 return target;
6536 break;
6537
6538 case BUILT_IN_SYNC_LOCK_RELEASE_1:
6539 case BUILT_IN_SYNC_LOCK_RELEASE_2:
6540 case BUILT_IN_SYNC_LOCK_RELEASE_4:
6541 case BUILT_IN_SYNC_LOCK_RELEASE_8:
6542 case BUILT_IN_SYNC_LOCK_RELEASE_16:
6543 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6544 expand_builtin_sync_lock_release (mode, exp);
6545 return const0_rtx;
6546
6547 case BUILT_IN_SYNC_SYNCHRONIZE:
6548 expand_builtin_sync_synchronize ();
6549 return const0_rtx;
6550
6551 case BUILT_IN_ATOMIC_EXCHANGE_1:
6552 case BUILT_IN_ATOMIC_EXCHANGE_2:
6553 case BUILT_IN_ATOMIC_EXCHANGE_4:
6554 case BUILT_IN_ATOMIC_EXCHANGE_8:
6555 case BUILT_IN_ATOMIC_EXCHANGE_16:
6556 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6557 target = expand_builtin_atomic_exchange (mode, exp, target);
6558 if (target)
6559 return target;
6560 break;
6561
6562 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6563 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6564 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6565 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6566 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6567 {
6568 unsigned int nargs, z;
6569 vec<tree, va_gc> *vec;
6570
6571 mode =
6572 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6573 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6574 if (target)
6575 return target;
6576
6577 /* If this is turned into an external library call, the weak parameter
6578 must be dropped to match the expected parameter list. */
6579 nargs = call_expr_nargs (exp);
6580 vec_alloc (vec, nargs - 1);
6581 for (z = 0; z < 3; z++)
6582 vec->quick_push (CALL_EXPR_ARG (exp, z));
6583 /* Skip the boolean weak parameter. */
6584 for (z = 4; z < 6; z++)
6585 vec->quick_push (CALL_EXPR_ARG (exp, z));
6586 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6587 break;
6588 }
6589
6590 case BUILT_IN_ATOMIC_LOAD_1:
6591 case BUILT_IN_ATOMIC_LOAD_2:
6592 case BUILT_IN_ATOMIC_LOAD_4:
6593 case BUILT_IN_ATOMIC_LOAD_8:
6594 case BUILT_IN_ATOMIC_LOAD_16:
6595 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6596 target = expand_builtin_atomic_load (mode, exp, target);
6597 if (target)
6598 return target;
6599 break;
6600
6601 case BUILT_IN_ATOMIC_STORE_1:
6602 case BUILT_IN_ATOMIC_STORE_2:
6603 case BUILT_IN_ATOMIC_STORE_4:
6604 case BUILT_IN_ATOMIC_STORE_8:
6605 case BUILT_IN_ATOMIC_STORE_16:
6606 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6607 target = expand_builtin_atomic_store (mode, exp);
6608 if (target)
6609 return const0_rtx;
6610 break;
6611
6612 case BUILT_IN_ATOMIC_ADD_FETCH_1:
6613 case BUILT_IN_ATOMIC_ADD_FETCH_2:
6614 case BUILT_IN_ATOMIC_ADD_FETCH_4:
6615 case BUILT_IN_ATOMIC_ADD_FETCH_8:
6616 case BUILT_IN_ATOMIC_ADD_FETCH_16:
6617 {
6618 enum built_in_function lib;
6619 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6620 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6621 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6622 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6623 ignore, lib);
6624 if (target)
6625 return target;
6626 break;
6627 }
6628 case BUILT_IN_ATOMIC_SUB_FETCH_1:
6629 case BUILT_IN_ATOMIC_SUB_FETCH_2:
6630 case BUILT_IN_ATOMIC_SUB_FETCH_4:
6631 case BUILT_IN_ATOMIC_SUB_FETCH_8:
6632 case BUILT_IN_ATOMIC_SUB_FETCH_16:
6633 {
6634 enum built_in_function lib;
6635 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6636 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6637 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6638 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6639 ignore, lib);
6640 if (target)
6641 return target;
6642 break;
6643 }
6644 case BUILT_IN_ATOMIC_AND_FETCH_1:
6645 case BUILT_IN_ATOMIC_AND_FETCH_2:
6646 case BUILT_IN_ATOMIC_AND_FETCH_4:
6647 case BUILT_IN_ATOMIC_AND_FETCH_8:
6648 case BUILT_IN_ATOMIC_AND_FETCH_16:
6649 {
6650 enum built_in_function lib;
6651 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6652 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6653 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6654 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6655 ignore, lib);
6656 if (target)
6657 return target;
6658 break;
6659 }
6660 case BUILT_IN_ATOMIC_NAND_FETCH_1:
6661 case BUILT_IN_ATOMIC_NAND_FETCH_2:
6662 case BUILT_IN_ATOMIC_NAND_FETCH_4:
6663 case BUILT_IN_ATOMIC_NAND_FETCH_8:
6664 case BUILT_IN_ATOMIC_NAND_FETCH_16:
6665 {
6666 enum built_in_function lib;
6667 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6668 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6669 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6670 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6671 ignore, lib);
6672 if (target)
6673 return target;
6674 break;
6675 }
6676 case BUILT_IN_ATOMIC_XOR_FETCH_1:
6677 case BUILT_IN_ATOMIC_XOR_FETCH_2:
6678 case BUILT_IN_ATOMIC_XOR_FETCH_4:
6679 case BUILT_IN_ATOMIC_XOR_FETCH_8:
6680 case BUILT_IN_ATOMIC_XOR_FETCH_16:
6681 {
6682 enum built_in_function lib;
6683 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6684 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6685 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6686 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6687 ignore, lib);
6688 if (target)
6689 return target;
6690 break;
6691 }
6692 case BUILT_IN_ATOMIC_OR_FETCH_1:
6693 case BUILT_IN_ATOMIC_OR_FETCH_2:
6694 case BUILT_IN_ATOMIC_OR_FETCH_4:
6695 case BUILT_IN_ATOMIC_OR_FETCH_8:
6696 case BUILT_IN_ATOMIC_OR_FETCH_16:
6697 {
6698 enum built_in_function lib;
6699 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6700 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6701 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6702 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6703 ignore, lib);
6704 if (target)
6705 return target;
6706 break;
6707 }
6708 case BUILT_IN_ATOMIC_FETCH_ADD_1:
6709 case BUILT_IN_ATOMIC_FETCH_ADD_2:
6710 case BUILT_IN_ATOMIC_FETCH_ADD_4:
6711 case BUILT_IN_ATOMIC_FETCH_ADD_8:
6712 case BUILT_IN_ATOMIC_FETCH_ADD_16:
6713 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6714 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6715 ignore, BUILT_IN_NONE);
6716 if (target)
6717 return target;
6718 break;
6719
6720 case BUILT_IN_ATOMIC_FETCH_SUB_1:
6721 case BUILT_IN_ATOMIC_FETCH_SUB_2:
6722 case BUILT_IN_ATOMIC_FETCH_SUB_4:
6723 case BUILT_IN_ATOMIC_FETCH_SUB_8:
6724 case BUILT_IN_ATOMIC_FETCH_SUB_16:
6725 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6726 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6727 ignore, BUILT_IN_NONE);
6728 if (target)
6729 return target;
6730 break;
6731
6732 case BUILT_IN_ATOMIC_FETCH_AND_1:
6733 case BUILT_IN_ATOMIC_FETCH_AND_2:
6734 case BUILT_IN_ATOMIC_FETCH_AND_4:
6735 case BUILT_IN_ATOMIC_FETCH_AND_8:
6736 case BUILT_IN_ATOMIC_FETCH_AND_16:
6737 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6738 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6739 ignore, BUILT_IN_NONE);
6740 if (target)
6741 return target;
6742 break;
6743
6744 case BUILT_IN_ATOMIC_FETCH_NAND_1:
6745 case BUILT_IN_ATOMIC_FETCH_NAND_2:
6746 case BUILT_IN_ATOMIC_FETCH_NAND_4:
6747 case BUILT_IN_ATOMIC_FETCH_NAND_8:
6748 case BUILT_IN_ATOMIC_FETCH_NAND_16:
6749 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6750 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6751 ignore, BUILT_IN_NONE);
6752 if (target)
6753 return target;
6754 break;
6755
6756 case BUILT_IN_ATOMIC_FETCH_XOR_1:
6757 case BUILT_IN_ATOMIC_FETCH_XOR_2:
6758 case BUILT_IN_ATOMIC_FETCH_XOR_4:
6759 case BUILT_IN_ATOMIC_FETCH_XOR_8:
6760 case BUILT_IN_ATOMIC_FETCH_XOR_16:
6761 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6762 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6763 ignore, BUILT_IN_NONE);
6764 if (target)
6765 return target;
6766 break;
6767
6768 case BUILT_IN_ATOMIC_FETCH_OR_1:
6769 case BUILT_IN_ATOMIC_FETCH_OR_2:
6770 case BUILT_IN_ATOMIC_FETCH_OR_4:
6771 case BUILT_IN_ATOMIC_FETCH_OR_8:
6772 case BUILT_IN_ATOMIC_FETCH_OR_16:
6773 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6774 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6775 ignore, BUILT_IN_NONE);
6776 if (target)
6777 return target;
6778 break;
6779
6780 case BUILT_IN_ATOMIC_TEST_AND_SET:
6781 return expand_builtin_atomic_test_and_set (exp, target);
6782
6783 case BUILT_IN_ATOMIC_CLEAR:
6784 return expand_builtin_atomic_clear (exp);
6785
6786 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6787 return expand_builtin_atomic_always_lock_free (exp);
6788
6789 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6790 target = expand_builtin_atomic_is_lock_free (exp);
6791 if (target)
6792 return target;
6793 break;
6794
6795 case BUILT_IN_ATOMIC_THREAD_FENCE:
6796 expand_builtin_atomic_thread_fence (exp);
6797 return const0_rtx;
6798
6799 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6800 expand_builtin_atomic_signal_fence (exp);
6801 return const0_rtx;
6802
6803 case BUILT_IN_OBJECT_SIZE:
6804 return expand_builtin_object_size (exp);
6805
6806 case BUILT_IN_MEMCPY_CHK:
6807 case BUILT_IN_MEMPCPY_CHK:
6808 case BUILT_IN_MEMMOVE_CHK:
6809 case BUILT_IN_MEMSET_CHK:
6810 target = expand_builtin_memory_chk (exp, target, mode, fcode);
6811 if (target)
6812 return target;
6813 break;
6814
6815 case BUILT_IN_STRCPY_CHK:
6816 case BUILT_IN_STPCPY_CHK:
6817 case BUILT_IN_STRNCPY_CHK:
6818 case BUILT_IN_STPNCPY_CHK:
6819 case BUILT_IN_STRCAT_CHK:
6820 case BUILT_IN_STRNCAT_CHK:
6821 case BUILT_IN_SNPRINTF_CHK:
6822 case BUILT_IN_VSNPRINTF_CHK:
6823 maybe_emit_chk_warning (exp, fcode);
6824 break;
6825
6826 case BUILT_IN_SPRINTF_CHK:
6827 case BUILT_IN_VSPRINTF_CHK:
6828 maybe_emit_sprintf_chk_warning (exp, fcode);
6829 break;
6830
6831 case BUILT_IN_FREE:
6832 if (warn_free_nonheap_object)
6833 maybe_emit_free_warning (exp);
6834 break;
6835
6836 case BUILT_IN_THREAD_POINTER:
6837 return expand_builtin_thread_pointer (exp, target);
6838
6839 case BUILT_IN_SET_THREAD_POINTER:
6840 expand_builtin_set_thread_pointer (exp);
6841 return const0_rtx;
6842
6843 case BUILT_IN_CILK_DETACH:
6844 expand_builtin_cilk_detach (exp);
6845 return const0_rtx;
6846
6847 case BUILT_IN_CILK_POP_FRAME:
6848 expand_builtin_cilk_pop_frame (exp);
6849 return const0_rtx;
6850
6851 default: /* just do library call, if unknown builtin */
6852 break;
6853 }
6854
6855 /* The switch statement above can drop through to cause the function
6856 to be called normally. */
6857 return expand_call (exp, target, ignore);
6858 }
6859
6860 /* Determine whether a tree node represents a call to a built-in
6861 function. If the tree T is a call to a built-in function with
6862 the right number of arguments of the appropriate types, return
6863 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6864 Otherwise the return value is END_BUILTINS. */
6865
6866 enum built_in_function
builtin_mathfn_code(const_tree t)6867 builtin_mathfn_code (const_tree t)
6868 {
6869 const_tree fndecl, arg, parmlist;
6870 const_tree argtype, parmtype;
6871 const_call_expr_arg_iterator iter;
6872
6873 if (TREE_CODE (t) != CALL_EXPR
6874 || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6875 return END_BUILTINS;
6876
6877 fndecl = get_callee_fndecl (t);
6878 if (fndecl == NULL_TREE
6879 || TREE_CODE (fndecl) != FUNCTION_DECL
6880 || ! DECL_BUILT_IN (fndecl)
6881 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6882 return END_BUILTINS;
6883
6884 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6885 init_const_call_expr_arg_iterator (t, &iter);
6886 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6887 {
6888 /* If a function doesn't take a variable number of arguments,
6889 the last element in the list will have type `void'. */
6890 parmtype = TREE_VALUE (parmlist);
6891 if (VOID_TYPE_P (parmtype))
6892 {
6893 if (more_const_call_expr_args_p (&iter))
6894 return END_BUILTINS;
6895 return DECL_FUNCTION_CODE (fndecl);
6896 }
6897
6898 if (! more_const_call_expr_args_p (&iter))
6899 return END_BUILTINS;
6900
6901 arg = next_const_call_expr_arg (&iter);
6902 argtype = TREE_TYPE (arg);
6903
6904 if (SCALAR_FLOAT_TYPE_P (parmtype))
6905 {
6906 if (! SCALAR_FLOAT_TYPE_P (argtype))
6907 return END_BUILTINS;
6908 }
6909 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6910 {
6911 if (! COMPLEX_FLOAT_TYPE_P (argtype))
6912 return END_BUILTINS;
6913 }
6914 else if (POINTER_TYPE_P (parmtype))
6915 {
6916 if (! POINTER_TYPE_P (argtype))
6917 return END_BUILTINS;
6918 }
6919 else if (INTEGRAL_TYPE_P (parmtype))
6920 {
6921 if (! INTEGRAL_TYPE_P (argtype))
6922 return END_BUILTINS;
6923 }
6924 else
6925 return END_BUILTINS;
6926 }
6927
6928 /* Variable-length argument list. */
6929 return DECL_FUNCTION_CODE (fndecl);
6930 }
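
/* Editorial illustration (assumed front-end view, not part of this file):
   for a call tree T built from

     double y = sqrt (x);

   builtin_mathfn_code returns BUILT_IN_SQRT; if the call's argument types
   do not match the builtin's prototype (say, a pointer passed where a
   float is expected), it returns END_BUILTINS instead.  */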
6931
6932 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6933 evaluate to a constant. */
6934
6935 static tree
fold_builtin_constant_p(tree arg)6936 fold_builtin_constant_p (tree arg)
6937 {
6938 /* We return 1 for a numeric type that's known to be a constant
6939 value at compile-time or for an aggregate type that's a
6940 literal constant. */
6941 STRIP_NOPS (arg);
6942
6943 /* If we know this is a constant, return the constant one. */
6944 if (CONSTANT_CLASS_P (arg)
6945 || (TREE_CODE (arg) == CONSTRUCTOR
6946 && TREE_CONSTANT (arg)))
6947 return integer_one_node;
6948 if (TREE_CODE (arg) == ADDR_EXPR)
6949 {
6950 tree op = TREE_OPERAND (arg, 0);
6951 if (TREE_CODE (op) == STRING_CST
6952 || (TREE_CODE (op) == ARRAY_REF
6953 && integer_zerop (TREE_OPERAND (op, 1))
6954 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6955 return integer_one_node;
6956 }
6957
6958 /* If this expression has side effects, show we don't know it to be a
6959 constant. Likewise if it's a pointer or aggregate type, since in
6960 those cases we only want literals; those are only optimized
6961 when generating RTL, not later.
6962 And finally, if we are compiling an initializer, not code, we
6963 need to return a definite result now; there's not going to be any
6964 more optimization done. */
6965 if (TREE_SIDE_EFFECTS (arg)
6966 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6967 || POINTER_TYPE_P (TREE_TYPE (arg))
6968 || cfun == 0
6969 || folding_initializer
6970 || force_folding_builtin_constant_p)
6971 return integer_zero_node;
6972
6973 return NULL_TREE;
6974 }
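
/* Illustrative examples of the cases above (hedged, source-level view):

     __builtin_constant_p (3 + 4)   -- folds to 1 (constant after NOPs)
     __builtin_constant_p ("abc")   -- folds to 1 (ADDR_EXPR of a string)
     __builtin_constant_p (ptr)     -- folds to 0 for a pointer variable

   The NULL_TREE result defers the decision until later folding.  */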
6975
6976 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6977 return it as a truthvalue. */
6978
6979 static tree
6980 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
6981 tree predictor)
6982 {
6983 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6984
6985 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
6986 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6987 ret_type = TREE_TYPE (TREE_TYPE (fn));
6988 pred_type = TREE_VALUE (arg_types);
6989 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6990
6991 pred = fold_convert_loc (loc, pred_type, pred);
6992 expected = fold_convert_loc (loc, expected_type, expected);
6993 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
6994 predictor);
6995
6996 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6997 build_int_cst (ret_type, 0));
6998 }
6999
7000 /* Fold a call to builtin_expect with arguments ARG0, ARG1 and ARG2. Return
7001 NULL_TREE if no simplification is possible. */
7002
7003 tree
7004 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7005 {
7006 tree inner, fndecl, inner_arg0;
7007 enum tree_code code;
7008
7009 /* Distribute the expected value over short-circuiting operators.
7010 See through the cast from truthvalue_type_node to long. */
7011 inner_arg0 = arg0;
7012 while (TREE_CODE (inner_arg0) == NOP_EXPR
7013 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7014 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7015 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7016
7017 /* If this is a builtin_expect within a builtin_expect keep the
7018 inner one. See through a comparison against a constant. It
7019 might have been added to create a truthvalue. */
7020 inner = inner_arg0;
7021
7022 if (COMPARISON_CLASS_P (inner)
7023 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7024 inner = TREE_OPERAND (inner, 0);
7025
7026 if (TREE_CODE (inner) == CALL_EXPR
7027 && (fndecl = get_callee_fndecl (inner))
7028 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
7029 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
7030 return arg0;
7031
7032 inner = inner_arg0;
7033 code = TREE_CODE (inner);
7034 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
7035 {
7036 tree op0 = TREE_OPERAND (inner, 0);
7037 tree op1 = TREE_OPERAND (inner, 1);
7038
7039 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
7040 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
7041 inner = build2 (code, TREE_TYPE (inner), op0, op1);
7042
7043 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
7044 }
7045
7046 /* If the argument isn't invariant then there's nothing else we can do. */
7047 if (!TREE_CONSTANT (inner_arg0))
7048 return NULL_TREE;
7049
7050 /* If we expect that a comparison against the argument will fold to
7051 a constant return the constant. In practice, this means a true
7052 constant or the address of a non-weak symbol. */
7053 inner = inner_arg0;
7054 STRIP_NOPS (inner);
7055 if (TREE_CODE (inner) == ADDR_EXPR)
7056 {
7057 do
7058 {
7059 inner = TREE_OPERAND (inner, 0);
7060 }
7061 while (TREE_CODE (inner) == COMPONENT_REF
7062 || TREE_CODE (inner) == ARRAY_REF);
7063 if ((TREE_CODE (inner) == VAR_DECL
7064 || TREE_CODE (inner) == FUNCTION_DECL)
7065 && DECL_WEAK (inner))
7066 return NULL_TREE;
7067 }
7068
7069 /* Otherwise, ARG0 already has the proper type for the return value. */
7070 return arg0;
7071 }
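
/* Editorial sketch of the distribution above: a call such as

     if (__builtin_expect (a > 0 && b > 0, 1))

   is rewritten as roughly

     if ((__builtin_expect (a > 0, 1) != 0)
	 && (__builtin_expect (b > 0, 1) != 0))

   so that each short-circuit operand carries its own prediction.  */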
7072
7073 /* Fold a call to __builtin_classify_type with argument ARG. */
7074
7075 static tree
7076 fold_builtin_classify_type (tree arg)
7077 {
7078 if (arg == 0)
7079 return build_int_cst (integer_type_node, no_type_class);
7080
7081 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
7082 }
7083
7084 /* Fold a call to __builtin_strlen with argument ARG. */
7085
7086 static tree
7087 fold_builtin_strlen (location_t loc, tree type, tree arg)
7088 {
7089 if (!validate_arg (arg, POINTER_TYPE))
7090 return NULL_TREE;
7091 else
7092 {
7093 tree len = c_strlen (arg, 0);
7094
7095 if (len)
7096 return fold_convert_loc (loc, type, len);
7097
7098 return NULL_TREE;
7099 }
7100 }
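
/* Illustrative: __builtin_strlen ("abcd") folds to the constant 4
   (converted to the call's return type) because c_strlen can measure the
   literal; a non-constant pointer argument is left as a call.  */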
7101
7102 /* Fold a call to __builtin_inf or __builtin_huge_val. */
7103
7104 static tree
7105 fold_builtin_inf (location_t loc, tree type, int warn)
7106 {
7107 REAL_VALUE_TYPE real;
7108
7109 /* __builtin_inff is intended to be usable to define INFINITY on all
7110 targets. If an infinity is not available, INFINITY expands "to a
7111 positive constant of type float that overflows at translation
7112 time", footnote "In this case, using INFINITY will violate the
7113 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7114 Thus we pedwarn to ensure this constraint violation is
7115 diagnosed. */
7116 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7117 pedwarn (loc, 0, "target format does not support infinity");
7118
7119 real_inf (&real);
7120 return build_real (type, real);
7121 }
7122
7123 /* Fold a call to __builtin_nan or __builtin_nans with argument ARG. */
7124
7125 static tree
7126 fold_builtin_nan (tree arg, tree type, int quiet)
7127 {
7128 REAL_VALUE_TYPE real;
7129 const char *str;
7130
7131 if (!validate_arg (arg, POINTER_TYPE))
7132 return NULL_TREE;
7133 str = c_getstr (arg);
7134 if (!str)
7135 return NULL_TREE;
7136
7137 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
7138 return NULL_TREE;
7139
7140 return build_real (type, real);
7141 }
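
/* Illustrative: __builtin_nan ("") folds to a quiet-NaN REAL_CST and
   __builtin_nans ("") to a signalling one; if the argument is not a
   string literal, c_getstr fails and the call survives to runtime.  */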
7142
7143 /* Return true if the floating point expression T has an integer value.
7144 We also allow +Inf, -Inf and NaN to be considered integer values. */
7145
7146 static bool
7147 integer_valued_real_p (tree t)
7148 {
7149 switch (TREE_CODE (t))
7150 {
7151 case FLOAT_EXPR:
7152 return true;
7153
7154 case ABS_EXPR:
7155 case SAVE_EXPR:
7156 return integer_valued_real_p (TREE_OPERAND (t, 0));
7157
7158 case COMPOUND_EXPR:
7159 case MODIFY_EXPR:
7160 case BIND_EXPR:
7161 return integer_valued_real_p (TREE_OPERAND (t, 1));
7162
7163 case PLUS_EXPR:
7164 case MINUS_EXPR:
7165 case MULT_EXPR:
7166 case MIN_EXPR:
7167 case MAX_EXPR:
7168 return integer_valued_real_p (TREE_OPERAND (t, 0))
7169 && integer_valued_real_p (TREE_OPERAND (t, 1));
7170
7171 case COND_EXPR:
7172 return integer_valued_real_p (TREE_OPERAND (t, 1))
7173 && integer_valued_real_p (TREE_OPERAND (t, 2));
7174
7175 case REAL_CST:
7176 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
7177
7178 case NOP_EXPR:
7179 {
7180 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
7181 if (TREE_CODE (type) == INTEGER_TYPE)
7182 return true;
7183 if (TREE_CODE (type) == REAL_TYPE)
7184 return integer_valued_real_p (TREE_OPERAND (t, 0));
7185 break;
7186 }
7187
7188 case CALL_EXPR:
7189 switch (builtin_mathfn_code (t))
7190 {
7191 CASE_FLT_FN (BUILT_IN_CEIL):
7192 CASE_FLT_FN (BUILT_IN_FLOOR):
7193 CASE_FLT_FN (BUILT_IN_NEARBYINT):
7194 CASE_FLT_FN (BUILT_IN_RINT):
7195 CASE_FLT_FN (BUILT_IN_ROUND):
7196 CASE_FLT_FN (BUILT_IN_TRUNC):
7197 return true;
7198
7199 CASE_FLT_FN (BUILT_IN_FMIN):
7200 CASE_FLT_FN (BUILT_IN_FMAX):
7201 return integer_valued_real_p (CALL_EXPR_ARG (t, 0))
7202 && integer_valued_real_p (CALL_EXPR_ARG (t, 1));
7203
7204 default:
7205 break;
7206 }
7207 break;
7208
7209 default:
7210 break;
7211 }
7212 return false;
7213 }
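
/* Editorial example: for

     double d = floor (x) + 3.0;

   the PLUS_EXPR case recurses into the floor call (always integer valued
   per the CALL_EXPR case) and the REAL_CST 3.0 (real_isinteger), so the
   whole expression is known integer valued; x + 0.5 would not be.  */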
7214
7215 /* FNDECL is assumed to be a builtin where truncation can be propagated
7216 across (for instance floor((double)f) == (double)floorf (f)).
7217 Do the transformation for a call with argument ARG. */
7218
7219 static tree
7220 fold_trunc_transparent_mathfn (location_t loc, tree fndecl, tree arg)
7221 {
7222 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7223
7224 if (!validate_arg (arg, REAL_TYPE))
7225 return NULL_TREE;
7226
7227 /* Integer rounding functions are idempotent. */
7228 if (fcode == builtin_mathfn_code (arg))
7229 return arg;
7230
7231 /* If argument is already integer valued, and we don't need to worry
7232 about setting errno, there's no need to perform rounding. */
7233 if (! flag_errno_math && integer_valued_real_p (arg))
7234 return arg;
7235
7236 if (optimize)
7237 {
7238 tree arg0 = strip_float_extensions (arg);
7239 tree ftype = TREE_TYPE (TREE_TYPE (fndecl));
7240 tree newtype = TREE_TYPE (arg0);
7241 tree decl;
7242
7243 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7244 && (decl = mathfn_built_in (newtype, fcode)))
7245 return fold_convert_loc (loc, ftype,
7246 build_call_expr_loc (loc, decl, 1,
7247 fold_convert_loc (loc,
7248 newtype,
7249 arg0)));
7250 }
7251 return NULL_TREE;
7252 }
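
/* Editorial sketch of the narrowing above: with optimization enabled,

     float f;
     double d = floor ((double) f);

   becomes (double) floorf (f), assuming floorf is available, since
   flooring a value that fits in float yields a result that is itself
   representable in float.  */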
7253
7254 /* FNDECL is assumed to be a builtin which can narrow the FP type of
7255 the argument, for instance lround((double)f) -> lroundf (f).
7256 Do the transformation for a call with argument ARG. */
7257
7258 static tree
7259 fold_fixed_mathfn (location_t loc, tree fndecl, tree arg)
7260 {
7261 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7262
7263 if (!validate_arg (arg, REAL_TYPE))
7264 return NULL_TREE;
7265
7266 /* If argument is already integer valued, and we don't need to worry
7267 about setting errno, there's no need to perform rounding. */
7268 if (! flag_errno_math && integer_valued_real_p (arg))
7269 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
7270 TREE_TYPE (TREE_TYPE (fndecl)), arg);
7271
7272 if (optimize)
7273 {
7274 tree ftype = TREE_TYPE (arg);
7275 tree arg0 = strip_float_extensions (arg);
7276 tree newtype = TREE_TYPE (arg0);
7277 tree decl;
7278
7279 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (ftype)
7280 && (decl = mathfn_built_in (newtype, fcode)))
7281 return build_call_expr_loc (loc, decl, 1,
7282 fold_convert_loc (loc, newtype, arg0));
7283 }
7284
7285 /* Canonicalize iround (x) to lround (x) on ILP32 targets where
7286 sizeof (int) == sizeof (long). */
7287 if (TYPE_PRECISION (integer_type_node)
7288 == TYPE_PRECISION (long_integer_type_node))
7289 {
7290 tree newfn = NULL_TREE;
7291 switch (fcode)
7292 {
7293 CASE_FLT_FN (BUILT_IN_ICEIL):
7294 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7295 break;
7296
7297 CASE_FLT_FN (BUILT_IN_IFLOOR):
7298 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7299 break;
7300
7301 CASE_FLT_FN (BUILT_IN_IROUND):
7302 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7303 break;
7304
7305 CASE_FLT_FN (BUILT_IN_IRINT):
7306 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7307 break;
7308
7309 default:
7310 break;
7311 }
7312
7313 if (newfn)
7314 {
7315 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7316 return fold_convert_loc (loc,
7317 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7318 }
7319 }
7320
7321 /* Canonicalize llround (x) to lround (x) on LP64 targets where
7322 sizeof (long long) == sizeof (long). */
7323 if (TYPE_PRECISION (long_long_integer_type_node)
7324 == TYPE_PRECISION (long_integer_type_node))
7325 {
7326 tree newfn = NULL_TREE;
7327 switch (fcode)
7328 {
7329 CASE_FLT_FN (BUILT_IN_LLCEIL):
7330 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LCEIL);
7331 break;
7332
7333 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7334 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LFLOOR);
7335 break;
7336
7337 CASE_FLT_FN (BUILT_IN_LLROUND):
7338 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LROUND);
7339 break;
7340
7341 CASE_FLT_FN (BUILT_IN_LLRINT):
7342 newfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_LRINT);
7343 break;
7344
7345 default:
7346 break;
7347 }
7348
7349 if (newfn)
7350 {
7351 tree newcall = build_call_expr_loc (loc, newfn, 1, arg);
7352 return fold_convert_loc (loc,
7353 TREE_TYPE (TREE_TYPE (fndecl)), newcall);
7354 }
7355 }
7356
7357 return NULL_TREE;
7358 }
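
/* Illustrative: on an LP64 target, where long and long long have the same
   precision, llround (x) is canonicalized to lround (x) (and likewise the
   llceil/llfloor/llrint cases above), so later passes see one canonical
   form per rounding style.  */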
7359
7360 /* Fold call to builtin cabs, cabsf or cabsl with argument ARG. TYPE is the
7361 return type. Return NULL_TREE if no simplification can be made. */
7362
7363 static tree
7364 fold_builtin_cabs (location_t loc, tree arg, tree type, tree fndecl)
7365 {
7366 tree res;
7367
7368 if (!validate_arg (arg, COMPLEX_TYPE)
7369 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7370 return NULL_TREE;
7371
7372 /* Calculate the result when the argument is a constant. */
7373 if (TREE_CODE (arg) == COMPLEX_CST
7374 && (res = do_mpfr_arg2 (TREE_REALPART (arg), TREE_IMAGPART (arg),
7375 type, mpfr_hypot)))
7376 return res;
7377
7378 if (TREE_CODE (arg) == COMPLEX_EXPR)
7379 {
7380 tree real = TREE_OPERAND (arg, 0);
7381 tree imag = TREE_OPERAND (arg, 1);
7382
7383 /* If either part is zero, cabs is fabs of the other. */
7384 if (real_zerop (real))
7385 return fold_build1_loc (loc, ABS_EXPR, type, imag);
7386 if (real_zerop (imag))
7387 return fold_build1_loc (loc, ABS_EXPR, type, real);
7388
7389 /* cabs(x+xi) -> fabs(x)*sqrt(2). */
7390 if (flag_unsafe_math_optimizations
7391 && operand_equal_p (real, imag, OEP_PURE_SAME))
7392 {
7393 const REAL_VALUE_TYPE sqrt2_trunc
7394 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
7395 STRIP_NOPS (real);
7396 return fold_build2_loc (loc, MULT_EXPR, type,
7397 fold_build1_loc (loc, ABS_EXPR, type, real),
7398 build_real (type, sqrt2_trunc));
7399 }
7400 }
7401
7402 /* Optimize cabs(-z) and cabs(conj(z)) as cabs(z). */
7403 if (TREE_CODE (arg) == NEGATE_EXPR
7404 || TREE_CODE (arg) == CONJ_EXPR)
7405 return build_call_expr_loc (loc, fndecl, 1, TREE_OPERAND (arg, 0));
7406
7407 /* Don't do this when optimizing for size. */
7408 if (flag_unsafe_math_optimizations
7409 && optimize && optimize_function_for_speed_p (cfun))
7410 {
7411 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
7412
7413 if (sqrtfn != NULL_TREE)
7414 {
7415 tree rpart, ipart, result;
7416
7417 arg = builtin_save_expr (arg);
7418
7419 rpart = fold_build1_loc (loc, REALPART_EXPR, type, arg);
7420 ipart = fold_build1_loc (loc, IMAGPART_EXPR, type, arg);
7421
7422 rpart = builtin_save_expr (rpart);
7423 ipart = builtin_save_expr (ipart);
7424
7425 result = fold_build2_loc (loc, PLUS_EXPR, type,
7426 fold_build2_loc (loc, MULT_EXPR, type,
7427 rpart, rpart),
7428 fold_build2_loc (loc, MULT_EXPR, type,
7429 ipart, ipart));
7430
7431 return build_call_expr_loc (loc, sqrtfn, 1, result);
7432 }
7433 }
7434
7435 return NULL_TREE;
7436 }
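
/* Editorial, user-level view of the folds above (complex.h notation):

     cabs (x + 0.0 * I)          -- fabs of the real part
     cabs (-z), cabs (conj (z))  -- both become cabs (z)

   and with -funsafe-math-optimizations, when optimizing for speed,
   cabs (z) is expanded as sqrt (r*r + i*i).  */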
7437
7438 /* Build a complex (inf +- 0i) for the result of cproj. TYPE is the
7439 complex tree type of the result. If NEG is true, the imaginary
7440 zero is negative. */
7441
7442 static tree
7443 build_complex_cproj (tree type, bool neg)
7444 {
7445 REAL_VALUE_TYPE rinf, rzero = dconst0;
7446
7447 real_inf (&rinf);
7448 rzero.sign = neg;
7449 return build_complex (type, build_real (TREE_TYPE (type), rinf),
7450 build_real (TREE_TYPE (type), rzero));
7451 }
7452
7453 /* Fold call to builtin cproj, cprojf or cprojl with argument ARG. TYPE is the
7454 return type. Return NULL_TREE if no simplification can be made. */
7455
7456 static tree
7457 fold_builtin_cproj (location_t loc, tree arg, tree type)
7458 {
7459 if (!validate_arg (arg, COMPLEX_TYPE)
7460 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != REAL_TYPE)
7461 return NULL_TREE;
7462
7463 /* If there are no infinities, return arg. */
7464 if (! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (type))))
7465 return non_lvalue_loc (loc, arg);
7466
7467 /* Calculate the result when the argument is a constant. */
7468 if (TREE_CODE (arg) == COMPLEX_CST)
7469 {
7470 const REAL_VALUE_TYPE *real = TREE_REAL_CST_PTR (TREE_REALPART (arg));
7471 const REAL_VALUE_TYPE *imag = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
7472
7473 if (real_isinf (real) || real_isinf (imag))
7474 return build_complex_cproj (type, imag->sign);
7475 else
7476 return arg;
7477 }
7478 else if (TREE_CODE (arg) == COMPLEX_EXPR)
7479 {
7480 tree real = TREE_OPERAND (arg, 0);
7481 tree imag = TREE_OPERAND (arg, 1);
7482
7483 STRIP_NOPS (real);
7484 STRIP_NOPS (imag);
7485
7486 /* If the real part is inf and the imag part is known to be
7487 nonnegative, return (inf + 0i). Remember side-effects are
7488 possible in the imag part. */
7489 if (TREE_CODE (real) == REAL_CST
7490 && real_isinf (TREE_REAL_CST_PTR (real))
7491 && tree_expr_nonnegative_p (imag))
7492 return omit_one_operand_loc (loc, type,
7493 build_complex_cproj (type, false),
7494 arg);
7495
7496 /* If the imag part is inf, return (inf+I*copysign(0,imag)).
7497 Remember side-effects are possible in the real part. */
7498 if (TREE_CODE (imag) == REAL_CST
7499 && real_isinf (TREE_REAL_CST_PTR (imag)))
7500 return
7501 omit_one_operand_loc (loc, type,
7502 build_complex_cproj (type, TREE_REAL_CST_PTR
7503 (imag)->sign), arg);
7504 }
7505
7506 return NULL_TREE;
7507 }
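
/* Illustrative: cproj of a finite value is the value itself, while any
   infinite part projects to the point at infinity, e.g.

     cproj (INFINITY + 2.0 * I)  -- folds to (INFINITY + 0.0 * I)

   with the imaginary zero taking the sign of the original imaginary part,
   as build_complex_cproj arranges.  */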
7508
7509 /* Fold a builtin function call to sqrt, sqrtf, or sqrtl with argument ARG.
7510 Return NULL_TREE if no simplification can be made. */
7511
7512 static tree
7513 fold_builtin_sqrt (location_t loc, tree arg, tree type)
7514 {
7515
7516 enum built_in_function fcode;
7517 tree res;
7518
7519 if (!validate_arg (arg, REAL_TYPE))
7520 return NULL_TREE;
7521
7522 /* Calculate the result when the argument is a constant. */
7523 if ((res = do_mpfr_arg1 (arg, type, mpfr_sqrt, &dconst0, NULL, true)))
7524 return res;
7525
7526 /* Optimize sqrt(expN(x)) = expN(x*0.5). */
7527 fcode = builtin_mathfn_code (arg);
7528 if (flag_unsafe_math_optimizations && BUILTIN_EXPONENT_P (fcode))
7529 {
7530 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7531 arg = fold_build2_loc (loc, MULT_EXPR, type,
7532 CALL_EXPR_ARG (arg, 0),
7533 build_real (type, dconsthalf));
7534 return build_call_expr_loc (loc, expfn, 1, arg);
7535 }
7536
7537 /* Optimize sqrt(Nroot(x)) -> pow(x,1/(2*N)). */
7538 if (flag_unsafe_math_optimizations && BUILTIN_ROOT_P (fcode))
7539 {
7540 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7541
7542 if (powfn)
7543 {
7544 tree arg0 = CALL_EXPR_ARG (arg, 0);
7545 tree tree_root;
7546 /* The inner root was either sqrt or cbrt. */
7547 /* This was a conditional expression but it triggered a bug
7548 in Sun C 5.5. */
7549 REAL_VALUE_TYPE dconstroot;
7550 if (BUILTIN_SQRT_P (fcode))
7551 dconstroot = dconsthalf;
7552 else
7553 dconstroot = dconst_third ();
7554
7555 /* Adjust for the outer root. */
7556 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7557 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7558 tree_root = build_real (type, dconstroot);
7559 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7560 }
7561 }
7562
7563 /* Optimize sqrt(pow(x,y)) = pow(|x|,y*0.5). */
7564 if (flag_unsafe_math_optimizations
7565 && (fcode == BUILT_IN_POW
7566 || fcode == BUILT_IN_POWF
7567 || fcode == BUILT_IN_POWL))
7568 {
7569 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7570 tree arg0 = CALL_EXPR_ARG (arg, 0);
7571 tree arg1 = CALL_EXPR_ARG (arg, 1);
7572 tree narg1;
7573 if (!tree_expr_nonnegative_p (arg0))
7574 arg0 = build1 (ABS_EXPR, type, arg0);
7575 narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
7576 build_real (type, dconsthalf));
7577 return build_call_expr_loc (loc, powfn, 2, arg0, narg1);
7578 }
7579
7580 return NULL_TREE;
7581 }
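
/* Editorial summary of the unsafe-math folds above:

     sqrt (exp (x))     -> exp (x * 0.5)
     sqrt (sqrt (x))    -> pow (x, 0.25)
     sqrt (cbrt (x))    -> pow (x, 1.0/6)
     sqrt (pow (x, y))  -> pow (fabs (x), y * 0.5)

   None of these fire by default; they can change rounding and exception
   behaviour.  */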
7582
7583 /* Fold a builtin function call to cbrt, cbrtf, or cbrtl with argument ARG.
7584 Return NULL_TREE if no simplification can be made. */
7585
7586 static tree
7587 fold_builtin_cbrt (location_t loc, tree arg, tree type)
7588 {
7589 const enum built_in_function fcode = builtin_mathfn_code (arg);
7590 tree res;
7591
7592 if (!validate_arg (arg, REAL_TYPE))
7593 return NULL_TREE;
7594
7595 /* Calculate the result when the argument is a constant. */
7596 if ((res = do_mpfr_arg1 (arg, type, mpfr_cbrt, NULL, NULL, 0)))
7597 return res;
7598
7599 if (flag_unsafe_math_optimizations)
7600 {
7601 /* Optimize cbrt(expN(x)) -> expN(x/3). */
7602 if (BUILTIN_EXPONENT_P (fcode))
7603 {
7604 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7605 const REAL_VALUE_TYPE third_trunc =
7606 real_value_truncate (TYPE_MODE (type), dconst_third ());
7607 arg = fold_build2_loc (loc, MULT_EXPR, type,
7608 CALL_EXPR_ARG (arg, 0),
7609 build_real (type, third_trunc));
7610 return build_call_expr_loc (loc, expfn, 1, arg);
7611 }
7612
7613 /* Optimize cbrt(sqrt(x)) -> pow(x,1/6). */
7614 if (BUILTIN_SQRT_P (fcode))
7615 {
7616 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7617
7618 if (powfn)
7619 {
7620 tree arg0 = CALL_EXPR_ARG (arg, 0);
7621 tree tree_root;
7622 REAL_VALUE_TYPE dconstroot = dconst_third ();
7623
7624 SET_REAL_EXP (&dconstroot, REAL_EXP (&dconstroot) - 1);
7625 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7626 tree_root = build_real (type, dconstroot);
7627 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7628 }
7629 }
7630
7631 /* Optimize cbrt(cbrt(x)) -> pow(x,1/9) iff x is nonnegative. */
7632 if (BUILTIN_CBRT_P (fcode))
7633 {
7634 tree arg0 = CALL_EXPR_ARG (arg, 0);
7635 if (tree_expr_nonnegative_p (arg0))
7636 {
7637 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7638
7639 if (powfn)
7640 {
7641 tree tree_root;
7642 REAL_VALUE_TYPE dconstroot;
7643
7644 real_arithmetic (&dconstroot, MULT_EXPR,
7645 dconst_third_ptr (), dconst_third_ptr ());
7646 dconstroot = real_value_truncate (TYPE_MODE (type), dconstroot);
7647 tree_root = build_real (type, dconstroot);
7648 return build_call_expr_loc (loc, powfn, 2, arg0, tree_root);
7649 }
7650 }
7651 }
7652
7653 /* Optimize cbrt(pow(x,y)) -> pow(x,y/3) iff x is nonnegative. */
7654 if (fcode == BUILT_IN_POW
7655 || fcode == BUILT_IN_POWF
7656 || fcode == BUILT_IN_POWL)
7657 {
7658 tree arg00 = CALL_EXPR_ARG (arg, 0);
7659 tree arg01 = CALL_EXPR_ARG (arg, 1);
7660 if (tree_expr_nonnegative_p (arg00))
7661 {
7662 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg), 0);
7663 const REAL_VALUE_TYPE dconstroot
7664 = real_value_truncate (TYPE_MODE (type), dconst_third ());
7665 tree narg01 = fold_build2_loc (loc, MULT_EXPR, type, arg01,
7666 build_real (type, dconstroot));
7667 return build_call_expr_loc (loc, powfn, 2, arg00, narg01);
7668 }
7669 }
7670 }
7671 return NULL_TREE;
7672 }
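
/* Editorial summary, mirroring the sqrt case (unsafe math only):

     cbrt (exp (x))     -> exp (x / 3)
     cbrt (sqrt (x))    -> pow (x, 1.0/6)
     cbrt (cbrt (x))    -> pow (x, 1.0/9)   (x nonnegative)
     cbrt (pow (x, y))  -> pow (x, y / 3)   (x nonnegative)
*/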
7673
7674 /* Fold function call to builtin cos, cosf, or cosl with argument ARG.
7675 TYPE is the type of the return value. Return NULL_TREE if no
7676 simplification can be made. */
7677
7678 static tree
7679 fold_builtin_cos (location_t loc,
7680 tree arg, tree type, tree fndecl)
7681 {
7682 tree res, narg;
7683
7684 if (!validate_arg (arg, REAL_TYPE))
7685 return NULL_TREE;
7686
7687 /* Calculate the result when the argument is a constant. */
7688 if ((res = do_mpfr_arg1 (arg, type, mpfr_cos, NULL, NULL, 0)))
7689 return res;
7690
7691 /* Optimize cos(-x) into cos (x). */
7692 if ((narg = fold_strip_sign_ops (arg)))
7693 return build_call_expr_loc (loc, fndecl, 1, narg);
7694
7695 return NULL_TREE;
7696 }
7697
7698 /* Fold function call to builtin cosh, coshf, or coshl with argument ARG.
7699 Return NULL_TREE if no simplification can be made. */
7700
7701 static tree
7702 fold_builtin_cosh (location_t loc, tree arg, tree type, tree fndecl)
7703 {
7704 if (validate_arg (arg, REAL_TYPE))
7705 {
7706 tree res, narg;
7707
7708 /* Calculate the result when the argument is a constant. */
7709 if ((res = do_mpfr_arg1 (arg, type, mpfr_cosh, NULL, NULL, 0)))
7710 return res;
7711
7712 /* Optimize cosh(-x) into cosh (x). */
7713 if ((narg = fold_strip_sign_ops (arg)))
7714 return build_call_expr_loc (loc, fndecl, 1, narg);
7715 }
7716
7717 return NULL_TREE;
7718 }
7719
7720 /* Fold function call to builtin ccos (or ccosh if HYPER is TRUE) with
7721 argument ARG. TYPE is the type of the return value. Return
7722 NULL_TREE if no simplification can be made. */
7723
7724 static tree
7725 fold_builtin_ccos (location_t loc, tree arg, tree type, tree fndecl,
7726 bool hyper)
7727 {
7728 if (validate_arg (arg, COMPLEX_TYPE)
7729 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7730 {
7731 tree tmp;
7732
7733 /* Calculate the result when the argument is a constant. */
7734 if ((tmp = do_mpc_arg1 (arg, type, (hyper ? mpc_cosh : mpc_cos))))
7735 return tmp;
7736
7737 /* Optimize fn(-x) into fn(x). */
7738 if ((tmp = fold_strip_sign_ops (arg)))
7739 return build_call_expr_loc (loc, fndecl, 1, tmp);
7740 }
7741
7742 return NULL_TREE;
7743 }
7744
7745 /* Fold function call to builtin tan, tanf, or tanl with argument ARG.
7746 Return NULL_TREE if no simplification can be made. */
7747
7748 static tree
7749 fold_builtin_tan (tree arg, tree type)
7750 {
7751 enum built_in_function fcode;
7752 tree res;
7753
7754 if (!validate_arg (arg, REAL_TYPE))
7755 return NULL_TREE;
7756
7757 /* Calculate the result when the argument is a constant. */
7758 if ((res = do_mpfr_arg1 (arg, type, mpfr_tan, NULL, NULL, 0)))
7759 return res;
7760
7761 /* Optimize tan(atan(x)) = x. */
7762 fcode = builtin_mathfn_code (arg);
7763 if (flag_unsafe_math_optimizations
7764 && (fcode == BUILT_IN_ATAN
7765 || fcode == BUILT_IN_ATANF
7766 || fcode == BUILT_IN_ATANL))
7767 return CALL_EXPR_ARG (arg, 0);
7768
7769 return NULL_TREE;
7770 }
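
/* Illustrative: tan (0.0) folds to 0.0 through MPFR since the result is
   exact, and with -funsafe-math-optimizations tan (atan (x)) folds
   directly to x.  */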
7771
7772 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
7773 NULL_TREE if no simplification can be made. */
7774
7775 static tree
7776 fold_builtin_sincos (location_t loc,
7777 tree arg0, tree arg1, tree arg2)
7778 {
7779 tree type;
7780 tree res, fn, call;
7781
7782 if (!validate_arg (arg0, REAL_TYPE)
7783 || !validate_arg (arg1, POINTER_TYPE)
7784 || !validate_arg (arg2, POINTER_TYPE))
7785 return NULL_TREE;
7786
7787 type = TREE_TYPE (arg0);
7788
7789 /* Calculate the result when the argument is a constant. */
7790 if ((res = do_mpfr_sincos (arg0, arg1, arg2)))
7791 return res;
7792
7793 /* Canonicalize sincos to cexpi. */
7794 if (!targetm.libc_has_function (function_c99_math_complex))
7795 return NULL_TREE;
7796 fn = mathfn_built_in (type, BUILT_IN_CEXPI);
7797 if (!fn)
7798 return NULL_TREE;
7799
7800 call = build_call_expr_loc (loc, fn, 1, arg0);
7801 call = builtin_save_expr (call);
7802
7803 return build2 (COMPOUND_EXPR, void_type_node,
7804 build2 (MODIFY_EXPR, void_type_node,
7805 build_fold_indirect_ref_loc (loc, arg1),
7806 build1 (IMAGPART_EXPR, type, call)),
7807 build2 (MODIFY_EXPR, void_type_node,
7808 build_fold_indirect_ref_loc (loc, arg2),
7809 build1 (REALPART_EXPR, type, call)));
7810 }
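
/* Editorial sketch of the canonicalization above: when the C library is
   known to provide the C99 complex functions,

     sincos (x, &s, &c);

   becomes, in effect,

     __complex__ double t = cexpi (x);
     s = __imag__ t;
     c = __real__ t;

   which lets later passes share one cexpi across repeated sin/cos of the
   same argument.  */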
7811
7812 /* Fold function call to builtin cexp, cexpf, or cexpl. Return
7813 NULL_TREE if no simplification can be made. */
7814
7815 static tree
7816 fold_builtin_cexp (location_t loc, tree arg0, tree type)
7817 {
7818 tree rtype;
7819 tree realp, imagp, ifn;
7820 tree res;
7821
7822 if (!validate_arg (arg0, COMPLEX_TYPE)
7823 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) != REAL_TYPE)
7824 return NULL_TREE;
7825
7826 /* Calculate the result when the argument is a constant. */
7827 if ((res = do_mpc_arg1 (arg0, type, mpc_exp)))
7828 return res;
7829
7830 rtype = TREE_TYPE (TREE_TYPE (arg0));
7831
7832 /* If we can determine that the real part of arg0 is constant zero,
7833 fold to cexpi. */
7834 if (!targetm.libc_has_function (function_c99_math_complex))
7835 return NULL_TREE;
7836 ifn = mathfn_built_in (rtype, BUILT_IN_CEXPI);
7837 if (!ifn)
7838 return NULL_TREE;
7839
7840 if ((realp = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0))
7841 && real_zerop (realp))
7842 {
7843 tree narg = fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0);
7844 return build_call_expr_loc (loc, ifn, 1, narg);
7845 }
7846
7847 /* If we can easily decompose the real and imaginary parts, split cexp
7848 into exp (r) * cexpi (i). */
7849 if (flag_unsafe_math_optimizations
7850 && realp)
7851 {
7852 tree rfn, rcall, icall;
7853
7854 rfn = mathfn_built_in (rtype, BUILT_IN_EXP);
7855 if (!rfn)
7856 return NULL_TREE;
7857
7858 imagp = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
7859 if (!imagp)
7860 return NULL_TREE;
7861
7862 icall = build_call_expr_loc (loc, ifn, 1, imagp);
7863 icall = builtin_save_expr (icall);
7864 rcall = build_call_expr_loc (loc, rfn, 1, realp);
7865 rcall = builtin_save_expr (rcall);
7866 return fold_build2_loc (loc, COMPLEX_EXPR, type,
7867 fold_build2_loc (loc, MULT_EXPR, rtype,
7868 rcall,
7869 fold_build1_loc (loc, REALPART_EXPR,
7870 rtype, icall)),
7871 fold_build2_loc (loc, MULT_EXPR, rtype,
7872 rcall,
7873 fold_build1_loc (loc, IMAGPART_EXPR,
7874 rtype, icall)));
7875 }
7876
7877 return NULL_TREE;
7878 }
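
/* Illustrative: when the real part is provably zero,

     cexp (0.0 + y * I)  -> cexpi (y)

   and under -funsafe-math-optimizations a decomposable argument is split
   as cexp (r + i*I) -> exp (r) * cexpi (i), combined back into a
   COMPLEX_EXPR above.  */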
7879
7880 /* Fold function call to builtin trunc, truncf or truncl with argument ARG.
7881 Return NULL_TREE if no simplification can be made. */
7882
7883 static tree
7884 fold_builtin_trunc (location_t loc, tree fndecl, tree arg)
7885 {
7886 if (!validate_arg (arg, REAL_TYPE))
7887 return NULL_TREE;
7888
7889 /* Optimize trunc of constant value. */
7890 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7891 {
7892 REAL_VALUE_TYPE r, x;
7893 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7894
7895 x = TREE_REAL_CST (arg);
7896 real_trunc (&r, TYPE_MODE (type), &x);
7897 return build_real (type, r);
7898 }
7899
7900 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7901 }
7902
7903 /* Fold function call to builtin floor, floorf or floorl with argument ARG.
7904 Return NULL_TREE if no simplification can be made. */
7905
7906 static tree
7907 fold_builtin_floor (location_t loc, tree fndecl, tree arg)
7908 {
7909 if (!validate_arg (arg, REAL_TYPE))
7910 return NULL_TREE;
7911
7912 /* Optimize floor of constant value. */
7913 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7914 {
7915 REAL_VALUE_TYPE x;
7916
7917 x = TREE_REAL_CST (arg);
7918 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7919 {
7920 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7921 REAL_VALUE_TYPE r;
7922
7923 real_floor (&r, TYPE_MODE (type), &x);
7924 return build_real (type, r);
7925 }
7926 }
7927
7928 /* Fold floor (x) where x is nonnegative to trunc (x). */
7929 if (tree_expr_nonnegative_p (arg))
7930 {
7931 tree truncfn = mathfn_built_in (TREE_TYPE (arg), BUILT_IN_TRUNC);
7932 if (truncfn)
7933 return build_call_expr_loc (loc, truncfn, 1, arg);
7934 }
7935
7936 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7937 }
7938
7939 /* Fold function call to builtin ceil, ceilf or ceill with argument ARG.
7940 Return NULL_TREE if no simplification can be made. */
7941
7942 static tree
7943 fold_builtin_ceil (location_t loc, tree fndecl, tree arg)
7944 {
7945 if (!validate_arg (arg, REAL_TYPE))
7946 return NULL_TREE;
7947
7948 /* Optimize ceil of constant value. */
7949 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7950 {
7951 REAL_VALUE_TYPE x;
7952
7953 x = TREE_REAL_CST (arg);
7954 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7955 {
7956 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7957 REAL_VALUE_TYPE r;
7958
7959 real_ceil (&r, TYPE_MODE (type), &x);
7960 return build_real (type, r);
7961 }
7962 }
7963
7964 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7965 }
7966
7967 /* Fold function call to builtin round, roundf or roundl with argument ARG.
7968 Return NULL_TREE if no simplification can be made. */
7969
7970 static tree
7971 fold_builtin_round (location_t loc, tree fndecl, tree arg)
7972 {
7973 if (!validate_arg (arg, REAL_TYPE))
7974 return NULL_TREE;
7975
7976 /* Optimize round of constant value. */
7977 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
7978 {
7979 REAL_VALUE_TYPE x;
7980
7981 x = TREE_REAL_CST (arg);
7982 if (! REAL_VALUE_ISNAN (x) || ! flag_errno_math)
7983 {
7984 tree type = TREE_TYPE (TREE_TYPE (fndecl));
7985 REAL_VALUE_TYPE r;
7986
7987 real_round (&r, TYPE_MODE (type), &x);
7988 return build_real (type, r);
7989 }
7990 }
7991
7992 return fold_trunc_transparent_mathfn (loc, fndecl, arg);
7993 }
7994
7995 /* Fold function call to builtin lround, lroundf or lroundl (or the
7996 corresponding long long versions) and other rounding functions. ARG
7997 is the argument to the call. Return NULL_TREE if no simplification
7998 can be made. */
7999
8000 static tree
8001 fold_builtin_int_roundingfn (location_t loc, tree fndecl, tree arg)
8002 {
8003 if (!validate_arg (arg, REAL_TYPE))
8004 return NULL_TREE;
8005
8006 /* Optimize lround of constant value. */
8007 if (TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
8008 {
8009 const REAL_VALUE_TYPE x = TREE_REAL_CST (arg);
8010
8011 if (real_isfinite (&x))
8012 {
8013 tree itype = TREE_TYPE (TREE_TYPE (fndecl));
8014 tree ftype = TREE_TYPE (arg);
8015 double_int val;
8016 REAL_VALUE_TYPE r;
8017
8018 switch (DECL_FUNCTION_CODE (fndecl))
8019 {
8020 CASE_FLT_FN (BUILT_IN_IFLOOR):
8021 CASE_FLT_FN (BUILT_IN_LFLOOR):
8022 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8023 real_floor (&r, TYPE_MODE (ftype), &x);
8024 break;
8025
8026 CASE_FLT_FN (BUILT_IN_ICEIL):
8027 CASE_FLT_FN (BUILT_IN_LCEIL):
8028 CASE_FLT_FN (BUILT_IN_LLCEIL):
8029 real_ceil (&r, TYPE_MODE (ftype), &x);
8030 break;
8031
8032 CASE_FLT_FN (BUILT_IN_IROUND):
8033 CASE_FLT_FN (BUILT_IN_LROUND):
8034 CASE_FLT_FN (BUILT_IN_LLROUND):
8035 real_round (&r, TYPE_MODE (ftype), &x);
8036 break;
8037
8038 default:
8039 gcc_unreachable ();
8040 }
8041
8042 real_to_integer2 ((HOST_WIDE_INT *)&val.low, &val.high, &r);
8043 if (double_int_fits_to_tree_p (itype, val))
8044 return double_int_to_tree (itype, val);
8045 }
8046 }
8047
8048 switch (DECL_FUNCTION_CODE (fndecl))
8049 {
8050 CASE_FLT_FN (BUILT_IN_LFLOOR):
8051 CASE_FLT_FN (BUILT_IN_LLFLOOR):
8052 /* Fold lfloor (x) where x is nonnegative to FIX_TRUNC (x). */
8053 if (tree_expr_nonnegative_p (arg))
8054 return fold_build1_loc (loc, FIX_TRUNC_EXPR,
8055 TREE_TYPE (TREE_TYPE (fndecl)), arg);
8056 break;
8057 default:;
8058 }
8059
8060 return fold_fixed_mathfn (loc, fndecl, arg);
8061 }
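
/* Illustrative constant folds performed above: lround (2.5) becomes the
   INTEGER_CST 3 (real_round rounds halfway cases away from zero), while
   lround (1e30) does not fit in long and is left as a call so the runtime
   behaviour is preserved.  */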
8062
8063 /* Fold function call to builtin ffs, clz, ctz, popcount and parity
8064 and their long and long long variants (i.e. ffsl and ffsll). ARG is
8065 the argument to the call. Return NULL_TREE if no simplification can
8066 be made. */
8067
8068 static tree
8069 fold_builtin_bitop (tree fndecl, tree arg)
8070 {
8071 if (!validate_arg (arg, INTEGER_TYPE))
8072 return NULL_TREE;
8073
8074 /* Optimize for constant argument. */
8075 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8076 {
8077 HOST_WIDE_INT hi, width, result;
8078 unsigned HOST_WIDE_INT lo;
8079 tree type;
8080
8081 type = TREE_TYPE (arg);
8082 width = TYPE_PRECISION (type);
8083 lo = TREE_INT_CST_LOW (arg);
8084
8085 /* Clear all the bits that are beyond the type's precision. */
8086 if (width > HOST_BITS_PER_WIDE_INT)
8087 {
8088 hi = TREE_INT_CST_HIGH (arg);
8089 if (width < HOST_BITS_PER_DOUBLE_INT)
8090 hi &= ~(HOST_WIDE_INT_M1U << (width - HOST_BITS_PER_WIDE_INT));
8091 }
8092 else
8093 {
8094 hi = 0;
8095 if (width < HOST_BITS_PER_WIDE_INT)
8096 lo &= ~(HOST_WIDE_INT_M1U << width);
8097 }
8098
8099 switch (DECL_FUNCTION_CODE (fndecl))
8100 {
8101 CASE_INT_FN (BUILT_IN_FFS):
8102 if (lo != 0)
8103 result = ffs_hwi (lo);
8104 else if (hi != 0)
8105 result = HOST_BITS_PER_WIDE_INT + ffs_hwi (hi);
8106 else
8107 result = 0;
8108 break;
8109
8110 CASE_INT_FN (BUILT_IN_CLZ):
8111 if (hi != 0)
8112 result = width - floor_log2 (hi) - 1 - HOST_BITS_PER_WIDE_INT;
8113 else if (lo != 0)
8114 result = width - floor_log2 (lo) - 1;
8115 else if (! CLZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8116 result = width;
8117 break;
8118
8119 CASE_INT_FN (BUILT_IN_CTZ):
8120 if (lo != 0)
8121 result = ctz_hwi (lo);
8122 else if (hi != 0)
8123 result = HOST_BITS_PER_WIDE_INT + ctz_hwi (hi);
8124 else if (! CTZ_DEFINED_VALUE_AT_ZERO (TYPE_MODE (type), result))
8125 result = width;
8126 break;
8127
8128 CASE_INT_FN (BUILT_IN_CLRSB):
8129 if (width > 2 * HOST_BITS_PER_WIDE_INT)
8130 return NULL_TREE;
8131 if (width > HOST_BITS_PER_WIDE_INT
8132 && (hi & ((unsigned HOST_WIDE_INT) 1
8133 << (width - HOST_BITS_PER_WIDE_INT - 1))) != 0)
8134 {
8135 hi = ~hi & ~(HOST_WIDE_INT_M1U
8136 << (width - HOST_BITS_PER_WIDE_INT - 1));
8137 lo = ~lo;
8138 }
8139 else if (width <= HOST_BITS_PER_WIDE_INT
8140 && (lo & ((unsigned HOST_WIDE_INT) 1 << (width - 1))) != 0)
8141 lo = ~lo & ~(HOST_WIDE_INT_M1U << (width - 1));
8142 if (hi != 0)
8143 result = width - floor_log2 (hi) - 2 - HOST_BITS_PER_WIDE_INT;
8144 else if (lo != 0)
8145 result = width - floor_log2 (lo) - 2;
8146 else
8147 result = width - 1;
8148 break;
8149
8150 CASE_INT_FN (BUILT_IN_POPCOUNT):
8151 result = 0;
8152 while (lo)
8153 result++, lo &= lo - 1;
8154 while (hi)
8155 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
8156 break;
8157
8158 CASE_INT_FN (BUILT_IN_PARITY):
8159 result = 0;
8160 while (lo)
8161 result++, lo &= lo - 1;
8162 while (hi)
8163 result++, hi &= (unsigned HOST_WIDE_INT) hi - 1;
8164 result &= 1;
8165 break;
8166
8167 default:
8168 gcc_unreachable ();
8169 }
8170
8171 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), result);
8172 }
8173
8174 return NULL_TREE;
8175 }
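
/* Editorial examples of the constant folding above (assuming 32-bit int):

     __builtin_popcount (0xff)  -> 8
     __builtin_ctz (0x10)       -> 4
     __builtin_clz (1)          -> 31

   clz/ctz of zero fold to the target's defined value at zero when there
   is one, and to the type width otherwise.  */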
8176
8177 /* Fold function call to builtin_bswap and the short, long and long long
8178 variants. Return NULL_TREE if no simplification can be made. */
8179 static tree
8180 fold_builtin_bswap (tree fndecl, tree arg)
8181 {
8182 if (! validate_arg (arg, INTEGER_TYPE))
8183 return NULL_TREE;
8184
8185 /* Optimize constant value. */
8186 if (TREE_CODE (arg) == INTEGER_CST && !TREE_OVERFLOW (arg))
8187 {
8188 HOST_WIDE_INT hi, width, r_hi = 0;
8189 unsigned HOST_WIDE_INT lo, r_lo = 0;
8190 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8191
8192 width = TYPE_PRECISION (type);
8193 lo = TREE_INT_CST_LOW (arg);
8194 hi = TREE_INT_CST_HIGH (arg);
8195
8196 switch (DECL_FUNCTION_CODE (fndecl))
8197 {
8198 case BUILT_IN_BSWAP16:
8199 case BUILT_IN_BSWAP32:
8200 case BUILT_IN_BSWAP64:
8201 {
8202 int s;
8203
8204 for (s = 0; s < width; s += 8)
8205 {
8206 int d = width - s - 8;
8207 unsigned HOST_WIDE_INT byte;
8208
8209 if (s < HOST_BITS_PER_WIDE_INT)
8210 byte = (lo >> s) & 0xff;
8211 else
8212 byte = (hi >> (s - HOST_BITS_PER_WIDE_INT)) & 0xff;
8213
8214 if (d < HOST_BITS_PER_WIDE_INT)
8215 r_lo |= byte << d;
8216 else
8217 r_hi |= byte << (d - HOST_BITS_PER_WIDE_INT);
8218 }
8219 }
8220
8221 break;
8222
8223 default:
8224 gcc_unreachable ();
8225 }
8226
8227 if (width < HOST_BITS_PER_WIDE_INT)
8228 return build_int_cst (type, r_lo);
8229 else
8230 return build_int_cst_wide (type, r_lo, r_hi);
8231 }
8232
8233 return NULL_TREE;
8234 }
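
/* Illustrative: __builtin_bswap32 (0x12345678) folds to 0x78563412 and
   __builtin_bswap16 (0x1234) to 0x3412, via the byte loop above.  */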
8235
8236 /* A subroutine of fold_builtin to fold the various logarithmic
8237 functions. Return NULL_TREE if no simplification can be made.
8238 FUNC is the corresponding MPFR logarithm function. */
8239
8240 static tree
8241 fold_builtin_logarithm (location_t loc, tree fndecl, tree arg,
8242 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8243 {
8244 if (validate_arg (arg, REAL_TYPE))
8245 {
8246 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8247 tree res;
8248 const enum built_in_function fcode = builtin_mathfn_code (arg);
8249
8250 /* Calculate the result when the argument is a constant. */
8251 if ((res = do_mpfr_arg1 (arg, type, func, &dconst0, NULL, false)))
8252 return res;
8253
8254 /* Special case, optimize logN(expN(x)) = x. */
8255 if (flag_unsafe_math_optimizations
8256 && ((func == mpfr_log
8257 && (fcode == BUILT_IN_EXP
8258 || fcode == BUILT_IN_EXPF
8259 || fcode == BUILT_IN_EXPL))
8260 || (func == mpfr_log2
8261 && (fcode == BUILT_IN_EXP2
8262 || fcode == BUILT_IN_EXP2F
8263 || fcode == BUILT_IN_EXP2L))
8264 || (func == mpfr_log10 && (BUILTIN_EXP10_P (fcode)))))
8265 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8266
8267 /* Optimize logN(func()) for various exponential functions. We
8268 want to determine the value "x" and the power "exponent" in
8269 order to transform logN(x**exponent) into exponent*logN(x). */
8270 if (flag_unsafe_math_optimizations)
8271 {
8272 tree exponent = 0, x = 0;
8273
8274 switch (fcode)
8275 {
8276 CASE_FLT_FN (BUILT_IN_EXP):
8277 /* Prepare to do logN(exp(exponent)) -> exponent*logN(e). */
8278 x = build_real (type, real_value_truncate (TYPE_MODE (type),
8279 dconst_e ()));
8280 exponent = CALL_EXPR_ARG (arg, 0);
8281 break;
8282 CASE_FLT_FN (BUILT_IN_EXP2):
8283 /* Prepare to do logN(exp2(exponent)) -> exponent*logN(2). */
8284 x = build_real (type, dconst2);
8285 exponent = CALL_EXPR_ARG (arg, 0);
8286 break;
8287 CASE_FLT_FN (BUILT_IN_EXP10):
8288 CASE_FLT_FN (BUILT_IN_POW10):
8289 /* Prepare to do logN(exp10(exponent)) -> exponent*logN(10). */
8290 {
8291 REAL_VALUE_TYPE dconst10;
8292 real_from_integer (&dconst10, VOIDmode, 10, 0, 0);
8293 x = build_real (type, dconst10);
8294 }
8295 exponent = CALL_EXPR_ARG (arg, 0);
8296 break;
8297 CASE_FLT_FN (BUILT_IN_SQRT):
8298 /* Prepare to do logN(sqrt(x)) -> 0.5*logN(x). */
8299 x = CALL_EXPR_ARG (arg, 0);
8300 exponent = build_real (type, dconsthalf);
8301 break;
8302 CASE_FLT_FN (BUILT_IN_CBRT):
8303 /* Prepare to do logN(cbrt(x)) -> (1/3)*logN(x). */
8304 x = CALL_EXPR_ARG (arg, 0);
8305 exponent = build_real (type, real_value_truncate (TYPE_MODE (type),
8306 dconst_third ()));
8307 break;
8308 CASE_FLT_FN (BUILT_IN_POW):
8309 /* Prepare to do logN(pow(x,exponent)) -> exponent*logN(x). */
8310 x = CALL_EXPR_ARG (arg, 0);
8311 exponent = CALL_EXPR_ARG (arg, 1);
8312 break;
8313 default:
8314 break;
8315 }
8316
8317 /* Now perform the optimization. */
8318 if (x && exponent)
8319 {
8320 tree logfn = build_call_expr_loc (loc, fndecl, 1, x);
8321 return fold_build2_loc (loc, MULT_EXPR, type, exponent, logfn);
8322 }
8323 }
8324 }
8325
8326 return NULL_TREE;
8327 }
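
/* Editorial summary of the unsafe-math log folds above:

     log (exp (x))     -> x
     log2 (exp2 (x))   -> x
     log (sqrt (x))    -> 0.5 * log (x)
     log (pow (x, y))  -> y * log (x)
*/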
8328
8329 /* Fold a builtin function call to hypot, hypotf, or hypotl. Return
8330 NULL_TREE if no simplification can be made. */
8331
8332 static tree
8333 fold_builtin_hypot (location_t loc, tree fndecl,
8334 tree arg0, tree arg1, tree type)
8335 {
8336 tree res, narg0, narg1;
8337
8338 if (!validate_arg (arg0, REAL_TYPE)
8339 || !validate_arg (arg1, REAL_TYPE))
8340 return NULL_TREE;
8341
8342 /* Calculate the result when the argument is a constant. */
8343 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_hypot)))
8344 return res;
8345
8346 /* If either argument to hypot has a negate or abs, strip that off.
8347 E.g. hypot(-x,fabs(y)) -> hypot(x,y). */
8348 narg0 = fold_strip_sign_ops (arg0);
8349 narg1 = fold_strip_sign_ops (arg1);
8350 if (narg0 || narg1)
8351 {
8352 return build_call_expr_loc (loc, fndecl, 2, narg0 ? narg0 : arg0,
8353 narg1 ? narg1 : arg1);
8354 }
8355
8356 /* If either argument is zero, hypot is fabs of the other. */
8357 if (real_zerop (arg0))
8358 return fold_build1_loc (loc, ABS_EXPR, type, arg1);
8359 else if (real_zerop (arg1))
8360 return fold_build1_loc (loc, ABS_EXPR, type, arg0);
8361
8362 /* hypot(x,x) -> fabs(x)*sqrt(2). */
8363 if (flag_unsafe_math_optimizations
8364 && operand_equal_p (arg0, arg1, OEP_PURE_SAME))
8365 {
8366 const REAL_VALUE_TYPE sqrt2_trunc
8367 = real_value_truncate (TYPE_MODE (type), dconst_sqrt2 ());
8368 return fold_build2_loc (loc, MULT_EXPR, type,
8369 fold_build1_loc (loc, ABS_EXPR, type, arg0),
8370 build_real (type, sqrt2_trunc));
8371 }
8372
8373 return NULL_TREE;
8374 }
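
/* Illustrative: hypot (-x, fabs (y)) folds to hypot (x, y) by sign
   stripping, hypot (x, 0.0) to fabs (x), and, under
   -funsafe-math-optimizations, hypot (x, x) to fabs (x) * sqrt (2).  */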
8375
8376
8377 /* Fold a builtin function call to pow, powf, or powl. Return
8378 NULL_TREE if no simplification can be made. */
8379 static tree
8380 fold_builtin_pow (location_t loc, tree fndecl, tree arg0, tree arg1, tree type)
8381 {
8382 tree res;
8383
8384 if (!validate_arg (arg0, REAL_TYPE)
8385 || !validate_arg (arg1, REAL_TYPE))
8386 return NULL_TREE;
8387
8388 /* Calculate the result when the argument is a constant. */
8389 if ((res = do_mpfr_arg2 (arg0, arg1, type, mpfr_pow)))
8390 return res;
8391
8392 /* Optimize pow(1.0,y) = 1.0. */
8393 if (real_onep (arg0))
8394 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8395
8396 if (TREE_CODE (arg1) == REAL_CST
8397 && !TREE_OVERFLOW (arg1))
8398 {
8399 REAL_VALUE_TYPE cint;
8400 REAL_VALUE_TYPE c;
8401 HOST_WIDE_INT n;
8402
8403 c = TREE_REAL_CST (arg1);
8404
8405 /* Optimize pow(x,0.0) = 1.0. */
8406 if (REAL_VALUES_EQUAL (c, dconst0))
8407 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8408 arg0);
8409
8410 /* Optimize pow(x,1.0) = x. */
8411 if (REAL_VALUES_EQUAL (c, dconst1))
8412 return arg0;
8413
8414 /* Optimize pow(x,-1.0) = 1.0/x. */
8415 if (REAL_VALUES_EQUAL (c, dconstm1))
8416 return fold_build2_loc (loc, RDIV_EXPR, type,
8417 build_real (type, dconst1), arg0);
8418
8419 /* Optimize pow(x,0.5) = sqrt(x). */
8420 if (flag_unsafe_math_optimizations
8421 && REAL_VALUES_EQUAL (c, dconsthalf))
8422 {
8423 tree sqrtfn = mathfn_built_in (type, BUILT_IN_SQRT);
8424
8425 if (sqrtfn != NULL_TREE)
8426 return build_call_expr_loc (loc, sqrtfn, 1, arg0);
8427 }
8428
8429 /* Optimize pow(x,1.0/3.0) = cbrt(x). */
8430 if (flag_unsafe_math_optimizations)
8431 {
8432 const REAL_VALUE_TYPE dconstroot
8433 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8434
8435 if (REAL_VALUES_EQUAL (c, dconstroot))
8436 {
8437 tree cbrtfn = mathfn_built_in (type, BUILT_IN_CBRT);
8438 if (cbrtfn != NULL_TREE)
8439 return build_call_expr_loc (loc, cbrtfn, 1, arg0);
8440 }
8441 }
8442
8443 /* Check for an integer exponent. */
8444 n = real_to_integer (&c);
8445 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
8446 if (real_identical (&c, &cint))
8447 {
8448 /* Attempt to evaluate pow at compile-time, unless this should
8449 raise an exception. */
8450 if (TREE_CODE (arg0) == REAL_CST
8451 && !TREE_OVERFLOW (arg0)
8452 && (n > 0
8453 || (!flag_trapping_math && !flag_errno_math)
8454 || !REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), dconst0)))
8455 {
8456 REAL_VALUE_TYPE x;
8457 bool inexact;
8458
8459 x = TREE_REAL_CST (arg0);
8460 inexact = real_powi (&x, TYPE_MODE (type), &x, n);
8461 if (flag_unsafe_math_optimizations || !inexact)
8462 return build_real (type, x);
8463 }
8464
8465 /* Strip sign ops from even integer powers. */
8466 if ((n & 1) == 0 && flag_unsafe_math_optimizations)
8467 {
8468 tree narg0 = fold_strip_sign_ops (arg0);
8469 if (narg0)
8470 return build_call_expr_loc (loc, fndecl, 2, narg0, arg1);
8471 }
8472 }
8473 }
8474
8475 if (flag_unsafe_math_optimizations)
8476 {
8477 const enum built_in_function fcode = builtin_mathfn_code (arg0);
8478
8479 /* Optimize pow(expN(x),y) = expN(x*y). */
8480 if (BUILTIN_EXPONENT_P (fcode))
8481 {
8482 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
8483 tree arg = CALL_EXPR_ARG (arg0, 0);
8484 arg = fold_build2_loc (loc, MULT_EXPR, type, arg, arg1);
8485 return build_call_expr_loc (loc, expfn, 1, arg);
8486 }
8487
8488 /* Optimize pow(sqrt(x),y) = pow(x,y*0.5). */
8489 if (BUILTIN_SQRT_P (fcode))
8490 {
8491 tree narg0 = CALL_EXPR_ARG (arg0, 0);
8492 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8493 build_real (type, dconsthalf));
8494 return build_call_expr_loc (loc, fndecl, 2, narg0, narg1);
8495 }
8496
8497 /* Optimize pow(cbrt(x),y) = pow(x,y/3) iff x is nonnegative. */
8498 if (BUILTIN_CBRT_P (fcode))
8499 {
8500 tree arg = CALL_EXPR_ARG (arg0, 0);
8501 if (tree_expr_nonnegative_p (arg))
8502 {
8503 const REAL_VALUE_TYPE dconstroot
8504 = real_value_truncate (TYPE_MODE (type), dconst_third ());
8505 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg1,
8506 build_real (type, dconstroot));
8507 return build_call_expr_loc (loc, fndecl, 2, arg, narg1);
8508 }
8509 }
8510
8511 /* Optimize pow(pow(x,y),z) = pow(x,y*z) iff x is nonnegative. */
8512 if (fcode == BUILT_IN_POW
8513 || fcode == BUILT_IN_POWF
8514 || fcode == BUILT_IN_POWL)
8515 {
8516 tree arg00 = CALL_EXPR_ARG (arg0, 0);
8517 if (tree_expr_nonnegative_p (arg00))
8518 {
8519 tree arg01 = CALL_EXPR_ARG (arg0, 1);
8520 tree narg1 = fold_build2_loc (loc, MULT_EXPR, type, arg01, arg1);
8521 return build_call_expr_loc (loc, fndecl, 2, arg00, narg1);
8522 }
8523 }
8524 }
8525
8526 return NULL_TREE;
8527 }
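
/* Illustration: representative pow folds from above, with the
   unsafe-math-only ones marked:

     pow (1.0, y)         ->  1.0
     pow (x, 1.0)         ->  x
     pow (x, -1.0)        ->  1.0 / x
     pow (2.0, 10.0)      ->  1024.0            (constant operands)
     pow (x, 0.5)         ->  sqrt (x)          (unsafe math)
     pow (sqrt (x), y)    ->  pow (x, y * 0.5)  (unsafe math)
     pow (pow (x, y), z)  ->  pow (x, y * z)    (unsafe math, x >= 0)  */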
8528
8529 /* Fold a builtin function call to powi, powif, or powil with arguments
8530 ARG0 and ARG1. Return NULL_TREE if no simplification can be made. */
8531 static tree
8532 fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
8533 tree arg0, tree arg1, tree type)
8534 {
8535 if (!validate_arg (arg0, REAL_TYPE)
8536 || !validate_arg (arg1, INTEGER_TYPE))
8537 return NULL_TREE;
8538
8539 /* Optimize pow(1.0,y) = 1.0. */
8540 if (real_onep (arg0))
8541 return omit_one_operand_loc (loc, type, build_real (type, dconst1), arg1);
8542
8543 if (tree_fits_shwi_p (arg1))
8544 {
8545 HOST_WIDE_INT c = tree_to_shwi (arg1);
8546
8547 /* Evaluate powi at compile-time. */
8548 if (TREE_CODE (arg0) == REAL_CST
8549 && !TREE_OVERFLOW (arg0))
8550 {
8551 REAL_VALUE_TYPE x;
8552 x = TREE_REAL_CST (arg0);
8553 real_powi (&x, TYPE_MODE (type), &x, c);
8554 return build_real (type, x);
8555 }
8556
8557 /* Optimize pow(x,0) = 1.0. */
8558 if (c == 0)
8559 return omit_one_operand_loc (loc, type, build_real (type, dconst1),
8560 arg0);
8561
8562 /* Optimize pow(x,1) = x. */
8563 if (c == 1)
8564 return arg0;
8565
8566 /* Optimize pow(x,-1) = 1.0/x. */
8567 if (c == -1)
8568 return fold_build2_loc (loc, RDIV_EXPR, type,
8569 build_real (type, dconst1), arg0);
8570 }
8571
8572 return NULL_TREE;
8573 }
8574
8575 /* A subroutine of fold_builtin to fold the various exponent
8576 functions. Return NULL_TREE if no simplification can be made.
8577 FUNC is the corresponding MPFR exponent function. */
8578
8579 static tree
8580 fold_builtin_exponent (location_t loc, tree fndecl, tree arg,
8581 int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t))
8582 {
8583 if (validate_arg (arg, REAL_TYPE))
8584 {
8585 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8586 tree res;
8587
8588 /* Calculate the result when the argument is a constant. */
8589 if ((res = do_mpfr_arg1 (arg, type, func, NULL, NULL, 0)))
8590 return res;
8591
8592 /* Optimize expN(logN(x)) = x. */
8593 if (flag_unsafe_math_optimizations)
8594 {
8595 const enum built_in_function fcode = builtin_mathfn_code (arg);
8596
8597 if ((func == mpfr_exp
8598 && (fcode == BUILT_IN_LOG
8599 || fcode == BUILT_IN_LOGF
8600 || fcode == BUILT_IN_LOGL))
8601 || (func == mpfr_exp2
8602 && (fcode == BUILT_IN_LOG2
8603 || fcode == BUILT_IN_LOG2F
8604 || fcode == BUILT_IN_LOG2L))
8605 || (func == mpfr_exp10
8606 && (fcode == BUILT_IN_LOG10
8607 || fcode == BUILT_IN_LOG10F
8608 || fcode == BUILT_IN_LOG10L)))
8609 return fold_convert_loc (loc, type, CALL_EXPR_ARG (arg, 0));
8610 }
8611 }
8612
8613 return NULL_TREE;
8614 }
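
/* Illustration: the expN(logN(x)) -> x identity above fires only for
   a matching base, e.g. exp2 (log2 (x)) -> x, and only under
   -funsafe-math-optimizations, since for negative x the original
   expression yields NaN rather than x.  */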
8615
8616 /* Return true if VAR is a VAR_DECL or a component thereof. */
8617
8618 static bool
8619 var_decl_component_p (tree var)
8620 {
8621 tree inner = var;
8622 while (handled_component_p (inner))
8623 inner = TREE_OPERAND (inner, 0);
8624 return SSA_VAR_P (inner);
8625 }
8626
8627 /* Fold function call to builtin memset. Return
8628 NULL_TREE if no simplification can be made. */
8629
8630 static tree
8631 fold_builtin_memset (location_t loc, tree dest, tree c, tree len,
8632 tree type, bool ignore)
8633 {
8634 tree var, ret, etype;
8635 unsigned HOST_WIDE_INT length, cval;
8636
8637 if (! validate_arg (dest, POINTER_TYPE)
8638 || ! validate_arg (c, INTEGER_TYPE)
8639 || ! validate_arg (len, INTEGER_TYPE))
8640 return NULL_TREE;
8641
8642 if (! tree_fits_uhwi_p (len))
8643 return NULL_TREE;
8644
8645 /* If the LEN parameter is zero, return DEST. */
8646 if (integer_zerop (len))
8647 return omit_one_operand_loc (loc, type, dest, c);
8648
8649 if (TREE_CODE (c) != INTEGER_CST || TREE_SIDE_EFFECTS (dest))
8650 return NULL_TREE;
8651
8652 var = dest;
8653 STRIP_NOPS (var);
8654 if (TREE_CODE (var) != ADDR_EXPR)
8655 return NULL_TREE;
8656
8657 var = TREE_OPERAND (var, 0);
8658 if (TREE_THIS_VOLATILE (var))
8659 return NULL_TREE;
8660
8661 etype = TREE_TYPE (var);
8662 if (TREE_CODE (etype) == ARRAY_TYPE)
8663 etype = TREE_TYPE (etype);
8664
8665 if (!INTEGRAL_TYPE_P (etype)
8666 && !POINTER_TYPE_P (etype))
8667 return NULL_TREE;
8668
8669 if (! var_decl_component_p (var))
8670 return NULL_TREE;
8671
8672 length = tree_to_uhwi (len);
8673 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
8674 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
8675 return NULL_TREE;
8676
8677 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
8678 return NULL_TREE;
8679
8680 if (integer_zerop (c))
8681 cval = 0;
8682 else
8683 {
8684 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
8685 return NULL_TREE;
8686
8687 cval = TREE_INT_CST_LOW (c);
8688 cval &= 0xff;
8689 cval |= cval << 8;
8690 cval |= cval << 16;
8691 cval |= (cval << 31) << 1;
8692 }
8693
8694 ret = build_int_cst_type (etype, cval);
8695 var = build_fold_indirect_ref_loc (loc,
8696 fold_convert_loc (loc,
8697 build_pointer_type (etype),
8698 dest));
8699 ret = build2 (MODIFY_EXPR, etype, var, ret);
8700 if (ignore)
8701 return ret;
8702
8703 return omit_one_operand_loc (loc, type, dest, ret);
8704 }
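
/* Illustration: when the length equals the mode size of the accessed
   type, the memset fold above becomes a single store of a replicated
   byte, e.g. with a 32-bit int:

     int i;
     memset (&i, 0xab, sizeof i);   ->   i = 0xabababab;

   The replicated pattern is what the cval shifts above construct.  */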
8705
8706 /* Fold function call to builtin bzero. Return
8707 NULL_TREE if no simplification can be made. */
8708
8709 static tree
8710 fold_builtin_bzero (location_t loc, tree dest, tree size, bool ignore)
8711 {
8712 if (! validate_arg (dest, POINTER_TYPE)
8713 || ! validate_arg (size, INTEGER_TYPE))
8714 return NULL_TREE;
8715
8716 if (!ignore)
8717 return NULL_TREE;
8718
8719 /* New argument list transforming bzero(ptr x, int y) to
8720 memset(ptr x, int 0, size_t y). This is done this way
8721 so that if it isn't expanded inline, we fall back to
8722 calling bzero instead of memset. */
8723
8724 return fold_builtin_memset (loc, dest, integer_zero_node,
8725 fold_convert_loc (loc, size_type_node, size),
8726 void_type_node, ignore);
8727 }
8728
8729 /* Fold function call to builtin mem{{,p}cpy,move}. Return
8730 NULL_TREE if no simplification can be made.
8731 If ENDP is 0, return DEST (like memcpy).
8732 If ENDP is 1, return DEST+LEN (like mempcpy).
8733 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
8734 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
8735 (memmove). */
8736
8737 static tree
8738 fold_builtin_memory_op (location_t loc, tree dest, tree src,
8739 tree len, tree type, bool ignore, int endp)
8740 {
8741 tree destvar, srcvar, expr;
8742
8743 if (! validate_arg (dest, POINTER_TYPE)
8744 || ! validate_arg (src, POINTER_TYPE)
8745 || ! validate_arg (len, INTEGER_TYPE))
8746 return NULL_TREE;
8747
8748 /* If the LEN parameter is zero, return DEST. */
8749 if (integer_zerop (len))
8750 return omit_one_operand_loc (loc, type, dest, src);
8751
8752 /* If SRC and DEST are the same (and not volatile), return
8753 DEST{,+LEN,+LEN-1}. */
8754 if (operand_equal_p (src, dest, 0))
8755 expr = len;
8756 else
8757 {
8758 tree srctype, desttype;
8759 unsigned int src_align, dest_align;
8760 tree off0;
8761
8762 if (endp == 3)
8763 {
8764 src_align = get_pointer_alignment (src);
8765 dest_align = get_pointer_alignment (dest);
8766
8767 /* Both DEST and SRC must be pointer types.
8768 ??? This is what old code did. Is the testing for pointer types
8769 really mandatory?
8770
8771 If either SRC is readonly or length is 1, we can use memcpy. */
8772 if (!dest_align || !src_align)
8773 return NULL_TREE;
8774 if (readonly_data_expr (src)
8775 || (tree_fits_uhwi_p (len)
8776 && (MIN (src_align, dest_align) / BITS_PER_UNIT
8777 >= tree_to_uhwi (len))))
8778 {
8779 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8780 if (!fn)
8781 return NULL_TREE;
8782 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8783 }
8784
8785 /* If *src and *dest can't overlap, optimize into memcpy as well. */
8786 if (TREE_CODE (src) == ADDR_EXPR
8787 && TREE_CODE (dest) == ADDR_EXPR)
8788 {
8789 tree src_base, dest_base, fn;
8790 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
8791 HOST_WIDE_INT size = -1;
8792 HOST_WIDE_INT maxsize = -1;
8793
8794 srcvar = TREE_OPERAND (src, 0);
8795 src_base = get_ref_base_and_extent (srcvar, &src_offset,
8796 &size, &maxsize);
8797 destvar = TREE_OPERAND (dest, 0);
8798 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
8799 &size, &maxsize);
8800 if (tree_fits_uhwi_p (len))
8801 maxsize = tree_to_uhwi (len);
8802 else
8803 maxsize = -1;
8804 src_offset /= BITS_PER_UNIT;
8805 dest_offset /= BITS_PER_UNIT;
8806 if (SSA_VAR_P (src_base)
8807 && SSA_VAR_P (dest_base))
8808 {
8809 if (operand_equal_p (src_base, dest_base, 0)
8810 && ranges_overlap_p (src_offset, maxsize,
8811 dest_offset, maxsize))
8812 return NULL_TREE;
8813 }
8814 else if (TREE_CODE (src_base) == MEM_REF
8815 && TREE_CODE (dest_base) == MEM_REF)
8816 {
8817 double_int off;
8818 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
8819 TREE_OPERAND (dest_base, 0), 0))
8820 return NULL_TREE;
8821 off = mem_ref_offset (src_base) +
8822 double_int::from_shwi (src_offset);
8823 if (!off.fits_shwi ())
8824 return NULL_TREE;
8825 src_offset = off.low;
8826 off = mem_ref_offset (dest_base) +
8827 double_int::from_shwi (dest_offset);
8828 if (!off.fits_shwi ())
8829 return NULL_TREE;
8830 dest_offset = off.low;
8831 if (ranges_overlap_p (src_offset, maxsize,
8832 dest_offset, maxsize))
8833 return NULL_TREE;
8834 }
8835 else
8836 return NULL_TREE;
8837
8838 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8839 if (!fn)
8840 return NULL_TREE;
8841 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8842 }
8843
8844 /* If the destination and source do not alias optimize into
8845 memcpy as well. */
8846 if ((is_gimple_min_invariant (dest)
8847 || TREE_CODE (dest) == SSA_NAME)
8848 && (is_gimple_min_invariant (src)
8849 || TREE_CODE (src) == SSA_NAME))
8850 {
8851 ao_ref destr, srcr;
8852 ao_ref_init_from_ptr_and_size (&destr, dest, len);
8853 ao_ref_init_from_ptr_and_size (&srcr, src, len);
8854 if (!refs_may_alias_p_1 (&destr, &srcr, false))
8855 {
8856 tree fn;
8857 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
8858 if (!fn)
8859 return NULL_TREE;
8860 return build_call_expr_loc (loc, fn, 3, dest, src, len);
8861 }
8862 }
8863
8864 return NULL_TREE;
8865 }
8866
8867 if (!tree_fits_shwi_p (len))
8868 return NULL_TREE;
8869 /* FIXME:
8870 This logic loses for arguments like (type *)malloc (sizeof (type)),
8871 since we strip the casts down to the VOID return value from malloc.
8872 Perhaps we ought to inherit type from non-VOID argument here? */
8873 STRIP_NOPS (src);
8874 STRIP_NOPS (dest);
8875 if (!POINTER_TYPE_P (TREE_TYPE (src))
8876 || !POINTER_TYPE_P (TREE_TYPE (dest)))
8877 return NULL_TREE;
8878 /* In the following try to find a type that is most natural to be
8879 used for the memcpy source and destination and that allows
8880 the most optimization when memcpy is turned into a plain assignment
8881 using that type. In theory we could always use a char[len] type
8882 but that gains us only that the destination and source will
8883 possibly no longer have their address taken. */
8884 /* As we fold (void *)(p + CST) to (void *)p + CST, undo this here. */
8885 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
8886 {
8887 tree tem = TREE_OPERAND (src, 0);
8888 STRIP_NOPS (tem);
8889 if (tem != TREE_OPERAND (src, 0))
8890 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
8891 }
8892 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
8893 {
8894 tree tem = TREE_OPERAND (dest, 0);
8895 STRIP_NOPS (tem);
8896 if (tem != TREE_OPERAND (dest, 0))
8897 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
8898 }
8899 srctype = TREE_TYPE (TREE_TYPE (src));
8900 if (TREE_CODE (srctype) == ARRAY_TYPE
8901 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8902 {
8903 srctype = TREE_TYPE (srctype);
8904 STRIP_NOPS (src);
8905 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
8906 }
8907 desttype = TREE_TYPE (TREE_TYPE (dest));
8908 if (TREE_CODE (desttype) == ARRAY_TYPE
8909 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8910 {
8911 desttype = TREE_TYPE (desttype);
8912 STRIP_NOPS (dest);
8913 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
8914 }
8915 if (TREE_ADDRESSABLE (srctype)
8916 || TREE_ADDRESSABLE (desttype))
8917 return NULL_TREE;
8918
8919 /* Make sure we are not copying using a floating-point mode or
8920 a type whose size possibly does not match its precision. */
8921 if (FLOAT_MODE_P (TYPE_MODE (desttype))
8922 || TREE_CODE (desttype) == BOOLEAN_TYPE
8923 || TREE_CODE (desttype) == ENUMERAL_TYPE)
8924 {
8925 /* A more suitable int_mode_for_mode would return a vector
8926 integer mode for a vector float mode or an integer complex
8927 mode for a float complex mode if there isn't a regular
8928 integer mode covering the mode of desttype. */
8929 enum machine_mode mode = int_mode_for_mode (TYPE_MODE (desttype));
8930 if (mode == BLKmode)
8931 desttype = NULL_TREE;
8932 else
8933 desttype = build_nonstandard_integer_type (GET_MODE_BITSIZE (mode),
8934 1);
8935 }
8936 if (FLOAT_MODE_P (TYPE_MODE (srctype))
8937 || TREE_CODE (srctype) == BOOLEAN_TYPE
8938 || TREE_CODE (srctype) == ENUMERAL_TYPE)
8939 {
8940 enum machine_mode mode = int_mode_for_mode (TYPE_MODE (srctype));
8941 if (mode == BLKmode)
8942 srctype = NULL_TREE;
8943 else
8944 srctype = build_nonstandard_integer_type (GET_MODE_BITSIZE (mode),
8945 1);
8946 }
8947 if (!srctype)
8948 srctype = desttype;
8949 if (!desttype)
8950 desttype = srctype;
8951 if (!srctype)
8952 return NULL_TREE;
8953
8954 src_align = get_pointer_alignment (src);
8955 dest_align = get_pointer_alignment (dest);
8956 if (dest_align < TYPE_ALIGN (desttype)
8957 || src_align < TYPE_ALIGN (srctype))
8958 return NULL_TREE;
8959
8960 if (!ignore)
8961 dest = builtin_save_expr (dest);
8962
8963 /* Build accesses at offset zero with a ref-all character type. */
8964 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
8965 ptr_mode, true), 0);
8966
8967 destvar = dest;
8968 STRIP_NOPS (destvar);
8969 if (TREE_CODE (destvar) == ADDR_EXPR
8970 && var_decl_component_p (TREE_OPERAND (destvar, 0))
8971 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
8972 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
8973 else
8974 destvar = NULL_TREE;
8975
8976 srcvar = src;
8977 STRIP_NOPS (srcvar);
8978 if (TREE_CODE (srcvar) == ADDR_EXPR
8979 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
8980 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
8981 {
8982 if (!destvar
8983 || src_align >= TYPE_ALIGN (desttype))
8984 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
8985 srcvar, off0);
8986 else if (!STRICT_ALIGNMENT)
8987 {
8988 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
8989 src_align);
8990 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
8991 }
8992 else
8993 srcvar = NULL_TREE;
8994 }
8995 else
8996 srcvar = NULL_TREE;
8997
8998 if (srcvar == NULL_TREE && destvar == NULL_TREE)
8999 return NULL_TREE;
9000
9001 if (srcvar == NULL_TREE)
9002 {
9003 STRIP_NOPS (src);
9004 if (src_align >= TYPE_ALIGN (desttype))
9005 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
9006 else
9007 {
9008 if (STRICT_ALIGNMENT)
9009 return NULL_TREE;
9010 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
9011 src_align);
9012 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
9013 }
9014 }
9015 else if (destvar == NULL_TREE)
9016 {
9017 STRIP_NOPS (dest);
9018 if (dest_align >= TYPE_ALIGN (srctype))
9019 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
9020 else
9021 {
9022 if (STRICT_ALIGNMENT)
9023 return NULL_TREE;
9024 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
9025 dest_align);
9026 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
9027 }
9028 }
9029
9030 expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, srcvar);
9031 }
9032
9033 if (ignore)
9034 return expr;
9035
9036 if (endp == 0 || endp == 3)
9037 return omit_one_operand_loc (loc, type, dest, expr);
9038
9039 if (expr == len)
9040 expr = NULL_TREE;
9041
9042 if (endp == 2)
9043 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
9044 ssize_int (1));
9045
9046 dest = fold_build_pointer_plus_loc (loc, dest, len);
9047 dest = fold_convert_loc (loc, type, dest);
9048 if (expr)
9049 dest = omit_one_operand_loc (loc, type, dest, expr);
9050 return dest;
9051 }
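
/* Illustration: the ENDP encoding above selects the return-value
   convention when the copy is folded:

     memcpy (d, s, n)    returns d           (ENDP == 0)
     mempcpy (d, s, n)   returns d + n       (ENDP == 1)
     stpcpy-style        returns d + n - 1   (ENDP == 2)
     memmove (d, s, n)   returns d           (ENDP == 3, may overlap)  */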
9052
9053 /* Fold function call to builtin strcpy with arguments DEST and SRC.
9054 If LEN is not NULL, it represents the length of the string to be
9055 copied. Return NULL_TREE if no simplification can be made. */
9056
9057 tree
9058 fold_builtin_strcpy (location_t loc, tree fndecl, tree dest, tree src, tree len)
9059 {
9060 tree fn;
9061
9062 if (!validate_arg (dest, POINTER_TYPE)
9063 || !validate_arg (src, POINTER_TYPE))
9064 return NULL_TREE;
9065
9066 /* If SRC and DEST are the same (and not volatile), return DEST. */
9067 if (operand_equal_p (src, dest, 0))
9068 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
9069
9070 if (optimize_function_for_size_p (cfun))
9071 return NULL_TREE;
9072
9073 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9074 if (!fn)
9075 return NULL_TREE;
9076
9077 if (!len)
9078 {
9079 len = c_strlen (src, 1);
9080 if (! len || TREE_SIDE_EFFECTS (len))
9081 return NULL_TREE;
9082 }
9083
9084 len = fold_convert_loc (loc, size_type_node, len);
9085 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
9086 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
9087 build_call_expr_loc (loc, fn, 3, dest, src, len));
9088 }
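
/* Illustration: with a constant source the strcpy fold above becomes
   a fixed-size memcpy that also copies the terminating NUL:

     strcpy (buf, "abc")   ->   memcpy (buf, "abc", 4)

   It is skipped when optimizing for size, where the memcpy call
   sequence may be larger.  */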
9089
9090 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
9091 Return NULL_TREE if no simplification can be made. */
9092
9093 static tree
9094 fold_builtin_stpcpy (location_t loc, tree fndecl, tree dest, tree src)
9095 {
9096 tree fn, len, lenp1, call, type;
9097
9098 if (!validate_arg (dest, POINTER_TYPE)
9099 || !validate_arg (src, POINTER_TYPE))
9100 return NULL_TREE;
9101
9102 len = c_strlen (src, 1);
9103 if (!len
9104 || TREE_CODE (len) != INTEGER_CST)
9105 return NULL_TREE;
9106
9107 if (optimize_function_for_size_p (cfun)
9108 /* If length is zero it's small enough. */
9109 && !integer_zerop (len))
9110 return NULL_TREE;
9111
9112 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9113 if (!fn)
9114 return NULL_TREE;
9115
9116 lenp1 = size_binop_loc (loc, PLUS_EXPR,
9117 fold_convert_loc (loc, size_type_node, len),
9118 build_int_cst (size_type_node, 1));
9119 /* We use dest twice in building our expression. Save it from
9120 multiple expansions. */
9121 dest = builtin_save_expr (dest);
9122 call = build_call_expr_loc (loc, fn, 3, dest, src, lenp1);
9123
9124 type = TREE_TYPE (TREE_TYPE (fndecl));
9125 dest = fold_build_pointer_plus_loc (loc, dest, len);
9126 dest = fold_convert_loc (loc, type, dest);
9127 dest = omit_one_operand_loc (loc, type, dest, call);
9128 return dest;
9129 }
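
/* Illustration: stpcpy must return the end of the copied string, so
   the fold above saves DEST and reuses it for the return value:

     stpcpy (buf, "abc")   ->   (memcpy (buf, "abc", 4), buf + 3)  */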
9130
9131 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
9132 If SLEN is not NULL, it represents the length of the source string.
9133 Return NULL_TREE if no simplification can be made. */
9134
9135 tree
9136 fold_builtin_strncpy (location_t loc, tree fndecl, tree dest,
9137 tree src, tree len, tree slen)
9138 {
9139 tree fn;
9140
9141 if (!validate_arg (dest, POINTER_TYPE)
9142 || !validate_arg (src, POINTER_TYPE)
9143 || !validate_arg (len, INTEGER_TYPE))
9144 return NULL_TREE;
9145
9146 /* If the LEN parameter is zero, return DEST. */
9147 if (integer_zerop (len))
9148 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
9149
9150 /* We can't compare slen with len as constants below if len is not a
9151 constant. */
9152 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
9153 return NULL_TREE;
9154
9155 if (!slen)
9156 slen = c_strlen (src, 1);
9157
9158 /* Now, we must be passed a constant src ptr parameter. */
9159 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
9160 return NULL_TREE;
9161
9162 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
9163
9164 /* We do not support simplification of this case, though we do
9165 support it when expanding trees into RTL. */
9166 /* FIXME: generate a call to __builtin_memset. */
9167 if (tree_int_cst_lt (slen, len))
9168 return NULL_TREE;
9169
9170 /* OK transform into builtin memcpy. */
9171 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
9172 if (!fn)
9173 return NULL_TREE;
9174
9175 len = fold_convert_loc (loc, size_type_node, len);
9176 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
9177 build_call_expr_loc (loc, fn, 3, dest, src, len));
9178 }
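
/* Illustration: strncpy folds only when the bound cannot require
   zero padding, i.e. LEN <= strlen (SRC) + 1:

     strncpy (buf, "abcdef", 4)   ->   memcpy (buf, "abcdef", 4)

   whereas strncpy (buf, "abc", 8) must zero-pad the remainder of the
   buffer and is left alone (see the FIXME above about memset).  */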
9179
9180 /* Fold function call to builtin memchr. ARG1, ARG2 and LEN are the
9181 arguments to the call, and TYPE is its return type.
9182 Return NULL_TREE if no simplification can be made. */
9183
9184 static tree
9185 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
9186 {
9187 if (!validate_arg (arg1, POINTER_TYPE)
9188 || !validate_arg (arg2, INTEGER_TYPE)
9189 || !validate_arg (len, INTEGER_TYPE))
9190 return NULL_TREE;
9191 else
9192 {
9193 const char *p1;
9194
9195 if (TREE_CODE (arg2) != INTEGER_CST
9196 || !tree_fits_uhwi_p (len))
9197 return NULL_TREE;
9198
9199 p1 = c_getstr (arg1);
9200 if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
9201 {
9202 char c;
9203 const char *r;
9204 tree tem;
9205
9206 if (target_char_cast (arg2, &c))
9207 return NULL_TREE;
9208
9209 r = (const char *) memchr (p1, c, tree_to_uhwi (len));
9210
9211 if (r == NULL)
9212 return build_int_cst (TREE_TYPE (arg1), 0);
9213
9214 tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
9215 return fold_convert_loc (loc, type, tem);
9216 }
9217 return NULL_TREE;
9218 }
9219 }
9220
9221 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9222 Return NULL_TREE if no simplification can be made. */
9223
9224 static tree
9225 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
9226 {
9227 const char *p1, *p2;
9228
9229 if (!validate_arg (arg1, POINTER_TYPE)
9230 || !validate_arg (arg2, POINTER_TYPE)
9231 || !validate_arg (len, INTEGER_TYPE))
9232 return NULL_TREE;
9233
9234 /* If the LEN parameter is zero, return zero. */
9235 if (integer_zerop (len))
9236 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9237 arg1, arg2);
9238
9239 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9240 if (operand_equal_p (arg1, arg2, 0))
9241 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9242
9243 p1 = c_getstr (arg1);
9244 p2 = c_getstr (arg2);
9245
9246 /* If all arguments are constant, and the value of len is not greater
9247 than the lengths of arg1 and arg2, evaluate at compile-time. */
9248 if (tree_fits_uhwi_p (len) && p1 && p2
9249 && compare_tree_int (len, strlen (p1) + 1) <= 0
9250 && compare_tree_int (len, strlen (p2) + 1) <= 0)
9251 {
9252 const int r = memcmp (p1, p2, tree_to_uhwi (len));
9253
9254 if (r > 0)
9255 return integer_one_node;
9256 else if (r < 0)
9257 return integer_minus_one_node;
9258 else
9259 return integer_zero_node;
9260 }
9261
9262 /* If len parameter is one, return an expression corresponding to
9263 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9264 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
9265 {
9266 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9267 tree cst_uchar_ptr_node
9268 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9269
9270 tree ind1
9271 = fold_convert_loc (loc, integer_type_node,
9272 build1 (INDIRECT_REF, cst_uchar_node,
9273 fold_convert_loc (loc,
9274 cst_uchar_ptr_node,
9275 arg1)));
9276 tree ind2
9277 = fold_convert_loc (loc, integer_type_node,
9278 build1 (INDIRECT_REF, cst_uchar_node,
9279 fold_convert_loc (loc,
9280 cst_uchar_ptr_node,
9281 arg2)));
9282 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9283 }
9284
9285 return NULL_TREE;
9286 }
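
/* Illustration: the single-byte case above replaces the library call
   with a subtraction of two unsigned char loads:

     memcmp (p, q, 1)   ->   *(const unsigned char *) p
                             - *(const unsigned char *) q  */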
9287
9288 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
9289 Return NULL_TREE if no simplification can be made. */
9290
9291 static tree
9292 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
9293 {
9294 const char *p1, *p2;
9295
9296 if (!validate_arg (arg1, POINTER_TYPE)
9297 || !validate_arg (arg2, POINTER_TYPE))
9298 return NULL_TREE;
9299
9300 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9301 if (operand_equal_p (arg1, arg2, 0))
9302 return integer_zero_node;
9303
9304 p1 = c_getstr (arg1);
9305 p2 = c_getstr (arg2);
9306
9307 if (p1 && p2)
9308 {
9309 const int i = strcmp (p1, p2);
9310 if (i < 0)
9311 return integer_minus_one_node;
9312 else if (i > 0)
9313 return integer_one_node;
9314 else
9315 return integer_zero_node;
9316 }
9317
9318 /* If the second arg is "", return *(const unsigned char*)arg1. */
9319 if (p2 && *p2 == '\0')
9320 {
9321 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9322 tree cst_uchar_ptr_node
9323 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9324
9325 return fold_convert_loc (loc, integer_type_node,
9326 build1 (INDIRECT_REF, cst_uchar_node,
9327 fold_convert_loc (loc,
9328 cst_uchar_ptr_node,
9329 arg1)));
9330 }
9331
9332 /* If the first arg is "", return -*(const unsigned char*)arg2. */
9333 if (p1 && *p1 == '\0')
9334 {
9335 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9336 tree cst_uchar_ptr_node
9337 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9338
9339 tree temp
9340 = fold_convert_loc (loc, integer_type_node,
9341 build1 (INDIRECT_REF, cst_uchar_node,
9342 fold_convert_loc (loc,
9343 cst_uchar_ptr_node,
9344 arg2)));
9345 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9346 }
9347
9348 return NULL_TREE;
9349 }
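
/* Illustration: comparing against an empty string reduces to reading
   a single byte:

     strcmp (s, "")   ->    *(const unsigned char *) s
     strcmp ("", s)   ->   -*(const unsigned char *) s  */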
9350
9351 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
9352 Return NULL_TREE if no simplification can be made. */
9353
9354 static tree
9355 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
9356 {
9357 const char *p1, *p2;
9358
9359 if (!validate_arg (arg1, POINTER_TYPE)
9360 || !validate_arg (arg2, POINTER_TYPE)
9361 || !validate_arg (len, INTEGER_TYPE))
9362 return NULL_TREE;
9363
9364 /* If the LEN parameter is zero, return zero. */
9365 if (integer_zerop (len))
9366 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9367 arg1, arg2);
9368
9369 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9370 if (operand_equal_p (arg1, arg2, 0))
9371 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9372
9373 p1 = c_getstr (arg1);
9374 p2 = c_getstr (arg2);
9375
9376 if (tree_fits_uhwi_p (len) && p1 && p2)
9377 {
9378 const int i = strncmp (p1, p2, tree_to_uhwi (len));
9379 if (i > 0)
9380 return integer_one_node;
9381 else if (i < 0)
9382 return integer_minus_one_node;
9383 else
9384 return integer_zero_node;
9385 }
9386
9387 /* If the second arg is "", and the length is greater than zero,
9388 return *(const unsigned char*)arg1. */
9389 if (p2 && *p2 == '\0'
9390 && TREE_CODE (len) == INTEGER_CST
9391 && tree_int_cst_sgn (len) == 1)
9392 {
9393 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9394 tree cst_uchar_ptr_node
9395 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9396
9397 return fold_convert_loc (loc, integer_type_node,
9398 build1 (INDIRECT_REF, cst_uchar_node,
9399 fold_convert_loc (loc,
9400 cst_uchar_ptr_node,
9401 arg1)));
9402 }
9403
9404 /* If the first arg is "", and the length is greater than zero,
9405 return -*(const unsigned char*)arg2. */
9406 if (p1 && *p1 == '\0'
9407 && TREE_CODE (len) == INTEGER_CST
9408 && tree_int_cst_sgn (len) == 1)
9409 {
9410 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9411 tree cst_uchar_ptr_node
9412 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9413
9414 tree temp = fold_convert_loc (loc, integer_type_node,
9415 build1 (INDIRECT_REF, cst_uchar_node,
9416 fold_convert_loc (loc,
9417 cst_uchar_ptr_node,
9418 arg2)));
9419 return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
9420 }
9421
9422 /* If len parameter is one, return an expression corresponding to
9423 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9424 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
9425 {
9426 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9427 tree cst_uchar_ptr_node
9428 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9429
9430 tree ind1 = fold_convert_loc (loc, integer_type_node,
9431 build1 (INDIRECT_REF, cst_uchar_node,
9432 fold_convert_loc (loc,
9433 cst_uchar_ptr_node,
9434 arg1)));
9435 tree ind2 = fold_convert_loc (loc, integer_type_node,
9436 build1 (INDIRECT_REF, cst_uchar_node,
9437 fold_convert_loc (loc,
9438 cst_uchar_ptr_node,
9439 arg2)));
9440 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9441 }
9442
9443 return NULL_TREE;
9444 }
9445
9446 /* Fold function call to builtin signbit, signbitf or signbitl with argument
9447 ARG. Return NULL_TREE if no simplification can be made. */
9448
9449 static tree
9450 fold_builtin_signbit (location_t loc, tree arg, tree type)
9451 {
9452 if (!validate_arg (arg, REAL_TYPE))
9453 return NULL_TREE;
9454
9455 /* If ARG is a compile-time constant, determine the result. */
9456 if (TREE_CODE (arg) == REAL_CST
9457 && !TREE_OVERFLOW (arg))
9458 {
9459 REAL_VALUE_TYPE c;
9460
9461 c = TREE_REAL_CST (arg);
9462 return (REAL_VALUE_NEGATIVE (c)
9463 ? build_one_cst (type)
9464 : build_zero_cst (type));
9465 }
9466
9467 /* If ARG is non-negative, the result is always zero. */
9468 if (tree_expr_nonnegative_p (arg))
9469 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9470
9471 /* If ARG's format doesn't have signed zeros, return "arg < 0.0". */
9472 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg))))
9473 return fold_convert (type,
9474 fold_build2_loc (loc, LT_EXPR, boolean_type_node, arg,
9475 build_real (TREE_TYPE (arg), dconst0)));
9476
9477 return NULL_TREE;
9478 }
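
/* Illustration: on a target whose format lacks signed zeros the fold
   above is a plain comparison, signbit (x) -> x < 0.0.  For IEEE
   formats it is not used, because signbit (-0.0) must be nonzero
   while -0.0 < 0.0 is false.  */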
9479
9480 /* Fold function call to builtin copysign, copysignf or copysignl with
9481 arguments ARG1 and ARG2. Return NULL_TREE if no simplification can
9482 be made. */
9483
9484 static tree
9485 fold_builtin_copysign (location_t loc, tree fndecl,
9486 tree arg1, tree arg2, tree type)
9487 {
9488 tree tem;
9489
9490 if (!validate_arg (arg1, REAL_TYPE)
9491 || !validate_arg (arg2, REAL_TYPE))
9492 return NULL_TREE;
9493
9494 /* copysign(X,X) is X. */
9495 if (operand_equal_p (arg1, arg2, 0))
9496 return fold_convert_loc (loc, type, arg1);
9497
9498 /* If ARG1 and ARG2 are compile-time constants, determine the result. */
9499 if (TREE_CODE (arg1) == REAL_CST
9500 && TREE_CODE (arg2) == REAL_CST
9501 && !TREE_OVERFLOW (arg1)
9502 && !TREE_OVERFLOW (arg2))
9503 {
9504 REAL_VALUE_TYPE c1, c2;
9505
9506 c1 = TREE_REAL_CST (arg1);
9507 c2 = TREE_REAL_CST (arg2);
9508 /* c1.sign := c2.sign. */
9509 real_copysign (&c1, &c2);
9510 return build_real (type, c1);
9511 }
9512
9513 /* copysign(X, Y) is fabs(X) when Y is always non-negative.
9514 Remember to evaluate Y for side-effects. */
9515 if (tree_expr_nonnegative_p (arg2))
9516 return omit_one_operand_loc (loc, type,
9517 fold_build1_loc (loc, ABS_EXPR, type, arg1),
9518 arg2);
9519
9520 /* Strip sign changing operations for the first argument. */
9521 tem = fold_strip_sign_ops (arg1);
9522 if (tem)
9523 return build_call_expr_loc (loc, fndecl, 2, tem, arg2);
9524
9525 return NULL_TREE;
9526 }
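
/* Illustration: representative copysign folds from above:

     copysign (x, x)    ->  x
     copysign (x, 2.0)  ->  fabs (x)          (second arg nonnegative)
     copysign (-x, y)   ->  copysign (x, y)   (sign ops stripped)  */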
9527
9528 /* Fold a call to builtin isascii with argument ARG. */
9529
9530 static tree
9531 fold_builtin_isascii (location_t loc, tree arg)
9532 {
9533 if (!validate_arg (arg, INTEGER_TYPE))
9534 return NULL_TREE;
9535 else
9536 {
9537 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9538 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9539 build_int_cst (integer_type_node,
9540 ~ (unsigned HOST_WIDE_INT) 0x7f));
9541 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9542 arg, integer_zero_node);
9543 }
9544 }
9545
9546 /* Fold a call to builtin toascii with argument ARG. */
9547
9548 static tree
9549 fold_builtin_toascii (location_t loc, tree arg)
9550 {
9551 if (!validate_arg (arg, INTEGER_TYPE))
9552 return NULL_TREE;
9553
9554 /* Transform toascii(c) -> (c & 0x7f). */
9555 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9556 build_int_cst (integer_type_node, 0x7f));
9557 }
9558
9559 /* Fold a call to builtin isdigit with argument ARG. */
9560
9561 static tree
9562 fold_builtin_isdigit (location_t loc, tree arg)
9563 {
9564 if (!validate_arg (arg, INTEGER_TYPE))
9565 return NULL_TREE;
9566 else
9567 {
9568 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9569 /* According to the C standard, isdigit is unaffected by locale.
9570 However, it definitely is affected by the target character set. */
9571 unsigned HOST_WIDE_INT target_digit0
9572 = lang_hooks.to_target_charset ('0');
9573
9574 if (target_digit0 == 0)
9575 return NULL_TREE;
9576
9577 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9578 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9579 build_int_cst (unsigned_type_node, target_digit0));
9580 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9581 build_int_cst (unsigned_type_node, 9));
9582 }
9583 }
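
/* Illustration: the isdigit rewrite above uses a single unsigned
   comparison because values below '0' wrap around to large unsigned
   numbers, so (unsigned) c - '0' <= 9 checks both bounds at once.  */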
9584
9585 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9586
9587 static tree
9588 fold_builtin_fabs (location_t loc, tree arg, tree type)
9589 {
9590 if (!validate_arg (arg, REAL_TYPE))
9591 return NULL_TREE;
9592
9593 arg = fold_convert_loc (loc, type, arg);
9594 if (TREE_CODE (arg) == REAL_CST)
9595 return fold_abs_const (arg, type);
9596 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9597 }
9598
9599 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9600
9601 static tree
9602 fold_builtin_abs (location_t loc, tree arg, tree type)
9603 {
9604 if (!validate_arg (arg, INTEGER_TYPE))
9605 return NULL_TREE;
9606
9607 arg = fold_convert_loc (loc, type, arg);
9608 if (TREE_CODE (arg) == INTEGER_CST)
9609 return fold_abs_const (arg, type);
9610 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9611 }
9612
9613 /* Fold a fma operation with arguments ARG[012]. */
9614
9615 tree
9616 fold_fma (location_t loc ATTRIBUTE_UNUSED,
9617 tree type, tree arg0, tree arg1, tree arg2)
9618 {
9619 if (TREE_CODE (arg0) == REAL_CST
9620 && TREE_CODE (arg1) == REAL_CST
9621 && TREE_CODE (arg2) == REAL_CST)
9622 return do_mpfr_arg3 (arg0, arg1, arg2, type, mpfr_fma);
9623
9624 return NULL_TREE;
9625 }
9626
9627 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
9628
9629 static tree
9630 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
9631 {
9632 if (validate_arg (arg0, REAL_TYPE)
9633 && validate_arg (arg1, REAL_TYPE)
9634 && validate_arg (arg2, REAL_TYPE))
9635 {
9636 tree tem = fold_fma (loc, type, arg0, arg1, arg2);
9637 if (tem)
9638 return tem;
9639
9640 /* ??? Only expand to FMA_EXPR if it's directly supported. */
9641 if (optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
9642 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
9643 }
9644 return NULL_TREE;
9645 }
9646
9647 /* Fold a call to builtin fmin or fmax. */
9648
9649 static tree
9650 fold_builtin_fmin_fmax (location_t loc, tree arg0, tree arg1,
9651 tree type, bool max)
9652 {
9653 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, REAL_TYPE))
9654 {
9655 /* Calculate the result when the argument is a constant. */
9656 tree res = do_mpfr_arg2 (arg0, arg1, type, (max ? mpfr_max : mpfr_min));
9657
9658 if (res)
9659 return res;
9660
9661 /* If either argument is NaN, return the other one. Avoid the
9662 transformation if we get (and honor) a signalling NaN. Using
9663 omit_one_operand() ensures we create a non-lvalue. */
9664 if (TREE_CODE (arg0) == REAL_CST
9665 && real_isnan (&TREE_REAL_CST (arg0))
9666 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9667 || ! TREE_REAL_CST (arg0).signalling))
9668 return omit_one_operand_loc (loc, type, arg1, arg0);
9669 if (TREE_CODE (arg1) == REAL_CST
9670 && real_isnan (&TREE_REAL_CST (arg1))
9671 && (! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
9672 || ! TREE_REAL_CST (arg1).signalling))
9673 return omit_one_operand_loc (loc, type, arg0, arg1);
9674
9675 /* Transform fmin/fmax(x,x) -> x. */
9676 if (operand_equal_p (arg0, arg1, OEP_PURE_SAME))
9677 return omit_one_operand_loc (loc, type, arg0, arg1);
9678
9679 /* Convert fmin/fmax to MIN_EXPR/MAX_EXPR. C99 requires these
9680 functions to return the numeric arg if the other one is NaN.
9681 These tree codes don't honor that, so only transform if
9682 -ffinite-math-only is set. C99 doesn't require -0.0 to be
9683 handled, so we don't have to worry about it either. */
9684 if (flag_finite_math_only)
9685 return fold_build2_loc (loc, (max ? MAX_EXPR : MIN_EXPR), type,
9686 fold_convert_loc (loc, type, arg0),
9687 fold_convert_loc (loc, type, arg1));
9688 }
9689 return NULL_TREE;
9690 }
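
/* Illustration: the fmin/fmax folds above follow C99 NaN semantics,
   e.g.

     fmin (x, NAN)  ->  x                  (quiet NaN operand dropped)
     fmin (x, x)    ->  x
     fmin (x, y)    ->  MIN_EXPR <x, y>    (-ffinite-math-only)

   MIN_EXPR/MAX_EXPR are used only with -ffinite-math-only because
   they do not implement the return-the-numeric-operand rule.  */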
9691
9692 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9693
9694 static tree
9695 fold_builtin_carg (location_t loc, tree arg, tree type)
9696 {
9697 if (validate_arg (arg, COMPLEX_TYPE)
9698 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9699 {
9700 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9701
9702 if (atan2_fn)
9703 {
9704 tree new_arg = builtin_save_expr (arg);
9705 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9706 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9707 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9708 }
9709 }
9710
9711 return NULL_TREE;
9712 }
9713
9714 /* Fold a call to builtin logb/ilogb. */
9715
9716 static tree
9717 fold_builtin_logb (location_t loc, tree arg, tree rettype)
9718 {
9719 if (! validate_arg (arg, REAL_TYPE))
9720 return NULL_TREE;
9721
9722 STRIP_NOPS (arg);
9723
9724 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9725 {
9726 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9727
9728 switch (value->cl)
9729 {
9730 case rvc_nan:
9731 case rvc_inf:
9732 /* If arg is Inf or NaN and we're logb, return it. */
9733 if (TREE_CODE (rettype) == REAL_TYPE)
9734 {
9735 /* For logb(-Inf) we have to return +Inf. */
9736 if (real_isinf (value) && real_isneg (value))
9737 {
9738 REAL_VALUE_TYPE tem;
9739 real_inf (&tem);
9740 return build_real (rettype, tem);
9741 }
9742 return fold_convert_loc (loc, rettype, arg);
9743 }
9744 /* Fall through... */
9745 case rvc_zero:
9746 /* Zero may set errno and/or raise an exception for logb; also,
9747 for ilogb we don't know FP_ILOGB0. */
9748 return NULL_TREE;
9749 case rvc_normal:
9750 /* For normal numbers, proceed iff radix == 2. In GCC,
9751 normalized significands are in the range [0.5, 1.0). We
9752 want the exponent as if they were [1.0, 2.0) so get the
9753 exponent and subtract 1. */
9754 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9755 return fold_convert_loc (loc, rettype,
9756 build_int_cst (integer_type_node,
9757 REAL_EXP (value)-1));
9758 break;
9759 }
9760 }
9761
9762 return NULL_TREE;
9763 }
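
/* Illustration: for radix-2 constants the logb fold above extracts
   the unbiased exponent directly, e.g.

     logb (8.0)    ->   3.0
     logb (0.75)   ->  -1.0
     logb (-Inf)   ->  +Inf   (logb only; ilogb is left alone)  */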
9764
9765 /* Fold a call to builtin significand, if radix == 2. */
9766
9767 static tree
9768 fold_builtin_significand (location_t loc, tree arg, tree rettype)
9769 {
9770 if (! validate_arg (arg, REAL_TYPE))
9771 return NULL_TREE;
9772
9773 STRIP_NOPS (arg);
9774
9775 if (TREE_CODE (arg) == REAL_CST && ! TREE_OVERFLOW (arg))
9776 {
9777 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg);
9778
9779 switch (value->cl)
9780 {
9781 case rvc_zero:
9782 case rvc_nan:
9783 case rvc_inf:
9784 /* If arg is +-0, +-Inf or +-NaN, then return it. */
9785 return fold_convert_loc (loc, rettype, arg);
9786 case rvc_normal:
9787 /* For normal numbers, proceed iff radix == 2. */
9788 if (REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (arg)))->b == 2)
9789 {
9790 REAL_VALUE_TYPE result = *value;
9791 /* In GCC, normalized significands are in the range [0.5,
9792 1.0). We want them to be [1.0, 2.0) so set the
9793 exponent to 1. */
9794 SET_REAL_EXP (&result, 1);
9795 return build_real (rettype, result);
9796 }
9797 break;
9798 }
9799 }
9800
9801 return NULL_TREE;
9802 }
9803
9804 /* Fold a call to builtin frexp; we can assume the base is 2. */
9805
9806 static tree
9807 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9808 {
9809 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9810 return NULL_TREE;
9811
9812 STRIP_NOPS (arg0);
9813
9814 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9815 return NULL_TREE;
9816
9817 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9818
9819 /* Proceed if a valid pointer type was passed in. */
9820 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9821 {
9822 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9823 tree frac, exp;
9824
9825 switch (value->cl)
9826 {
9827 case rvc_zero:
9828 /* For +-0, return (*exp = 0, +-0). */
9829 exp = integer_zero_node;
9830 frac = arg0;
9831 break;
9832 case rvc_nan:
9833 case rvc_inf:
9834 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9835 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9836 case rvc_normal:
9837 {
9838 /* Since the frexp function always expects base 2, and in
9839 GCC normalized significands are already in the range
9840 [0.5, 1.0), we have exactly what frexp wants. */
9841 REAL_VALUE_TYPE frac_rvt = *value;
9842 SET_REAL_EXP (&frac_rvt, 0);
9843 frac = build_real (rettype, frac_rvt);
9844 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9845 }
9846 break;
9847 default:
9848 gcc_unreachable ();
9849 }
9850
9851 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9852 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9853 TREE_SIDE_EFFECTS (arg1) = 1;
9854 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9855 }
9856
9857 return NULL_TREE;
9858 }
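
/* Illustration: GCC keeps significands in [0.5, 1.0), which is
   exactly frexp's convention, so a constant splits with no
   arithmetic:

     frexp (12.0, &e)   ->   (*e = 4, 0.75)    (12.0 == 0.75 * 2**4)  */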
9859
9860 /* Fold a call to builtin ldexp or scalbn/scalbln. If LDEXP is true
9861 then we can assume the base is two. If it's false, then we have to
9862 check the mode of the TYPE parameter in certain cases. */
9863
9864 static tree
9865 fold_builtin_load_exponent (location_t loc, tree arg0, tree arg1,
9866 tree type, bool ldexp)
9867 {
9868 if (validate_arg (arg0, REAL_TYPE) && validate_arg (arg1, INTEGER_TYPE))
9869 {
9870 STRIP_NOPS (arg0);
9871 STRIP_NOPS (arg1);
9872
9873 /* If arg0 is 0, Inf or NaN, or if arg1 is 0, then return arg0. */
9874 if (real_zerop (arg0) || integer_zerop (arg1)
9875 || (TREE_CODE (arg0) == REAL_CST
9876 && !real_isfinite (&TREE_REAL_CST (arg0))))
9877 return omit_one_operand_loc (loc, type, arg0, arg1);
9878
9879 /* If both arguments are constant, then try to evaluate it. */
9880 if ((ldexp || REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2)
9881 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
9882 && tree_fits_shwi_p (arg1))
9883 {
9884 /* Bound the maximum adjustment to twice the range of the
9885 mode's valid exponents. Use abs to ensure the range is
9886 positive as a sanity check. */
9887 const long max_exp_adj = 2 *
9888 labs (REAL_MODE_FORMAT (TYPE_MODE (type))->emax
9889 - REAL_MODE_FORMAT (TYPE_MODE (type))->emin);
9890
9891 /* Get the user-requested adjustment. */
9892 const HOST_WIDE_INT req_exp_adj = tree_to_shwi (arg1);
9893
9894 /* The requested adjustment must be inside this range. This
9895 is a preliminary cap to avoid things like overflow, we
9896 may still fail to compute the result for other reasons. */
9897 if (-max_exp_adj < req_exp_adj && req_exp_adj < max_exp_adj)
9898 {
9899 REAL_VALUE_TYPE initial_result;
9900
9901 real_ldexp (&initial_result, &TREE_REAL_CST (arg0), req_exp_adj);
9902
9903 /* Ensure we didn't overflow. */
9904 if (! real_isinf (&initial_result))
9905 {
9906 const REAL_VALUE_TYPE trunc_result
9907 = real_value_truncate (TYPE_MODE (type), initial_result);
9908
9909 /* Only proceed if the target mode can hold the
9910 resulting value. */
9911 if (REAL_VALUES_EQUAL (initial_result, trunc_result))
9912 return build_real (type, trunc_result);
9913 }
9914 }
9915 }
9916 }
9917
9918 return NULL_TREE;
9919 }
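
/* Illustration: with both operands constant the fold above scales
   directly, e.g. ldexp (0.75, 4) -> 12.0, provided the adjustment
   stays within the mode's exponent range and the result is exactly
   representable in the target type.  */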
9920
9921 /* Fold a call to builtin modf. */
9922
9923 static tree
9924 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9925 {
9926 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9927 return NULL_TREE;
9928
9929 STRIP_NOPS (arg0);
9930
9931 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9932 return NULL_TREE;
9933
9934 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9935
9936 /* Proceed if a valid pointer type was passed in. */
9937 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9938 {
9939 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9940 REAL_VALUE_TYPE trunc, frac;
9941
9942 switch (value->cl)
9943 {
9944 case rvc_nan:
9945 case rvc_zero:
9946 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9947 trunc = frac = *value;
9948 break;
9949 case rvc_inf:
9950 /* For +-Inf, return (*arg1 = arg0, +-0). */
9951 frac = dconst0;
9952 frac.sign = value->sign;
9953 trunc = *value;
9954 break;
9955 case rvc_normal:
9956 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9957 real_trunc (&trunc, VOIDmode, value);
9958 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9959 /* If the original number was negative and already
9960 integral, then the fractional part is -0.0. */
9961 if (value->sign && frac.cl == rvc_zero)
9962 frac.sign = value->sign;
9963 break;
9964 }
9965
9966 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9967 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9968 build_real (rettype, trunc));
9969 TREE_SIDE_EFFECTS (arg1) = 1;
9970 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9971 build_real (rettype, frac));
9972 }
9973
9974 return NULL_TREE;
9975 }
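
/* Illustration: modf on constants splits through the pointer
   argument, preserving the sign of a negative integral input:

     modf (2.5, &i)    ->   (*i = 2.0, 0.5)
     modf (-2.0, &i)   ->   (*i = -2.0, -0.0)  */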
9976
9977 /* Given a location LOC, an interclass builtin function decl FNDECL
9978 and its single argument ARG, return a folded expression computing
9979 the same, or NULL_TREE if we either couldn't or didn't want to fold
9980 (the latter happens if there's an RTL instruction available). */
9981
9982 static tree
9983 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9984 {
9985 enum machine_mode mode;
9986
9987 if (!validate_arg (arg, REAL_TYPE))
9988 return NULL_TREE;
9989
9990 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9991 return NULL_TREE;
9992
9993 mode = TYPE_MODE (TREE_TYPE (arg));
9994
9995 /* If there is no optab, try generic code. */
9996 switch (DECL_FUNCTION_CODE (fndecl))
9997 {
9998 tree result;
9999
10000 CASE_FLT_FN (BUILT_IN_ISINF):
10001 {
10002 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
10003 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
10004 tree const type = TREE_TYPE (arg);
10005 REAL_VALUE_TYPE r;
10006 char buf[128];
10007
10008 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
10009 real_from_string (&r, buf);
10010 result = build_call_expr (isgr_fn, 2,
10011 fold_build1_loc (loc, ABS_EXPR, type, arg),
10012 build_real (type, r));
10013 return result;
10014 }
10015 CASE_FLT_FN (BUILT_IN_FINITE):
10016 case BUILT_IN_ISFINITE:
10017 {
10018 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
10019 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
10020 tree const type = TREE_TYPE (arg);
10021 REAL_VALUE_TYPE r;
10022 char buf[128];
10023
10024 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
10025 real_from_string (&r, buf);
10026 result = build_call_expr (isle_fn, 2,
10027 fold_build1_loc (loc, ABS_EXPR, type, arg),
10028 build_real (type, r));
10029 /*result = fold_build2_loc (loc, UNGT_EXPR,
10030 TREE_TYPE (TREE_TYPE (fndecl)),
10031 fold_build1_loc (loc, ABS_EXPR, type, arg),
10032 build_real (type, r));
10033 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
10034 TREE_TYPE (TREE_TYPE (fndecl)),
10035 result);*/
10036 return result;
10037 }
10038 case BUILT_IN_ISNORMAL:
10039 {
10040 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
10041 islessequal(fabs(x),DBL_MAX). */
10042 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
10043 tree const isge_fn = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
10044 tree const type = TREE_TYPE (arg);
10045 REAL_VALUE_TYPE rmax, rmin;
10046 char buf[128];
10047
10048 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
10049 real_from_string (&rmax, buf);
10050 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
10051 real_from_string (&rmin, buf);
10052 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
10053 result = build_call_expr (isle_fn, 2, arg,
10054 build_real (type, rmax));
10055 result = fold_build2 (BIT_AND_EXPR, integer_type_node, result,
10056 build_call_expr (isge_fn, 2, arg,
10057 build_real (type, rmin)));
10058 return result;
10059 }
10060 default:
10061 break;
10062 }
10063
10064 return NULL_TREE;
10065 }
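
/* Illustration: the generic lowerings above express the
   classification macros as quiet IEEE comparisons, e.g. for double:

     isinf (x)     ->  isgreater (fabs (x), DBL_MAX)
     isfinite (x)  ->  islessequal (fabs (x), DBL_MAX)
     isnormal (x)  ->  isgreaterequal (fabs (x), DBL_MIN)
                       & islessequal (fabs (x), DBL_MAX)  */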
10066
10067 /* Fold a call to __builtin_isnan, __builtin_isinf, or __builtin_finite.
10068 ARG is the argument for the call. */
10069
10070 static tree
10071 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
10072 {
10073 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10074 REAL_VALUE_TYPE r;
10075
10076 if (!validate_arg (arg, REAL_TYPE))
10077 return NULL_TREE;
10078
10079 switch (builtin_index)
10080 {
10081 case BUILT_IN_ISINF:
10082 if (!HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
10083 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
10084
10085 if (TREE_CODE (arg) == REAL_CST)
10086 {
10087 r = TREE_REAL_CST (arg);
10088 if (real_isinf (&r))
10089 return real_compare (GT_EXPR, &r, &dconst0)
10090 ? integer_one_node : integer_minus_one_node;
10091 else
10092 return integer_zero_node;
10093 }
10094
10095 return NULL_TREE;
10096
10097 case BUILT_IN_ISINF_SIGN:
10098 {
10099 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
10100 /* In a boolean context, GCC will fold the inner COND_EXPR to
10101 1. So e.g. "if (isinf_sign(x))" would be folded to just
10102 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
10103 tree signbit_fn = mathfn_built_in_1 (TREE_TYPE (arg), BUILT_IN_SIGNBIT, 0);
10104 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
10105 tree tmp = NULL_TREE;
10106
10107 arg = builtin_save_expr (arg);
10108
10109 if (signbit_fn && isinf_fn)
10110 {
10111 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
10112 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
10113
10114 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
10115 signbit_call, integer_zero_node);
10116 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
10117 isinf_call, integer_zero_node);
10118
10119 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
10120 integer_minus_one_node, integer_one_node);
10121 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10122 isinf_call, tmp,
10123 integer_zero_node);
10124 }
10125
10126 return tmp;
10127 }
10128
10129 case BUILT_IN_ISFINITE:
10130 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg)))
10131 && !HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg))))
10132 return omit_one_operand_loc (loc, type, integer_one_node, arg);
10133
10134 if (TREE_CODE (arg) == REAL_CST)
10135 {
10136 r = TREE_REAL_CST (arg);
10137 return real_isfinite (&r) ? integer_one_node : integer_zero_node;
10138 }
10139
10140 return NULL_TREE;
10141
10142 case BUILT_IN_ISNAN:
10143 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg))))
10144 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
10145
10146 if (TREE_CODE (arg) == REAL_CST)
10147 {
10148 r = TREE_REAL_CST (arg);
10149 return real_isnan (&r) ? integer_one_node : integer_zero_node;
10150 }
10151
10152 arg = builtin_save_expr (arg);
10153 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
10154
10155 default:
10156 gcc_unreachable ();
10157 }
10158 }
10159
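/* Illustrative sketch (assumes C99 isinf and signbit): written out in
   source form, the isinf_sign folding above is

     __builtin_isinf_sign (x)
       ==>  isinf (x) != 0 ? (signbit (x) != 0 ? -1 : 1) : 0

   so -INFINITY yields -1, +INFINITY yields 1, and every finite or NaN
   value yields 0.  */
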
10160 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
10161 This builtin will generate code to return the appropriate floating
10162 point classification depending on the value of the floating point
10163 number passed in. The possible return values must be supplied as
10164 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
10165    FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipsis stands for exactly
10166    one floating point argument, which is "type generic".  */
10167
10168 static tree
10169 fold_builtin_fpclassify (location_t loc, tree exp)
10170 {
10171 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
10172 arg, type, res, tmp;
10173 enum machine_mode mode;
10174 REAL_VALUE_TYPE r;
10175 char buf[128];
10176
10177 /* Verify the required arguments in the original call. */
10178 if (!validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE,
10179 INTEGER_TYPE, INTEGER_TYPE,
10180 INTEGER_TYPE, REAL_TYPE, VOID_TYPE))
10181 return NULL_TREE;
10182
10183 fp_nan = CALL_EXPR_ARG (exp, 0);
10184 fp_infinite = CALL_EXPR_ARG (exp, 1);
10185 fp_normal = CALL_EXPR_ARG (exp, 2);
10186 fp_subnormal = CALL_EXPR_ARG (exp, 3);
10187 fp_zero = CALL_EXPR_ARG (exp, 4);
10188 arg = CALL_EXPR_ARG (exp, 5);
10189 type = TREE_TYPE (arg);
10190 mode = TYPE_MODE (type);
10191 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
10192
10193 /* fpclassify(x) ->
10194 isnan(x) ? FP_NAN :
10195 (fabs(x) == Inf ? FP_INFINITE :
10196 (fabs(x) >= DBL_MIN ? FP_NORMAL :
10197 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
10198
10199 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10200 build_real (type, dconst0));
10201 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
10202 tmp, fp_zero, fp_subnormal);
10203
10204 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
10205 real_from_string (&r, buf);
10206 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
10207 arg, build_real (type, r));
10208 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
10209
10210 if (HONOR_INFINITIES (mode))
10211 {
10212 real_inf (&r);
10213 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
10214 build_real (type, r));
10215 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
10216 fp_infinite, res);
10217 }
10218
10219 if (HONOR_NANS (mode))
10220 {
10221 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
10222 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
10223 }
10224
10225 return res;
10226 }
10227
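/* Illustrative sketch, assuming IEEE double (emin - 1 == -1022): the
   folding above amounts to the nested conditional

     __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
                           FP_SUBNORMAL, FP_ZERO, x)
       ==>  !(x == x)              ? FP_NAN
          : fabs (x) == Inf        ? FP_INFINITE
          : fabs (x) >= 0x1p-1022  ? FP_NORMAL
          : fabs (x) == 0.0        ? FP_ZERO
          :                          FP_SUBNORMAL

   with the infinity and NaN arms omitted when the mode honors
   neither.  */
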
10228 /* Fold a call to an unordered comparison function such as
10229 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
10230 being called and ARG0 and ARG1 are the arguments for the call.
10231 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
10232 the opposite of the desired result. UNORDERED_CODE is used
10233 for modes that can hold NaNs and ORDERED_CODE is used for
10234 the rest. */
10235
10236 static tree
10237 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
10238 enum tree_code unordered_code,
10239 enum tree_code ordered_code)
10240 {
10241 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10242 enum tree_code code;
10243 tree type0, type1;
10244 enum tree_code code0, code1;
10245 tree cmp_type = NULL_TREE;
10246
10247 type0 = TREE_TYPE (arg0);
10248 type1 = TREE_TYPE (arg1);
10249
10250 code0 = TREE_CODE (type0);
10251 code1 = TREE_CODE (type1);
10252
10253 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
10254 /* Choose the wider of two real types. */
10255 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
10256 ? type0 : type1;
10257 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
10258 cmp_type = type0;
10259 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
10260 cmp_type = type1;
10261
10262 arg0 = fold_convert_loc (loc, cmp_type, arg0);
10263 arg1 = fold_convert_loc (loc, cmp_type, arg1);
10264
10265 if (unordered_code == UNORDERED_EXPR)
10266 {
10267 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
10268 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
10269 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
10270 }
10271
10272 code = HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))) ? unordered_code
10273 : ordered_code;
10274 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
10275 fold_build2_loc (loc, code, type, arg0, arg1));
10276 }
10277
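/* Illustrative sketch: each macro folds to the negation of its inverse
   comparison, e.g. for a NaN-honoring mode

     isgreater (x, y)  ==>  !(x UNLE y)
     isless (x, y)     ==>  !(x UNGE y)

   while for modes that cannot hold NaNs the quiet UN* codes decay to
   the ordinary LE/GE/... codes and isunordered folds to constant 0.  */
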
10278 /* Fold a call to built-in function FNDECL with 0 arguments.
10279 IGNORE is true if the result of the function call is ignored. This
10280 function returns NULL_TREE if no simplification was possible. */
10281
10282 static tree
10283 fold_builtin_0 (location_t loc, tree fndecl, bool ignore ATTRIBUTE_UNUSED)
10284 {
10285 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10286 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10287 switch (fcode)
10288 {
10289 CASE_FLT_FN (BUILT_IN_INF):
10290 case BUILT_IN_INFD32:
10291 case BUILT_IN_INFD64:
10292 case BUILT_IN_INFD128:
10293 return fold_builtin_inf (loc, type, true);
10294
10295 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10296 return fold_builtin_inf (loc, type, false);
10297
10298 case BUILT_IN_CLASSIFY_TYPE:
10299 return fold_builtin_classify_type (NULL_TREE);
10300
10301 case BUILT_IN_UNREACHABLE:
10302 if (flag_sanitize & SANITIZE_UNREACHABLE
10303 && (current_function_decl == NULL
10304 || !lookup_attribute ("no_sanitize_undefined",
10305 DECL_ATTRIBUTES (current_function_decl))))
10306 return ubsan_instrument_unreachable (loc);
10307 break;
10308
10309 default:
10310 break;
10311 }
10312 return NULL_TREE;
10313 }
10314
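/* Illustrative note (from the behavior of fold_builtin_inf): with
   default flags both __builtin_inf () and __builtin_huge_val () fold to
   a REAL_CST holding +Inf; the true/false argument above only controls
   whether the C99-mandated pedwarn is issued for targets whose format
   cannot represent infinity.  */
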
10315 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10316 IGNORE is true if the result of the function call is ignored. This
10317 function returns NULL_TREE if no simplification was possible. */
10318
10319 static tree
10320 fold_builtin_1 (location_t loc, tree fndecl, tree arg0, bool ignore)
10321 {
10322 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10323 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10324 switch (fcode)
10325 {
10326 case BUILT_IN_CONSTANT_P:
10327 {
10328 tree val = fold_builtin_constant_p (arg0);
10329
10330 /* Gimplification will pull the CALL_EXPR for the builtin out of
10331 an if condition. When not optimizing, we'll not CSE it back.
10332 	   To avoid regressions such as link errors, return false now.  */
10333 if (!val && !optimize)
10334 val = integer_zero_node;
10335
10336 return val;
10337 }
10338
10339 case BUILT_IN_CLASSIFY_TYPE:
10340 return fold_builtin_classify_type (arg0);
10341
10342 case BUILT_IN_STRLEN:
10343 return fold_builtin_strlen (loc, type, arg0);
10344
10345 CASE_FLT_FN (BUILT_IN_FABS):
10346 case BUILT_IN_FABSD32:
10347 case BUILT_IN_FABSD64:
10348 case BUILT_IN_FABSD128:
10349 return fold_builtin_fabs (loc, arg0, type);
10350
10351 case BUILT_IN_ABS:
10352 case BUILT_IN_LABS:
10353 case BUILT_IN_LLABS:
10354 case BUILT_IN_IMAXABS:
10355 return fold_builtin_abs (loc, arg0, type);
10356
10357 CASE_FLT_FN (BUILT_IN_CONJ):
10358 if (validate_arg (arg0, COMPLEX_TYPE)
10359 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10360 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
10361 break;
10362
10363 CASE_FLT_FN (BUILT_IN_CREAL):
10364 if (validate_arg (arg0, COMPLEX_TYPE)
10365 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10366 	return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
10367 break;
10368
10369 CASE_FLT_FN (BUILT_IN_CIMAG):
10370 if (validate_arg (arg0, COMPLEX_TYPE)
10371 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10372 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
10373 break;
10374
10375 CASE_FLT_FN (BUILT_IN_CCOS):
10376 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ false);
10377
10378 CASE_FLT_FN (BUILT_IN_CCOSH):
10379 return fold_builtin_ccos (loc, arg0, type, fndecl, /*hyper=*/ true);
10380
10381 CASE_FLT_FN (BUILT_IN_CPROJ):
10382 return fold_builtin_cproj (loc, arg0, type);
10383
10384 CASE_FLT_FN (BUILT_IN_CSIN):
10385 if (validate_arg (arg0, COMPLEX_TYPE)
10386 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10387 return do_mpc_arg1 (arg0, type, mpc_sin);
10388 break;
10389
10390 CASE_FLT_FN (BUILT_IN_CSINH):
10391 if (validate_arg (arg0, COMPLEX_TYPE)
10392 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10393 return do_mpc_arg1 (arg0, type, mpc_sinh);
10394 break;
10395
10396 CASE_FLT_FN (BUILT_IN_CTAN):
10397 if (validate_arg (arg0, COMPLEX_TYPE)
10398 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10399 return do_mpc_arg1 (arg0, type, mpc_tan);
10400 break;
10401
10402 CASE_FLT_FN (BUILT_IN_CTANH):
10403 if (validate_arg (arg0, COMPLEX_TYPE)
10404 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10405 return do_mpc_arg1 (arg0, type, mpc_tanh);
10406 break;
10407
10408 CASE_FLT_FN (BUILT_IN_CLOG):
10409 if (validate_arg (arg0, COMPLEX_TYPE)
10410 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10411 return do_mpc_arg1 (arg0, type, mpc_log);
10412 break;
10413
10414 CASE_FLT_FN (BUILT_IN_CSQRT):
10415 if (validate_arg (arg0, COMPLEX_TYPE)
10416 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10417 return do_mpc_arg1 (arg0, type, mpc_sqrt);
10418 break;
10419
10420 CASE_FLT_FN (BUILT_IN_CASIN):
10421 if (validate_arg (arg0, COMPLEX_TYPE)
10422 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10423 return do_mpc_arg1 (arg0, type, mpc_asin);
10424 break;
10425
10426 CASE_FLT_FN (BUILT_IN_CACOS):
10427 if (validate_arg (arg0, COMPLEX_TYPE)
10428 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10429 return do_mpc_arg1 (arg0, type, mpc_acos);
10430 break;
10431
10432 CASE_FLT_FN (BUILT_IN_CATAN):
10433 if (validate_arg (arg0, COMPLEX_TYPE)
10434 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10435 return do_mpc_arg1 (arg0, type, mpc_atan);
10436 break;
10437
10438 CASE_FLT_FN (BUILT_IN_CASINH):
10439 if (validate_arg (arg0, COMPLEX_TYPE)
10440 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10441 return do_mpc_arg1 (arg0, type, mpc_asinh);
10442 break;
10443
10444 CASE_FLT_FN (BUILT_IN_CACOSH):
10445 if (validate_arg (arg0, COMPLEX_TYPE)
10446 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10447 return do_mpc_arg1 (arg0, type, mpc_acosh);
10448 break;
10449
10450 CASE_FLT_FN (BUILT_IN_CATANH):
10451 if (validate_arg (arg0, COMPLEX_TYPE)
10452 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10453 return do_mpc_arg1 (arg0, type, mpc_atanh);
10454 break;
10455
10456 CASE_FLT_FN (BUILT_IN_CABS):
10457 return fold_builtin_cabs (loc, arg0, type, fndecl);
10458
10459 CASE_FLT_FN (BUILT_IN_CARG):
10460 return fold_builtin_carg (loc, arg0, type);
10461
10462 CASE_FLT_FN (BUILT_IN_SQRT):
10463 return fold_builtin_sqrt (loc, arg0, type);
10464
10465 CASE_FLT_FN (BUILT_IN_CBRT):
10466 return fold_builtin_cbrt (loc, arg0, type);
10467
10468 CASE_FLT_FN (BUILT_IN_ASIN):
10469 if (validate_arg (arg0, REAL_TYPE))
10470 return do_mpfr_arg1 (arg0, type, mpfr_asin,
10471 &dconstm1, &dconst1, true);
10472 break;
10473
10474 CASE_FLT_FN (BUILT_IN_ACOS):
10475 if (validate_arg (arg0, REAL_TYPE))
10476 return do_mpfr_arg1 (arg0, type, mpfr_acos,
10477 &dconstm1, &dconst1, true);
10478 break;
10479
10480 CASE_FLT_FN (BUILT_IN_ATAN):
10481 if (validate_arg (arg0, REAL_TYPE))
10482 return do_mpfr_arg1 (arg0, type, mpfr_atan, NULL, NULL, 0);
10483 break;
10484
10485 CASE_FLT_FN (BUILT_IN_ASINH):
10486 if (validate_arg (arg0, REAL_TYPE))
10487 return do_mpfr_arg1 (arg0, type, mpfr_asinh, NULL, NULL, 0);
10488 break;
10489
10490 CASE_FLT_FN (BUILT_IN_ACOSH):
10491 if (validate_arg (arg0, REAL_TYPE))
10492 return do_mpfr_arg1 (arg0, type, mpfr_acosh,
10493 &dconst1, NULL, true);
10494 break;
10495
10496 CASE_FLT_FN (BUILT_IN_ATANH):
10497 if (validate_arg (arg0, REAL_TYPE))
10498 return do_mpfr_arg1 (arg0, type, mpfr_atanh,
10499 &dconstm1, &dconst1, false);
10500 break;
10501
10502 CASE_FLT_FN (BUILT_IN_SIN):
10503 if (validate_arg (arg0, REAL_TYPE))
10504 return do_mpfr_arg1 (arg0, type, mpfr_sin, NULL, NULL, 0);
10505 break;
10506
10507 CASE_FLT_FN (BUILT_IN_COS):
10508 return fold_builtin_cos (loc, arg0, type, fndecl);
10509
10510 CASE_FLT_FN (BUILT_IN_TAN):
10511 return fold_builtin_tan (arg0, type);
10512
10513 CASE_FLT_FN (BUILT_IN_CEXP):
10514 return fold_builtin_cexp (loc, arg0, type);
10515
10516 CASE_FLT_FN (BUILT_IN_CEXPI):
10517 if (validate_arg (arg0, REAL_TYPE))
10518 return do_mpfr_sincos (arg0, NULL_TREE, NULL_TREE);
10519 break;
10520
10521 CASE_FLT_FN (BUILT_IN_SINH):
10522 if (validate_arg (arg0, REAL_TYPE))
10523 return do_mpfr_arg1 (arg0, type, mpfr_sinh, NULL, NULL, 0);
10524 break;
10525
10526 CASE_FLT_FN (BUILT_IN_COSH):
10527 return fold_builtin_cosh (loc, arg0, type, fndecl);
10528
10529 CASE_FLT_FN (BUILT_IN_TANH):
10530 if (validate_arg (arg0, REAL_TYPE))
10531 return do_mpfr_arg1 (arg0, type, mpfr_tanh, NULL, NULL, 0);
10532 break;
10533
10534 CASE_FLT_FN (BUILT_IN_ERF):
10535 if (validate_arg (arg0, REAL_TYPE))
10536 return do_mpfr_arg1 (arg0, type, mpfr_erf, NULL, NULL, 0);
10537 break;
10538
10539 CASE_FLT_FN (BUILT_IN_ERFC):
10540 if (validate_arg (arg0, REAL_TYPE))
10541 return do_mpfr_arg1 (arg0, type, mpfr_erfc, NULL, NULL, 0);
10542 break;
10543
10544 CASE_FLT_FN (BUILT_IN_TGAMMA):
10545 if (validate_arg (arg0, REAL_TYPE))
10546 return do_mpfr_arg1 (arg0, type, mpfr_gamma, NULL, NULL, 0);
10547 break;
10548
10549 CASE_FLT_FN (BUILT_IN_EXP):
10550 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp);
10551
10552 CASE_FLT_FN (BUILT_IN_EXP2):
10553 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp2);
10554
10555 CASE_FLT_FN (BUILT_IN_EXP10):
10556 CASE_FLT_FN (BUILT_IN_POW10):
10557 return fold_builtin_exponent (loc, fndecl, arg0, mpfr_exp10);
10558
10559 CASE_FLT_FN (BUILT_IN_EXPM1):
10560 if (validate_arg (arg0, REAL_TYPE))
10561 return do_mpfr_arg1 (arg0, type, mpfr_expm1, NULL, NULL, 0);
10562 break;
10563
10564 CASE_FLT_FN (BUILT_IN_LOG):
10565 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log);
10566
10567 CASE_FLT_FN (BUILT_IN_LOG2):
10568 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log2);
10569
10570 CASE_FLT_FN (BUILT_IN_LOG10):
10571 return fold_builtin_logarithm (loc, fndecl, arg0, mpfr_log10);
10572
10573 CASE_FLT_FN (BUILT_IN_LOG1P):
10574 if (validate_arg (arg0, REAL_TYPE))
10575 return do_mpfr_arg1 (arg0, type, mpfr_log1p,
10576 &dconstm1, NULL, false);
10577 break;
10578
10579 CASE_FLT_FN (BUILT_IN_J0):
10580 if (validate_arg (arg0, REAL_TYPE))
10581 return do_mpfr_arg1 (arg0, type, mpfr_j0,
10582 NULL, NULL, 0);
10583 break;
10584
10585 CASE_FLT_FN (BUILT_IN_J1):
10586 if (validate_arg (arg0, REAL_TYPE))
10587 return do_mpfr_arg1 (arg0, type, mpfr_j1,
10588 NULL, NULL, 0);
10589 break;
10590
10591 CASE_FLT_FN (BUILT_IN_Y0):
10592 if (validate_arg (arg0, REAL_TYPE))
10593 return do_mpfr_arg1 (arg0, type, mpfr_y0,
10594 &dconst0, NULL, false);
10595 break;
10596
10597 CASE_FLT_FN (BUILT_IN_Y1):
10598 if (validate_arg (arg0, REAL_TYPE))
10599 return do_mpfr_arg1 (arg0, type, mpfr_y1,
10600 &dconst0, NULL, false);
10601 break;
10602
10603 CASE_FLT_FN (BUILT_IN_NAN):
10604 case BUILT_IN_NAND32:
10605 case BUILT_IN_NAND64:
10606 case BUILT_IN_NAND128:
10607 return fold_builtin_nan (arg0, type, true);
10608
10609 CASE_FLT_FN (BUILT_IN_NANS):
10610 return fold_builtin_nan (arg0, type, false);
10611
10612 CASE_FLT_FN (BUILT_IN_FLOOR):
10613 return fold_builtin_floor (loc, fndecl, arg0);
10614
10615 CASE_FLT_FN (BUILT_IN_CEIL):
10616 return fold_builtin_ceil (loc, fndecl, arg0);
10617
10618 CASE_FLT_FN (BUILT_IN_TRUNC):
10619 return fold_builtin_trunc (loc, fndecl, arg0);
10620
10621 CASE_FLT_FN (BUILT_IN_ROUND):
10622 return fold_builtin_round (loc, fndecl, arg0);
10623
10624 CASE_FLT_FN (BUILT_IN_NEARBYINT):
10625 CASE_FLT_FN (BUILT_IN_RINT):
10626 return fold_trunc_transparent_mathfn (loc, fndecl, arg0);
10627
10628 CASE_FLT_FN (BUILT_IN_ICEIL):
10629 CASE_FLT_FN (BUILT_IN_LCEIL):
10630 CASE_FLT_FN (BUILT_IN_LLCEIL):
10631 CASE_FLT_FN (BUILT_IN_LFLOOR):
10632 CASE_FLT_FN (BUILT_IN_IFLOOR):
10633 CASE_FLT_FN (BUILT_IN_LLFLOOR):
10634 CASE_FLT_FN (BUILT_IN_IROUND):
10635 CASE_FLT_FN (BUILT_IN_LROUND):
10636 CASE_FLT_FN (BUILT_IN_LLROUND):
10637 return fold_builtin_int_roundingfn (loc, fndecl, arg0);
10638
10639 CASE_FLT_FN (BUILT_IN_IRINT):
10640 CASE_FLT_FN (BUILT_IN_LRINT):
10641 CASE_FLT_FN (BUILT_IN_LLRINT):
10642 return fold_fixed_mathfn (loc, fndecl, arg0);
10643
10644 case BUILT_IN_BSWAP16:
10645 case BUILT_IN_BSWAP32:
10646 case BUILT_IN_BSWAP64:
10647 return fold_builtin_bswap (fndecl, arg0);
10648
10649 CASE_INT_FN (BUILT_IN_FFS):
10650 CASE_INT_FN (BUILT_IN_CLZ):
10651 CASE_INT_FN (BUILT_IN_CTZ):
10652 CASE_INT_FN (BUILT_IN_CLRSB):
10653 CASE_INT_FN (BUILT_IN_POPCOUNT):
10654 CASE_INT_FN (BUILT_IN_PARITY):
10655 return fold_builtin_bitop (fndecl, arg0);
10656
10657 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10658 return fold_builtin_signbit (loc, arg0, type);
10659
10660 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
10661 return fold_builtin_significand (loc, arg0, type);
10662
10663 CASE_FLT_FN (BUILT_IN_ILOGB):
10664 CASE_FLT_FN (BUILT_IN_LOGB):
10665 return fold_builtin_logb (loc, arg0, type);
10666
10667 case BUILT_IN_ISASCII:
10668 return fold_builtin_isascii (loc, arg0);
10669
10670 case BUILT_IN_TOASCII:
10671 return fold_builtin_toascii (loc, arg0);
10672
10673 case BUILT_IN_ISDIGIT:
10674 return fold_builtin_isdigit (loc, arg0);
10675
10676 CASE_FLT_FN (BUILT_IN_FINITE):
10677 case BUILT_IN_FINITED32:
10678 case BUILT_IN_FINITED64:
10679 case BUILT_IN_FINITED128:
10680 case BUILT_IN_ISFINITE:
10681 {
10682 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10683 if (ret)
10684 return ret;
10685 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10686 }
10687
10688 CASE_FLT_FN (BUILT_IN_ISINF):
10689 case BUILT_IN_ISINFD32:
10690 case BUILT_IN_ISINFD64:
10691 case BUILT_IN_ISINFD128:
10692 {
10693 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10694 if (ret)
10695 return ret;
10696 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10697 }
10698
10699 case BUILT_IN_ISNORMAL:
10700 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10701
10702 case BUILT_IN_ISINF_SIGN:
10703 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10704
10705 CASE_FLT_FN (BUILT_IN_ISNAN):
10706 case BUILT_IN_ISNAND32:
10707 case BUILT_IN_ISNAND64:
10708 case BUILT_IN_ISNAND128:
10709 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10710
10711 case BUILT_IN_PRINTF:
10712 case BUILT_IN_PRINTF_UNLOCKED:
10713 case BUILT_IN_VPRINTF:
10714 return fold_builtin_printf (loc, fndecl, arg0, NULL_TREE, ignore, fcode);
10715
10716 case BUILT_IN_FREE:
10717 if (integer_zerop (arg0))
10718 return build_empty_stmt (loc);
10719 break;
10720
10721 default:
10722 break;
10723 }
10724
10725 return NULL_TREE;
10726
10727 }
10728
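/* Illustrative examples of one-argument folds dispatched above, assuming
   constant arguments and default flags:

     __builtin_strlen ("abc")     ==>  3
     __builtin_fabs (-2.5)        ==>  2.5
     __builtin_popcount (0xff)    ==>  8
     __builtin_free ((void *) 0)  ==>  empty statement  */
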
10729 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
10730 IGNORE is true if the result of the function call is ignored. This
10731 function returns NULL_TREE if no simplification was possible. */
10732
10733 static tree
10734 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1, bool ignore)
10735 {
10736 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10737 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10738
10739 switch (fcode)
10740 {
10741 CASE_FLT_FN (BUILT_IN_JN):
10742 if (validate_arg (arg0, INTEGER_TYPE)
10743 && validate_arg (arg1, REAL_TYPE))
10744 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, 0);
10745 break;
10746
10747 CASE_FLT_FN (BUILT_IN_YN):
10748 if (validate_arg (arg0, INTEGER_TYPE)
10749 && validate_arg (arg1, REAL_TYPE))
10750 return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn,
10751 &dconst0, false);
10752 break;
10753
10754 CASE_FLT_FN (BUILT_IN_DREM):
10755 CASE_FLT_FN (BUILT_IN_REMAINDER):
10756 if (validate_arg (arg0, REAL_TYPE)
10757 && validate_arg (arg1, REAL_TYPE))
10758 return do_mpfr_arg2 (arg0, arg1, type, mpfr_remainder);
10759 break;
10760
10761 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10762 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10763 if (validate_arg (arg0, REAL_TYPE)
10764 && validate_arg (arg1, POINTER_TYPE))
10765 return do_mpfr_lgamma_r (arg0, arg1, type);
10766 break;
10767
10768 CASE_FLT_FN (BUILT_IN_ATAN2):
10769 if (validate_arg (arg0, REAL_TYPE)
10770 && validate_arg (arg1, REAL_TYPE))
10771 return do_mpfr_arg2 (arg0, arg1, type, mpfr_atan2);
10772 break;
10773
10774 CASE_FLT_FN (BUILT_IN_FDIM):
10775 if (validate_arg (arg0, REAL_TYPE)
10776 && validate_arg (arg1, REAL_TYPE))
10777 return do_mpfr_arg2 (arg0, arg1, type, mpfr_dim);
10778 break;
10779
10780 CASE_FLT_FN (BUILT_IN_HYPOT):
10781 return fold_builtin_hypot (loc, fndecl, arg0, arg1, type);
10782
10783 CASE_FLT_FN (BUILT_IN_CPOW):
10784 if (validate_arg (arg0, COMPLEX_TYPE)
10785 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10786 && validate_arg (arg1, COMPLEX_TYPE)
10787 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE)
10788 return do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/ 0, mpc_pow);
10789 break;
10790
10791 CASE_FLT_FN (BUILT_IN_LDEXP):
10792 return fold_builtin_load_exponent (loc, arg0, arg1, type, /*ldexp=*/true);
10793 CASE_FLT_FN (BUILT_IN_SCALBN):
10794 CASE_FLT_FN (BUILT_IN_SCALBLN):
10795 return fold_builtin_load_exponent (loc, arg0, arg1,
10796 type, /*ldexp=*/false);
10797
10798 CASE_FLT_FN (BUILT_IN_FREXP):
10799 return fold_builtin_frexp (loc, arg0, arg1, type);
10800
10801 CASE_FLT_FN (BUILT_IN_MODF):
10802 return fold_builtin_modf (loc, arg0, arg1, type);
10803
10804 case BUILT_IN_BZERO:
10805 return fold_builtin_bzero (loc, arg0, arg1, ignore);
10806
10807 case BUILT_IN_FPUTS:
10808 return fold_builtin_fputs (loc, arg0, arg1, ignore, false, NULL_TREE);
10809
10810 case BUILT_IN_FPUTS_UNLOCKED:
10811 return fold_builtin_fputs (loc, arg0, arg1, ignore, true, NULL_TREE);
10812
10813 case BUILT_IN_STRSTR:
10814 return fold_builtin_strstr (loc, arg0, arg1, type);
10815
10816 case BUILT_IN_STRCAT:
10817 return fold_builtin_strcat (loc, arg0, arg1, NULL_TREE);
10818
10819 case BUILT_IN_STRSPN:
10820 return fold_builtin_strspn (loc, arg0, arg1);
10821
10822 case BUILT_IN_STRCSPN:
10823 return fold_builtin_strcspn (loc, arg0, arg1);
10824
10825 case BUILT_IN_STRCHR:
10826 case BUILT_IN_INDEX:
10827 return fold_builtin_strchr (loc, arg0, arg1, type);
10828
10829 case BUILT_IN_STRRCHR:
10830 case BUILT_IN_RINDEX:
10831 return fold_builtin_strrchr (loc, arg0, arg1, type);
10832
10833 case BUILT_IN_STRCPY:
10834 return fold_builtin_strcpy (loc, fndecl, arg0, arg1, NULL_TREE);
10835
10836 case BUILT_IN_STPCPY:
10837 if (ignore)
10838 {
10839 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
10840 if (!fn)
10841 break;
10842
10843 return build_call_expr_loc (loc, fn, 2, arg0, arg1);
10844 }
10845 else
10846 return fold_builtin_stpcpy (loc, fndecl, arg0, arg1);
10847 break;
10848
10849 case BUILT_IN_STRCMP:
10850 return fold_builtin_strcmp (loc, arg0, arg1);
10851
10852 case BUILT_IN_STRPBRK:
10853 return fold_builtin_strpbrk (loc, arg0, arg1, type);
10854
10855 case BUILT_IN_EXPECT:
10856 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
10857
10858 CASE_FLT_FN (BUILT_IN_POW):
10859 return fold_builtin_pow (loc, fndecl, arg0, arg1, type);
10860
10861 CASE_FLT_FN (BUILT_IN_POWI):
10862 return fold_builtin_powi (loc, fndecl, arg0, arg1, type);
10863
10864 CASE_FLT_FN (BUILT_IN_COPYSIGN):
10865 return fold_builtin_copysign (loc, fndecl, arg0, arg1, type);
10866
10867 CASE_FLT_FN (BUILT_IN_FMIN):
10868 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/false);
10869
10870 CASE_FLT_FN (BUILT_IN_FMAX):
10871 return fold_builtin_fmin_fmax (loc, arg0, arg1, type, /*max=*/true);
10872
10873 case BUILT_IN_ISGREATER:
10874 return fold_builtin_unordered_cmp (loc, fndecl,
10875 arg0, arg1, UNLE_EXPR, LE_EXPR);
10876 case BUILT_IN_ISGREATEREQUAL:
10877 return fold_builtin_unordered_cmp (loc, fndecl,
10878 arg0, arg1, UNLT_EXPR, LT_EXPR);
10879 case BUILT_IN_ISLESS:
10880 return fold_builtin_unordered_cmp (loc, fndecl,
10881 arg0, arg1, UNGE_EXPR, GE_EXPR);
10882 case BUILT_IN_ISLESSEQUAL:
10883 return fold_builtin_unordered_cmp (loc, fndecl,
10884 arg0, arg1, UNGT_EXPR, GT_EXPR);
10885 case BUILT_IN_ISLESSGREATER:
10886 return fold_builtin_unordered_cmp (loc, fndecl,
10887 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10888 case BUILT_IN_ISUNORDERED:
10889 return fold_builtin_unordered_cmp (loc, fndecl,
10890 arg0, arg1, UNORDERED_EXPR,
10891 NOP_EXPR);
10892
10893 /* We do the folding for va_start in the expander. */
10894 case BUILT_IN_VA_START:
10895 break;
10896
10897 case BUILT_IN_SPRINTF:
10898 return fold_builtin_sprintf (loc, arg0, arg1, NULL_TREE, ignore);
10899
10900 case BUILT_IN_OBJECT_SIZE:
10901 return fold_builtin_object_size (arg0, arg1);
10902
10903 case BUILT_IN_PRINTF:
10904 case BUILT_IN_PRINTF_UNLOCKED:
10905 case BUILT_IN_VPRINTF:
10906 return fold_builtin_printf (loc, fndecl, arg0, arg1, ignore, fcode);
10907
10908 case BUILT_IN_PRINTF_CHK:
10909 case BUILT_IN_VPRINTF_CHK:
10910 if (!validate_arg (arg0, INTEGER_TYPE)
10911 || TREE_SIDE_EFFECTS (arg0))
10912 return NULL_TREE;
10913 else
10914 return fold_builtin_printf (loc, fndecl,
10915 arg1, NULL_TREE, ignore, fcode);
10916 break;
10917
10918 case BUILT_IN_FPRINTF:
10919 case BUILT_IN_FPRINTF_UNLOCKED:
10920 case BUILT_IN_VFPRINTF:
10921 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, NULL_TREE,
10922 ignore, fcode);
10923
10924 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10925 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10926
10927 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10928 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10929
10930 default:
10931 break;
10932 }
10933 return NULL_TREE;
10934 }
10935
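/* Illustrative examples of two-argument folds dispatched above:

     __builtin_strcmp ("abc", "abc")  ==>  0
     __builtin_stpcpy (d, s)          ==>  strcpy (d, s)  (result unused)
     __builtin_isgreater (x, y)       ==>  !(x UNLE y)    (NaNs honored)  */
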
10936 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10937 and ARG2. IGNORE is true if the result of the function call is ignored.
10938 This function returns NULL_TREE if no simplification was possible. */
10939
10940 static tree
10941 fold_builtin_3 (location_t loc, tree fndecl,
10942 tree arg0, tree arg1, tree arg2, bool ignore)
10943 {
10944 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10945 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10946 switch (fcode)
10947 {
10948
10949 CASE_FLT_FN (BUILT_IN_SINCOS):
10950 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10951
10952 CASE_FLT_FN (BUILT_IN_FMA):
10953 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
10955
10956 CASE_FLT_FN (BUILT_IN_REMQUO):
10957 if (validate_arg (arg0, REAL_TYPE)
10958 && validate_arg (arg1, REAL_TYPE)
10959 && validate_arg (arg2, POINTER_TYPE))
10960 return do_mpfr_remquo (arg0, arg1, arg2);
10961 break;
10962
10963 case BUILT_IN_MEMSET:
10964 return fold_builtin_memset (loc, arg0, arg1, arg2, type, ignore);
10965
10966 case BUILT_IN_BCOPY:
10967 return fold_builtin_memory_op (loc, arg1, arg0, arg2,
10968 void_type_node, true, /*endp=*/3);
10969
10970 case BUILT_IN_MEMCPY:
10971 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10972 type, ignore, /*endp=*/0);
10973
10974 case BUILT_IN_MEMPCPY:
10975 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10976 type, ignore, /*endp=*/1);
10977
10978 case BUILT_IN_MEMMOVE:
10979 return fold_builtin_memory_op (loc, arg0, arg1, arg2,
10980 type, ignore, /*endp=*/3);
10981
10982 case BUILT_IN_STRNCAT:
10983 return fold_builtin_strncat (loc, arg0, arg1, arg2);
10984
10985 case BUILT_IN_STRNCPY:
10986 return fold_builtin_strncpy (loc, fndecl, arg0, arg1, arg2, NULL_TREE);
10987
10988 case BUILT_IN_STRNCMP:
10989 return fold_builtin_strncmp (loc, arg0, arg1, arg2);
10990
10991 case BUILT_IN_MEMCHR:
10992 return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
10993
10994 case BUILT_IN_BCMP:
10995 case BUILT_IN_MEMCMP:
10996       return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10997
10998 case BUILT_IN_SPRINTF:
10999 return fold_builtin_sprintf (loc, arg0, arg1, arg2, ignore);
11000
11001 case BUILT_IN_SNPRINTF:
11002 return fold_builtin_snprintf (loc, arg0, arg1, arg2, NULL_TREE, ignore);
11003
11004 case BUILT_IN_STRCPY_CHK:
11005 case BUILT_IN_STPCPY_CHK:
11006 return fold_builtin_stxcpy_chk (loc, fndecl, arg0, arg1, arg2, NULL_TREE,
11007 ignore, fcode);
11008
11009 case BUILT_IN_STRCAT_CHK:
11010 return fold_builtin_strcat_chk (loc, fndecl, arg0, arg1, arg2);
11011
11012 case BUILT_IN_PRINTF_CHK:
11013 case BUILT_IN_VPRINTF_CHK:
11014 if (!validate_arg (arg0, INTEGER_TYPE)
11015 || TREE_SIDE_EFFECTS (arg0))
11016 return NULL_TREE;
11017 else
11018 return fold_builtin_printf (loc, fndecl, arg1, arg2, ignore, fcode);
11019 break;
11020
11021 case BUILT_IN_FPRINTF:
11022 case BUILT_IN_FPRINTF_UNLOCKED:
11023 case BUILT_IN_VFPRINTF:
11024 return fold_builtin_fprintf (loc, fndecl, arg0, arg1, arg2,
11025 ignore, fcode);
11026
11027 case BUILT_IN_FPRINTF_CHK:
11028 case BUILT_IN_VFPRINTF_CHK:
11029 if (!validate_arg (arg1, INTEGER_TYPE)
11030 || TREE_SIDE_EFFECTS (arg1))
11031 return NULL_TREE;
11032 else
11033 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, NULL_TREE,
11034 ignore, fcode);
11035
11036 case BUILT_IN_EXPECT:
11037 return fold_builtin_expect (loc, arg0, arg1, arg2);
11038
11039 default:
11040 break;
11041 }
11042 return NULL_TREE;
11043 }
11044
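/* Note on the BCOPY case above: bcopy (src, dst, n) takes its operands
   in the opposite order from memmove, so ARG1 and ARG0 are swapped
   before reusing the memmove-style (endp == 3) memory-op folding.  */
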
11045 /* Fold a call to built-in function FNDECL with 4 arguments, ARG0, ARG1,
11046 ARG2, and ARG3. IGNORE is true if the result of the function call is
11047 ignored. This function returns NULL_TREE if no simplification was
11048 possible. */
11049
11050 static tree
11051 fold_builtin_4 (location_t loc, tree fndecl,
11052 tree arg0, tree arg1, tree arg2, tree arg3, bool ignore)
11053 {
11054 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11055
11056 switch (fcode)
11057 {
11058 case BUILT_IN_MEMCPY_CHK:
11059 case BUILT_IN_MEMPCPY_CHK:
11060 case BUILT_IN_MEMMOVE_CHK:
11061 case BUILT_IN_MEMSET_CHK:
11062 return fold_builtin_memory_chk (loc, fndecl, arg0, arg1, arg2, arg3,
11063 NULL_TREE, ignore,
11064 DECL_FUNCTION_CODE (fndecl));
11065
11066 case BUILT_IN_STRNCPY_CHK:
11067 case BUILT_IN_STPNCPY_CHK:
11068 return fold_builtin_stxncpy_chk (loc, arg0, arg1, arg2, arg3, NULL_TREE,
11069 ignore, fcode);
11070
11071 case BUILT_IN_STRNCAT_CHK:
11072 return fold_builtin_strncat_chk (loc, fndecl, arg0, arg1, arg2, arg3);
11073
11074 case BUILT_IN_SNPRINTF:
11075 return fold_builtin_snprintf (loc, arg0, arg1, arg2, arg3, ignore);
11076
11077 case BUILT_IN_FPRINTF_CHK:
11078 case BUILT_IN_VFPRINTF_CHK:
11079 if (!validate_arg (arg1, INTEGER_TYPE)
11080 || TREE_SIDE_EFFECTS (arg1))
11081 return NULL_TREE;
11082 else
11083 return fold_builtin_fprintf (loc, fndecl, arg0, arg2, arg3,
11084 ignore, fcode);
11085 break;
11086
11087 default:
11088 break;
11089 }
11090 return NULL_TREE;
11091 }
11092
11093 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
11094 arguments, where NARGS <= 4. IGNORE is true if the result of the
11095 function call is ignored. This function returns NULL_TREE if no
11096 simplification was possible. Note that this only folds builtins with
11097 fixed argument patterns. Foldings that do varargs-to-varargs
11098 transformations, or that match calls with more than 4 arguments,
11099 need to be handled with fold_builtin_varargs instead. */
11100
11101 #define MAX_ARGS_TO_FOLD_BUILTIN 4
11102
11103 static tree
11104 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool ignore)
11105 {
11106 tree ret = NULL_TREE;
11107
11108 switch (nargs)
11109 {
11110 case 0:
11111 ret = fold_builtin_0 (loc, fndecl, ignore);
11112 break;
11113 case 1:
11114 ret = fold_builtin_1 (loc, fndecl, args[0], ignore);
11115 break;
11116 case 2:
11117 ret = fold_builtin_2 (loc, fndecl, args[0], args[1], ignore);
11118 break;
11119 case 3:
11120 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2], ignore);
11121 break;
11122 case 4:
11123 ret = fold_builtin_4 (loc, fndecl, args[0], args[1], args[2], args[3],
11124 ignore);
11125 break;
11126 default:
11127 break;
11128 }
11129 if (ret)
11130 {
11131 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11132 SET_EXPR_LOCATION (ret, loc);
11133 TREE_NO_WARNING (ret) = 1;
11134 return ret;
11135 }
11136 return NULL_TREE;
11137 }
11138
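/* The NOP_EXPR wrapper above hides a successful fold behind a no-op
   conversion with TREE_NO_WARNING set, so that substituting e.g. a
   constant for a call whose value is unused does not trigger
   "statement with no effect" style diagnostics.  */
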
11139 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11140 list ARGS along with N new arguments in NEWARGS. SKIP is the number
11141 of arguments in ARGS to be omitted. OLDNARGS is the number of
11142 elements in ARGS. */
11143
11144 static tree
11145 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
11146 int skip, tree fndecl, int n, va_list newargs)
11147 {
11148 int nargs = oldnargs - skip + n;
11149 tree *buffer;
11150
11151 if (n > 0)
11152 {
11153 int i, j;
11154
11155 buffer = XALLOCAVEC (tree, nargs);
11156 for (i = 0; i < n; i++)
11157 buffer[i] = va_arg (newargs, tree);
11158 for (j = skip; j < oldnargs; j++, i++)
11159 buffer[i] = args[j];
11160 }
11161 else
11162 buffer = args + skip;
11163
11164 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
11165 }
11166
11167 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
11168 list ARGS along with N new arguments specified as the "..."
11169 parameters. SKIP is the number of arguments in ARGS to be omitted.
11170 OLDNARGS is the number of elements in ARGS. */
11171
11172 static tree
11173 rewrite_call_expr_array (location_t loc, int oldnargs, tree *args,
11174 int skip, tree fndecl, int n, ...)
11175 {
11176 va_list ap;
11177 tree t;
11178
11179 va_start (ap, n);
11180 t = rewrite_call_expr_valist (loc, oldnargs, args, skip, fndecl, n, ap);
11181 va_end (ap);
11182
11183 return t;
11184 }
11185
11186 /* Return true if FNDECL shouldn't be folded right now.
11187 If a built-in function has an inline attribute always_inline
11188 wrapper, defer folding it after always_inline functions have
11189 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
11190 might not be performed. */
11191
11192 bool
11193 avoid_folding_inline_builtin (tree fndecl)
11194 {
11195 return (DECL_DECLARED_INLINE_P (fndecl)
11196 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
11197 && cfun
11198 && !cfun->always_inline_functions_inlined
11199 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
11200 }
11201
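/* Illustrative scenario (glibc with -D_FORTIFY_SOURCE assumed): memcpy
   is declared as an extern always_inline wrapper forwarding to
   __builtin___memcpy_chk.  Folding the builtin before that wrapper has
   been inlined would bypass the object-size check; that is the case the
   predicate above defers.  */
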
11202 /* A wrapper function for builtin folding that prevents warnings for
11203 "statement without effect" and the like, caused by removing the
11204 call node earlier than the warning is generated. */
11205
11206 tree
11207 fold_call_expr (location_t loc, tree exp, bool ignore)
11208 {
11209 tree ret = NULL_TREE;
11210 tree fndecl = get_callee_fndecl (exp);
11211 if (fndecl
11212 && TREE_CODE (fndecl) == FUNCTION_DECL
11213 && DECL_BUILT_IN (fndecl)
11214 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
11215 yet. Defer folding until we see all the arguments
11216 (after inlining). */
11217 && !CALL_EXPR_VA_ARG_PACK (exp))
11218 {
11219 int nargs = call_expr_nargs (exp);
11220
11221 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
11222 instead last argument is __builtin_va_arg_pack (). Defer folding
11223 even in that case, until arguments are finalized. */
11224 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
11225 {
11226 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
11227 if (fndecl2
11228 && TREE_CODE (fndecl2) == FUNCTION_DECL
11229 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11230 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11231 return NULL_TREE;
11232 }
11233
11234 if (avoid_folding_inline_builtin (fndecl))
11235 return NULL_TREE;
11236
11237 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11238 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
11239 CALL_EXPR_ARGP (exp), ignore);
11240 else
11241 {
11242 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
11243 {
11244 tree *args = CALL_EXPR_ARGP (exp);
11245 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
11246 }
11247 if (!ret)
11248 ret = fold_builtin_varargs (loc, fndecl, exp, ignore);
11249 if (ret)
11250 return ret;
11251 }
11252 }
11253 return NULL_TREE;
11254 }
11255
11256 /* Conveniently construct a function call expression. FNDECL names the
11257 function to be called and N arguments are passed in the array
11258 ARGARRAY. */
11259
11260 tree
11261 build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
11262 {
11263 tree fntype = TREE_TYPE (fndecl);
11264 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
11265
11266 return fold_builtin_call_array (loc, TREE_TYPE (fntype), fn, n, argarray);
11267 }
11268
11269 /* Conveniently construct a function call expression. FNDECL names the
11270 function to be called and the arguments are passed in the vector
11271 VEC. */
11272
11273 tree
11274 build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
11275 {
11276 return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
11277 vec_safe_address (vec));
11278 }
11279
11280
11281 /* Conveniently construct a function call expression. FNDECL names the
11282 function to be called, N is the number of arguments, and the "..."
11283 parameters are the argument expressions. */
11284
11285 tree
11286 build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
11287 {
11288 va_list ap;
11289 tree *argarray = XALLOCAVEC (tree, n);
11290 int i;
11291
11292 va_start (ap, n);
11293 for (i = 0; i < n; i++)
11294 argarray[i] = va_arg (ap, tree);
11295 va_end (ap);
11296 return build_call_expr_loc_array (loc, fndecl, n, argarray);
11297 }
11298
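/* Typical use, with FNDECL standing for some already looked-up
   declaration, e.g. the one for sin:

     tree call = build_call_expr_loc (loc, fndecl, 1, arg);

   Because this routes through fold_builtin_call_array, the result may
   already be folded, e.g. a REAL_CST rather than a CALL_EXPR.  */
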
11299 /* Like build_call_expr_loc (UNKNOWN_LOCATION, ...). Duplicated because
11300 varargs macros aren't supported by all bootstrap compilers. */
11301
11302 tree
11303 build_call_expr (tree fndecl, int n, ...)
11304 {
11305 va_list ap;
11306 tree *argarray = XALLOCAVEC (tree, n);
11307 int i;
11308
11309 va_start (ap, n);
11310 for (i = 0; i < n; i++)
11311 argarray[i] = va_arg (ap, tree);
11312 va_end (ap);
11313 return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
11314 }
11315
11316 /* Construct a CALL_EXPR with type TYPE with FN as the function expression.
11317 N arguments are passed in the array ARGARRAY. */
11318
11319 tree
11320 fold_builtin_call_array (location_t loc, tree type,
11321 tree fn,
11322 int n,
11323 tree *argarray)
11324 {
11325 tree ret = NULL_TREE;
11326 tree exp;
11327
11328 if (TREE_CODE (fn) == ADDR_EXPR)
11329 {
11330 tree fndecl = TREE_OPERAND (fn, 0);
11331 if (TREE_CODE (fndecl) == FUNCTION_DECL
11332 && DECL_BUILT_IN (fndecl))
11333 {
11334 /* If last argument is __builtin_va_arg_pack (), arguments to this
11335 function are not finalized yet. Defer folding until they are. */
11336 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
11337 {
11338 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
11339 if (fndecl2
11340 && TREE_CODE (fndecl2) == FUNCTION_DECL
11341 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
11342 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
11343 return build_call_array_loc (loc, type, fn, n, argarray);
11344 }
11345 if (avoid_folding_inline_builtin (fndecl))
11346 return build_call_array_loc (loc, type, fn, n, argarray);
11347 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
11348 {
11349 ret = targetm.fold_builtin (fndecl, n, argarray, false);
11350 if (ret)
11351 return ret;
11352
11353 return build_call_array_loc (loc, type, fn, n, argarray);
11354 }
11355 else if (n <= MAX_ARGS_TO_FOLD_BUILTIN)
11356 {
11357 /* First try the transformations that don't require consing up
11358 an exp. */
11359 ret = fold_builtin_n (loc, fndecl, argarray, n, false);
11360 if (ret)
11361 return ret;
11362 }
11363
11364 /* If we got this far, we need to build an exp. */
11365 exp = build_call_array_loc (loc, type, fn, n, argarray);
11366 ret = fold_builtin_varargs (loc, fndecl, exp, false);
11367 return ret ? ret : exp;
11368 }
11369 }
11370
11371 return build_call_array_loc (loc, type, fn, n, argarray);
11372 }
11373
11374 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
11375 along with N new arguments specified as the "..." parameters. SKIP
11376 is the number of arguments in EXP to be omitted. This function is used
11377 to do varargs-to-varargs transformations. */
11378
11379 static tree
11380 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
11381 {
11382 va_list ap;
11383 tree t;
11384
11385 va_start (ap, n);
11386 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
11387 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
11388 va_end (ap);
11389
11390 return t;
11391 }
11392
11393 /* Validate a single argument ARG against a tree code CODE representing
11394 a type. */
11395
11396 static bool
11397 validate_arg (const_tree arg, enum tree_code code)
11398 {
11399 if (!arg)
11400 return false;
11401 else if (code == POINTER_TYPE)
11402 return POINTER_TYPE_P (TREE_TYPE (arg));
11403 else if (code == INTEGER_TYPE)
11404 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
11405 return code == TREE_CODE (TREE_TYPE (arg));
11406 }
11407
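/* Examples: POINTER_TYPE accepts any pointer-typed argument and
   INTEGER_TYPE any integral type (enums and booleans included); all
   other codes must match exactly, so a _Complex float argument
   satisfies COMPLEX_TYPE but not REAL_TYPE.  */
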
11408 /* This function validates the types of a function call argument list
11409 against a specified list of tree_codes. If the last specifier is a 0,
11410    that represents an ellipsis, otherwise the last specifier must be a
11411 VOID_TYPE.
11412
11413 This is the GIMPLE version of validate_arglist. Eventually we want to
11414 completely convert builtins.c to work from GIMPLEs and the tree based
11415 validate_arglist will then be removed. */
11416
11417 bool
11418 validate_gimple_arglist (const_gimple call, ...)
11419 {
11420 enum tree_code code;
11421   bool res = false;
11422 va_list ap;
11423 const_tree arg;
11424 size_t i;
11425
11426 va_start (ap, call);
11427 i = 0;
11428
11429 do
11430 {
11431 code = (enum tree_code) va_arg (ap, int);
11432 switch (code)
11433 {
11434 case 0:
11435 	  /* This signifies an ellipsis; any further arguments are all ok.  */
11436 res = true;
11437 goto end;
11438 case VOID_TYPE:
11439 	  /* This signifies an endlink; if no arguments remain, return
11440 	     true, otherwise return false.  */
11441 res = (i == gimple_call_num_args (call));
11442 goto end;
11443 default:
11444 /* If no parameters remain or the parameter's code does not
11445 match the specified code, return false. Otherwise continue
11446 checking any remaining arguments. */
11447 arg = gimple_call_arg (call, i++);
11448 if (!validate_arg (arg, code))
11449 goto end;
11450 break;
11451 }
11452 }
11453 while (1);
11454
11455   /* We need gotos here so that va_end is reached on every path
11456      out of the loop.  */
11457 end: ;
11458 va_end (ap);
11459
11460 return res;
11461 }
11462
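/* Typical use:

     validate_gimple_arglist (call, REAL_TYPE, POINTER_TYPE, VOID_TYPE)

   accepts exactly a (double, void *)-shaped call; ending the list with
   0 instead of VOID_TYPE would additionally allow trailing varargs.  */
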
11463 /* Default target-specific builtin expander that does nothing. */
11464
11465 rtx
11466 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
11467 rtx target ATTRIBUTE_UNUSED,
11468 rtx subtarget ATTRIBUTE_UNUSED,
11469 enum machine_mode mode ATTRIBUTE_UNUSED,
11470 int ignore ATTRIBUTE_UNUSED)
11471 {
11472 return NULL_RTX;
11473 }
11474
11475 /* Returns true if EXP represents data that would potentially reside
11476 in a readonly section. */
11477
11478 static bool
11479 readonly_data_expr (tree exp)
11480 {
11481 STRIP_NOPS (exp);
11482
11483 if (TREE_CODE (exp) != ADDR_EXPR)
11484 return false;
11485
11486 exp = get_base_address (TREE_OPERAND (exp, 0));
11487 if (!exp)
11488 return false;
11489
11490 /* Make sure we call decl_readonly_section only for trees it
11491 can handle (since it returns true for everything it doesn't
11492 understand). */
11493 if (TREE_CODE (exp) == STRING_CST
11494 || TREE_CODE (exp) == CONSTRUCTOR
11495 || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
11496 return decl_readonly_section (exp, 0);
11497 else
11498 return false;
11499 }
11500
11501 /* Simplify a call to the strstr builtin. S1 and S2 are the arguments
11502 to the call, and TYPE is its return type.
11503
11504 Return NULL_TREE if no simplification was possible, otherwise return the
11505 simplified form of the call as a tree.
11506
11507 The simplified form may be a constant or other expression which
11508 computes the same value, but in a more efficient manner (including
11509 calls to other builtin functions).
11510
11511 The call may contain arguments which need to be evaluated, but
11512 which are not useful to determine the result of the call. In
11513 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11514 COMPOUND_EXPR will be an argument which must be evaluated.
11515 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11516 COMPOUND_EXPR in the chain will contain the tree for the simplified
11517 form of the builtin function call. */
11518
11519 static tree
11520 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
11521 {
11522 if (!validate_arg (s1, POINTER_TYPE)
11523 || !validate_arg (s2, POINTER_TYPE))
11524 return NULL_TREE;
11525 else
11526 {
11527 tree fn;
11528 const char *p1, *p2;
11529
11530 p2 = c_getstr (s2);
11531 if (p2 == NULL)
11532 return NULL_TREE;
11533
11534 p1 = c_getstr (s1);
11535 if (p1 != NULL)
11536 {
11537 const char *r = strstr (p1, p2);
11538 tree tem;
11539
11540 if (r == NULL)
11541 return build_int_cst (TREE_TYPE (s1), 0);
11542
11543 /* Return an offset into the constant string argument. */
11544 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11545 return fold_convert_loc (loc, type, tem);
11546 }
11547
11548 /* The argument is const char *, and the result is char *, so we need
11549 a type conversion here to avoid a warning. */
11550 if (p2[0] == '\0')
11551 return fold_convert_loc (loc, type, s1);
11552
11553 if (p2[1] != '\0')
11554 return NULL_TREE;
11555
11556 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11557 if (!fn)
11558 return NULL_TREE;
11559
11560 /* New argument list transforming strstr(s1, s2) to
11561 strchr(s1, s2[0]). */
11562 return build_call_expr_loc (loc, fn, 2, s1,
11563 build_int_cst (integer_type_node, p2[0]));
11564 }
11565 }
11566
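/* Illustrative folds performed above (host strstr semantics assumed for
   the constant case):

     strstr ("hello", "ll")  ==>  "hello" + 2
     strstr (s, "")          ==>  (char *) s
     strstr (s, "a")         ==>  strchr (s, 'a')  */
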
11567 /* Simplify a call to the strchr builtin. S1 and S2 are the arguments to
11568 the call, and TYPE is its return type.
11569
11570 Return NULL_TREE if no simplification was possible, otherwise return the
11571 simplified form of the call as a tree.
11572
11573 The simplified form may be a constant or other expression which
11574 computes the same value, but in a more efficient manner (including
11575 calls to other builtin functions).
11576
11577 The call may contain arguments which need to be evaluated, but
11578 which are not useful to determine the result of the call. In
11579 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11580 COMPOUND_EXPR will be an argument which must be evaluated.
11581 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11582 COMPOUND_EXPR in the chain will contain the tree for the simplified
11583 form of the builtin function call. */
11584
11585 static tree
11586 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
11587 {
11588 if (!validate_arg (s1, POINTER_TYPE)
11589 || !validate_arg (s2, INTEGER_TYPE))
11590 return NULL_TREE;
11591 else
11592 {
11593 const char *p1;
11594
11595 if (TREE_CODE (s2) != INTEGER_CST)
11596 return NULL_TREE;
11597
11598 p1 = c_getstr (s1);
11599 if (p1 != NULL)
11600 {
11601 char c;
11602 const char *r;
11603 tree tem;
11604
11605 if (target_char_cast (s2, &c))
11606 return NULL_TREE;
11607
11608 r = strchr (p1, c);
11609
11610 if (r == NULL)
11611 return build_int_cst (TREE_TYPE (s1), 0);
11612
11613 /* Return an offset into the constant string argument. */
11614 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11615 return fold_convert_loc (loc, type, tem);
11616 }
11617 return NULL_TREE;
11618 }
11619 }
11620
11621 /* Simplify a call to the strrchr builtin. S1 and S2 are the arguments to
11622 the call, and TYPE is its return type.
11623
11624 Return NULL_TREE if no simplification was possible, otherwise return the
11625 simplified form of the call as a tree.
11626
11627 The simplified form may be a constant or other expression which
11628 computes the same value, but in a more efficient manner (including
11629 calls to other builtin functions).
11630
11631 The call may contain arguments which need to be evaluated, but
11632 which are not useful to determine the result of the call. In
11633 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11634 COMPOUND_EXPR will be an argument which must be evaluated.
11635 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11636 COMPOUND_EXPR in the chain will contain the tree for the simplified
11637 form of the builtin function call. */
11638
11639 static tree
11640 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
11641 {
11642 if (!validate_arg (s1, POINTER_TYPE)
11643 || !validate_arg (s2, INTEGER_TYPE))
11644 return NULL_TREE;
11645 else
11646 {
11647 tree fn;
11648 const char *p1;
11649
11650 if (TREE_CODE (s2) != INTEGER_CST)
11651 return NULL_TREE;
11652
11653 p1 = c_getstr (s1);
11654 if (p1 != NULL)
11655 {
11656 char c;
11657 const char *r;
11658 tree tem;
11659
11660 if (target_char_cast (s2, &c))
11661 return NULL_TREE;
11662
11663 r = strrchr (p1, c);
11664
11665 if (r == NULL)
11666 return build_int_cst (TREE_TYPE (s1), 0);
11667
11668 /* Return an offset into the constant string argument. */
11669 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11670 return fold_convert_loc (loc, type, tem);
11671 }
11672
11673 if (! integer_zerop (s2))
11674 return NULL_TREE;
11675
11676 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11677 if (!fn)
11678 return NULL_TREE;
11679
11680 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
11681 return build_call_expr_loc (loc, fn, 2, s1, s2);
11682 }
11683 }
11684
11685 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
11686 to the call, and TYPE is its return type.
11687
11688 Return NULL_TREE if no simplification was possible, otherwise return the
11689 simplified form of the call as a tree.
11690
11691 The simplified form may be a constant or other expression which
11692 computes the same value, but in a more efficient manner (including
11693 calls to other builtin functions).
11694
11695 The call may contain arguments which need to be evaluated, but
11696 which are not useful to determine the result of the call. In
11697 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11698 COMPOUND_EXPR will be an argument which must be evaluated.
11699 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11700 COMPOUND_EXPR in the chain will contain the tree for the simplified
11701 form of the builtin function call. */
11702
11703 static tree
11704 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
11705 {
11706 if (!validate_arg (s1, POINTER_TYPE)
11707 || !validate_arg (s2, POINTER_TYPE))
11708 return NULL_TREE;
11709 else
11710 {
11711 tree fn;
11712 const char *p1, *p2;
11713
11714 p2 = c_getstr (s2);
11715 if (p2 == NULL)
11716 return NULL_TREE;
11717
11718 p1 = c_getstr (s1);
11719 if (p1 != NULL)
11720 {
11721 const char *r = strpbrk (p1, p2);
11722 tree tem;
11723
11724 if (r == NULL)
11725 return build_int_cst (TREE_TYPE (s1), 0);
11726
11727 /* Return an offset into the constant string argument. */
11728 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
11729 return fold_convert_loc (loc, type, tem);
11730 }
11731
11732 if (p2[0] == '\0')
11733 /* strpbrk(x, "") == NULL.
11734 Evaluate and ignore s1 in case it had side-effects. */
11735 return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
11736
11737 if (p2[1] != '\0')
11738 return NULL_TREE; /* Really call strpbrk. */
11739
11740 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
11741 if (!fn)
11742 return NULL_TREE;
11743
11744 /* New argument list transforming strpbrk(s1, s2) to
11745 strchr(s1, s2[0]). */
11746 return build_call_expr_loc (loc, fn, 2, s1,
11747 build_int_cst (integer_type_node, p2[0]));
11748 }
11749 }
11750
11751 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
11752 to the call.
11753
11754 Return NULL_TREE if no simplification was possible, otherwise return the
11755 simplified form of the call as a tree.
11756
11757 The simplified form may be a constant or other expression which
11758 computes the same value, but in a more efficient manner (including
11759 calls to other builtin functions).
11760
11761 The call may contain arguments which need to be evaluated, but
11762 which are not useful to determine the result of the call. In
11763 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11764 COMPOUND_EXPR will be an argument which must be evaluated.
11765 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11766 COMPOUND_EXPR in the chain will contain the tree for the simplified
11767 form of the builtin function call. */
11768
11769 tree
11770 fold_builtin_strcat (location_t loc ATTRIBUTE_UNUSED, tree dst, tree src,
11771 tree len)
11772 {
11773 if (!validate_arg (dst, POINTER_TYPE)
11774 || !validate_arg (src, POINTER_TYPE))
11775 return NULL_TREE;
11776 else
11777 {
11778 const char *p = c_getstr (src);
11779
11780 /* If the string length is zero, return the dst parameter. */
11781 if (p && *p == '\0')
11782 return dst;
11783
11784 if (optimize_insn_for_speed_p ())
11785 {
11786 /* See if we can store by pieces into (dst + strlen(dst)). */
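/* This rewrites strcat (dst, src) as the equivalent of
   memcpy (dst + strlen (dst), src, strlen (src) + 1) followed by
   returning dst, so a constant SRC can be stored by pieces. */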
11787 tree newdst, call;
11788 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11789 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
11790
11791 if (!strlen_fn || !memcpy_fn)
11792 return NULL_TREE;
11793
11794 /* If the length of the source string isn't computable don't
11795 split strcat into strlen and memcpy. */
11796 if (! len)
11797 len = c_strlen (src, 1);
11798 if (! len || TREE_SIDE_EFFECTS (len))
11799 return NULL_TREE;
11800
11801 /* Stabilize the argument list. */
11802 dst = builtin_save_expr (dst);
11803
11804 /* Create strlen (dst). */
11805 newdst = build_call_expr_loc (loc, strlen_fn, 1, dst);
11806 /* Create (dst p+ strlen (dst)). */
11807
11808 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
11809 newdst = builtin_save_expr (newdst);
11810
11811 len = fold_convert_loc (loc, size_type_node, len);
11812 len = size_binop_loc (loc, PLUS_EXPR, len,
11813 build_int_cst (size_type_node, 1));
11814
11815 call = build_call_expr_loc (loc, memcpy_fn, 3, newdst, src, len);
11816 return build2 (COMPOUND_EXPR, TREE_TYPE (dst), call, dst);
11817 }
11818 return NULL_TREE;
11819 }
11820 }
11821
11822 /* Simplify a call to the strncat builtin. DST, SRC, and LEN are the
11823 arguments to the call.
11824
11825 Return NULL_TREE if no simplification was possible, otherwise return the
11826 simplified form of the call as a tree.
11827
11828 The simplified form may be a constant or other expression which
11829 computes the same value, but in a more efficient manner (including
11830 calls to other builtin functions).
11831
11832 The call may contain arguments which need to be evaluated, but
11833 which are not useful to determine the result of the call. In
11834 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11835 COMPOUND_EXPR will be an argument which must be evaluated.
11836 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11837 COMPOUND_EXPR in the chain will contain the tree for the simplified
11838 form of the builtin function call. */
11839
11840 static tree
11841 fold_builtin_strncat (location_t loc, tree dst, tree src, tree len)
11842 {
11843 if (!validate_arg (dst, POINTER_TYPE)
11844 || !validate_arg (src, POINTER_TYPE)
11845 || !validate_arg (len, INTEGER_TYPE))
11846 return NULL_TREE;
11847 else
11848 {
11849 const char *p = c_getstr (src);
11850
11851 /* If the requested length is zero, or the src parameter string
11852 length is zero, return the dst parameter. */
11853 if (integer_zerop (len) || (p && *p == '\0'))
11854 return omit_two_operands_loc (loc, TREE_TYPE (dst), dst, src, len);
11855
11856 /* If the requested len is greater than or equal to the string
11857 length, call strcat. */
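/* e.g. strncat (d, "abc", 5) is equivalent to strcat (d, "abc"). */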
11858 if (TREE_CODE (len) == INTEGER_CST && p
11859 && compare_tree_int (len, strlen (p)) >= 0)
11860 {
11861 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
11862
11863 /* If the replacement _DECL isn't initialized, don't do the
11864 transformation. */
11865 if (!fn)
11866 return NULL_TREE;
11867
11868 return build_call_expr_loc (loc, fn, 2, dst, src);
11869 }
11870 return NULL_TREE;
11871 }
11872 }
11873
11874 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
11875 to the call.
11876
11877 Return NULL_TREE if no simplification was possible, otherwise return the
11878 simplified form of the call as a tree.
11879
11880 The simplified form may be a constant or other expression which
11881 computes the same value, but in a more efficient manner (including
11882 calls to other builtin functions).
11883
11884 The call may contain arguments which need to be evaluated, but
11885 which are not useful to determine the result of the call. In
11886 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11887 COMPOUND_EXPR will be an argument which must be evaluated.
11888 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11889 COMPOUND_EXPR in the chain will contain the tree for the simplified
11890 form of the builtin function call. */
11891
11892 static tree
11893 fold_builtin_strspn (location_t loc, tree s1, tree s2)
11894 {
11895 if (!validate_arg (s1, POINTER_TYPE)
11896 || !validate_arg (s2, POINTER_TYPE))
11897 return NULL_TREE;
11898 else
11899 {
11900 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11901
11902 /* If both arguments are constants, evaluate at compile-time. */
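/* e.g. strspn ("abcba", "ab") folds to the constant 2. */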
11903 if (p1 && p2)
11904 {
11905 const size_t r = strspn (p1, p2);
11906 return build_int_cst (size_type_node, r);
11907 }
11908
11909 /* If either argument is "", the result is 0. */
11910 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
11911 /* Evaluate and ignore both arguments in case either one has
11912 side-effects. */
11913 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
11914 s1, s2);
11915 return NULL_TREE;
11916 }
11917 }
11918
11919 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
11920 to the call.
11921
11922 Return NULL_TREE if no simplification was possible, otherwise return the
11923 simplified form of the call as a tree.
11924
11925 The simplified form may be a constant or other expression which
11926 computes the same value, but in a more efficient manner (including
11927 calls to other builtin functions).
11928
11929 The call may contain arguments which need to be evaluated, but
11930 which are not useful to determine the result of the call. In
11931 this case we return a chain of COMPOUND_EXPRs. The LHS of each
11932 COMPOUND_EXPR will be an argument which must be evaluated.
11933 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
11934 COMPOUND_EXPR in the chain will contain the tree for the simplified
11935 form of the builtin function call. */
11936
11937 static tree
11938 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
11939 {
11940 if (!validate_arg (s1, POINTER_TYPE)
11941 || !validate_arg (s2, POINTER_TYPE))
11942 return NULL_TREE;
11943 else
11944 {
11945 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
11946
11947 /* If both arguments are constants, evaluate at compile-time. */
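/* e.g. strcspn ("abcba", "c") folds to the constant 2. */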
11948 if (p1 && p2)
11949 {
11950 const size_t r = strcspn (p1, p2);
11951 return build_int_cst (size_type_node, r);
11952 }
11953
11954 /* If the first argument is "", the result is 0. */
11955 if (p1 && *p1 == '\0')
11956 {
11957 /* Evaluate and ignore argument s2 in case it has
11958 side-effects. */
11959 return omit_one_operand_loc (loc, size_type_node,
11960 size_zero_node, s2);
11961 }
11962
11963 /* If the second argument is "", return __builtin_strlen(s1). */
11964 if (p2 && *p2 == '\0')
11965 {
11966 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
11967
11968 /* If the replacement _DECL isn't initialized, don't do the
11969 transformation. */
11970 if (!fn)
11971 return NULL_TREE;
11972
11973 return build_call_expr_loc (loc, fn, 1, s1);
11974 }
11975 return NULL_TREE;
11976 }
11977 }
11978
11979 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
11980 to the call. IGNORE is true if the value returned
11981 by the builtin will be ignored. UNLOCKED is true if this is
11982 actually a call to fputs_unlocked. If LEN is non-NULL, it represents
11983 the known length of the string. Return NULL_TREE if no simplification
11984 was possible. */
11985
11986 tree
11987 fold_builtin_fputs (location_t loc, tree arg0, tree arg1,
11988 bool ignore, bool unlocked, tree len)
11989 {
11990 /* If we're using an unlocked function, assume the other unlocked
11991 functions exist explicitly. */
11992 tree const fn_fputc = (unlocked
11993 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
11994 : builtin_decl_implicit (BUILT_IN_FPUTC));
11995 tree const fn_fwrite = (unlocked
11996 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
11997 : builtin_decl_implicit (BUILT_IN_FWRITE));
11998
11999 /* If the return value is used, don't do the transformation. */
12000 if (!ignore)
12001 return NULL_TREE;
12002
12003 /* Verify the arguments in the original call. */
12004 if (!validate_arg (arg0, POINTER_TYPE)
12005 || !validate_arg (arg1, POINTER_TYPE))
12006 return NULL_TREE;
12007
12008 if (! len)
12009 len = c_strlen (arg0, 0);
12010
12011 /* Get the length of the string passed to fputs. If the length
12012 can't be determined, punt. */
12013 if (!len
12014 || TREE_CODE (len) != INTEGER_CST)
12015 return NULL_TREE;
12016
12017 switch (compare_tree_int (len, 1))
12018 {
12019 case -1: /* length is 0, delete the call entirely. */
12020 return omit_one_operand_loc (loc, integer_type_node,
12021 integer_zero_node, arg1);
12022
12023 case 0: /* length is 1, call fputc. */
12024 {
12025 const char *p = c_getstr (arg0);
12026
12027 if (p != NULL)
12028 {
12029 if (fn_fputc)
12030 return build_call_expr_loc (loc, fn_fputc, 2,
12031 build_int_cst
12032 (integer_type_node, p[0]), arg1);
12033 else
12034 return NULL_TREE;
12035 }
12036 }
12037 /* FALLTHROUGH */
12038 case 1: /* length is greater than 1, call fwrite. */
12039 {
12040 /* If optimizing for size, keep fputs. */
12041 if (optimize_function_for_size_p (cfun))
12042 return NULL_TREE;
12043 /* New argument list transforming fputs(string, stream) to
12044 fwrite(string, 1, len, stream). */
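/* e.g. fputs ("hello", f) becomes fwrite ("hello", 1, 5, f). */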
12045 if (fn_fwrite)
12046 return build_call_expr_loc (loc, fn_fwrite, 4, arg0,
12047 size_one_node, len, arg1);
12048 else
12049 return NULL_TREE;
12050 }
12051 default:
12052 gcc_unreachable ();
12053 }
12054 return NULL_TREE;
12055 }
12056
12057 /* Fold the next_arg or va_start call EXP. Return true if an error or
12058 warning was produced, false otherwise; returning a flag lets us avoid
12059 emitting the same diagnostic two or three times. */
12060
12061 bool
12062 fold_builtin_next_arg (tree exp, bool va_start_p)
12063 {
12064 tree fntype = TREE_TYPE (current_function_decl);
12065 int nargs = call_expr_nargs (exp);
12066 tree arg;
12067 /* There is a good chance the current input_location points inside the
12068 definition of the va_start macro (perhaps on the token for the
12069 builtin) in a system header, so warnings will not be emitted.
12070 Use the location in real source code. */
12071 source_location current_location =
12072 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
12073 NULL);
12074
12075 if (!stdarg_p (fntype))
12076 {
12077 error ("%<va_start%> used in function with fixed args");
12078 return true;
12079 }
12080
12081 if (va_start_p)
12082 {
12083 if (nargs != 2)
12084 {
12085 error ("wrong number of arguments to function %<va_start%>");
12086 return true;
12087 }
12088 arg = CALL_EXPR_ARG (exp, 1);
12089 }
12090 /* We use __builtin_va_start (ap, 0) or __builtin_next_arg (0)
12091 once we have checked the arguments and, if needed, issued a warning. */
12092 else
12093 {
12094 if (nargs == 0)
12095 {
12096 /* Evidently an out of date version of <stdarg.h>; can't validate
12097 va_start's second argument, but can still work as intended. */
12098 warning_at (current_location,
12099 OPT_Wvarargs,
12100 "%<__builtin_next_arg%> called without an argument");
12101 return true;
12102 }
12103 else if (nargs > 1)
12104 {
12105 error ("wrong number of arguments to function %<__builtin_next_arg%>");
12106 return true;
12107 }
12108 arg = CALL_EXPR_ARG (exp, 0);
12109 }
12110
12111 if (TREE_CODE (arg) == SSA_NAME)
12112 arg = SSA_NAME_VAR (arg);
12113
12114 /* We destructively modify the call to be __builtin_va_start (ap, 0)
12115 or __builtin_next_arg (0) the first time we see it, after checking
12116 the arguments and if needed issuing a warning. */
12117 if (!integer_zerop (arg))
12118 {
12119 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
12120
12121 /* Strip off all nops for the sake of the comparison. This
12122 is not quite the same as STRIP_NOPS. It does more.
12123 We must also strip off INDIRECT_EXPR for C++ reference
12124 parameters. */
12125 while (CONVERT_EXPR_P (arg)
12126 || TREE_CODE (arg) == INDIRECT_REF)
12127 arg = TREE_OPERAND (arg, 0);
12128 if (arg != last_parm)
12129 {
12130 /* FIXME: Sometimes the tree optimizers can hand us something
12131 other than the last argument even though the user did use the
12132 last argument. We only warn here and leave the argument alone,
12133 so wrong code may still be generated because of it. */
12135 warning_at (current_location,
12136 OPT_Wvarargs,
12137 "second parameter of %<va_start%> not last named argument");
12138 }
12139
12140 /* Undefined by C99 7.15.1.4p4 (va_start):
12141 "If the parameter parmN is declared with the register storage
12142 class, with a function or array type, or with a type that is
12143 not compatible with the type that results after application of
12144 the default argument promotions, the behavior is undefined."
12145 */
12146 else if (DECL_REGISTER (arg))
12147 {
12148 warning_at (current_location,
12149 OPT_Wvarargs,
12150 "undefined behaviour when second parameter of "
12151 "%<va_start%> is declared with %<register%> storage");
12152 }
12153
12154 /* We want to verify the second parameter just once before the tree
12155 optimizers are run and then avoid keeping it in the tree,
12156 as otherwise we could warn even for correct code like:
12157 void foo (int i, ...)
12158 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
12159 if (va_start_p)
12160 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
12161 else
12162 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
12163 }
12164 return false;
12165 }
12166
12167
12168 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
12169 ORIG may be null if this is a 2-argument call. We don't attempt to
12170 simplify calls with more than 3 arguments.
12171
12172 Return NULL_TREE if no simplification was possible, otherwise return the
12173 simplified form of the call as a tree. If IGNORED is true, it means that
12174 the caller does not use the returned value of the function. */
12175
12176 static tree
12177 fold_builtin_sprintf (location_t loc, tree dest, tree fmt,
12178 tree orig, int ignored)
12179 {
12180 tree call, retval;
12181 const char *fmt_str = NULL;
12182
12183 /* Verify the required arguments in the original call. We deal with two
12184 types of sprintf() calls: 'sprintf (str, fmt)' and
12185 'sprintf (dest, "%s", orig)'. */
12186 if (!validate_arg (dest, POINTER_TYPE)
12187 || !validate_arg (fmt, POINTER_TYPE))
12188 return NULL_TREE;
12189 if (orig && !validate_arg (orig, POINTER_TYPE))
12190 return NULL_TREE;
12191
12192 /* Check whether the format is a literal string constant. */
12193 fmt_str = c_getstr (fmt);
12194 if (fmt_str == NULL)
12195 return NULL_TREE;
12196
12197 call = NULL_TREE;
12198 retval = NULL_TREE;
12199
12200 if (!init_target_chars ())
12201 return NULL_TREE;
12202
12203 /* If the format doesn't contain % args or %%, use strcpy. */
12204 if (strchr (fmt_str, target_percent) == NULL)
12205 {
12206 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12207
12208 if (!fn)
12209 return NULL_TREE;
12210
12211 /* Don't optimize sprintf (buf, "abc", ptr++). */
12212 if (orig)
12213 return NULL_TREE;
12214
12215 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
12216 'format' is known to contain no % formats. */
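/* e.g. sprintf (str, "abc") becomes strcpy (str, "abc"); if the
result is used, it folds to the constant 3. */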
12217 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12218 if (!ignored)
12219 retval = build_int_cst (integer_type_node, strlen (fmt_str));
12220 }
12221
12222 /* If the format is "%s", use strcpy if the result isn't used. */
12223 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
12224 {
12225 tree fn;
12226 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12227
12228 if (!fn)
12229 return NULL_TREE;
12230
12231 /* Don't crash on sprintf (str1, "%s"). */
12232 if (!orig)
12233 return NULL_TREE;
12234
12235 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
12236 if (!ignored)
12237 {
12238 retval = c_strlen (orig, 1);
12239 if (!retval || TREE_CODE (retval) != INTEGER_CST)
12240 return NULL_TREE;
12241 }
12242 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12243 }
12244
12245 if (call && retval)
12246 {
12247 retval = fold_convert_loc
12248 (loc, TREE_TYPE (TREE_TYPE (builtin_decl_implicit (BUILT_IN_SPRINTF))),
12249 retval);
12250 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12251 }
12252 else
12253 return call;
12254 }
12255
12256 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
12257 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
12258 attempt to simplify calls with more than 4 arguments.
12259
12260 Return NULL_TREE if no simplification was possible, otherwise return the
12261 simplified form of the call as a tree. If IGNORED is true, it means that
12262 the caller does not use the returned value of the function. */
12263
12264 static tree
12265 fold_builtin_snprintf (location_t loc, tree dest, tree destsize, tree fmt,
12266 tree orig, int ignored)
12267 {
12268 tree call, retval;
12269 const char *fmt_str = NULL;
12270 unsigned HOST_WIDE_INT destlen;
12271
12272 /* Verify the required arguments in the original call. We deal with two
12273 types of snprintf() calls: 'snprintf (str, cst, fmt)' and
12274 'snprintf (dest, cst, "%s", orig)'. */
12275 if (!validate_arg (dest, POINTER_TYPE)
12276 || !validate_arg (destsize, INTEGER_TYPE)
12277 || !validate_arg (fmt, POINTER_TYPE))
12278 return NULL_TREE;
12279 if (orig && !validate_arg (orig, POINTER_TYPE))
12280 return NULL_TREE;
12281
12282 if (!tree_fits_uhwi_p (destsize))
12283 return NULL_TREE;
12284
12285 /* Check whether the format is a literal string constant. */
12286 fmt_str = c_getstr (fmt);
12287 if (fmt_str == NULL)
12288 return NULL_TREE;
12289
12290 call = NULL_TREE;
12291 retval = NULL_TREE;
12292
12293 if (!init_target_chars ())
12294 return NULL_TREE;
12295
12296 destlen = tree_to_uhwi (destsize);
12297
12298 /* If the format doesn't contain % args or %%, use strcpy. */
12299 if (strchr (fmt_str, target_percent) == NULL)
12300 {
12301 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12302 size_t len = strlen (fmt_str);
12303
12304 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
12305 if (orig)
12306 return NULL_TREE;
12307
12308 /* We could expand this as
12309 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
12310 or to
12311 memcpy (str, fmt_with_nul_at_cstm1, cst);
12312 but in the former case that might increase code size
12313 and in the latter case grow .rodata section too much.
12314 So punt for now. */
12315 if (len >= destlen)
12316 return NULL_TREE;
12317
12318 if (!fn)
12319 return NULL_TREE;
12320
12321 /* Convert snprintf (str, cst, fmt) into strcpy (str, fmt) when
12322 'format' is known to contain no % formats and
12323 strlen (fmt) < cst. */
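/* e.g. snprintf (str, 8, "abc") becomes strcpy (str, "abc"); if the
result is used, it folds to the constant 3. */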
12324 call = build_call_expr_loc (loc, fn, 2, dest, fmt);
12325
12326 if (!ignored)
12327 retval = build_int_cst (integer_type_node, strlen (fmt_str));
12328 }
12329
12330 /* If the format is "%s", use strcpy if the result isn't used. */
12331 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
12332 {
12333 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
12334 unsigned HOST_WIDE_INT origlen;
12335
12336 /* Don't crash on snprintf (str1, cst, "%s"). */
12337 if (!orig)
12338 return NULL_TREE;
12339
12340 retval = c_strlen (orig, 1);
12341 if (!retval || !tree_fits_uhwi_p (retval))
12342 return NULL_TREE;
12343
12344 origlen = tree_to_uhwi (retval);
12345 /* We could expand this as
12346 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
12347 or to
12348 memcpy (str1, str2_with_nul_at_cstm1, cst);
12349 but in the former case that might increase code size
12350 and in the latter case grow .rodata section too much.
12351 So punt for now. */
12352 if (origlen >= destlen)
12353 return NULL_TREE;
12354
12355 /* Convert snprintf (str1, cst, "%s", str2) into
12356 strcpy (str1, str2) if strlen (str2) < cst. */
12357 if (!fn)
12358 return NULL_TREE;
12359
12360 call = build_call_expr_loc (loc, fn, 2, dest, orig);
12361
12362 if (ignored)
12363 retval = NULL_TREE;
12364 }
12365
12366 if (call && retval)
12367 {
12368 tree fn = builtin_decl_explicit (BUILT_IN_SNPRINTF);
12369 retval = fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fn)), retval);
12370 return build2 (COMPOUND_EXPR, TREE_TYPE (retval), call, retval);
12371 }
12372 else
12373 return call;
12374 }
12375
12376 /* Expand a call EXP to __builtin_object_size. */
12377
12378 rtx
12379 expand_builtin_object_size (tree exp)
12380 {
12381 tree ost;
12382 int object_size_type;
12383 tree fndecl = get_callee_fndecl (exp);
12384
12385 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
12386 {
12387 error ("%Kfirst argument of %D must be a pointer, second integer constant",
12388 exp, fndecl);
12389 expand_builtin_trap ();
12390 return const0_rtx;
12391 }
12392
12393 ost = CALL_EXPR_ARG (exp, 1);
12394 STRIP_NOPS (ost);
12395
12396 if (TREE_CODE (ost) != INTEGER_CST
12397 || tree_int_cst_sgn (ost) < 0
12398 || compare_tree_int (ost, 3) > 0)
12399 {
12400 error ("%Klast argument of %D is not integer constant between 0 and 3",
12401 exp, fndecl);
12402 expand_builtin_trap ();
12403 return const0_rtx;
12404 }
12405
12406 object_size_type = tree_to_shwi (ost);
12407
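/* No object information survived to expansion time, so return the
unknown-size answers: (size_t) -1 for types 0 and 1, and (size_t) 0
for types 2 and 3. */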
12408 return object_size_type < 2 ? constm1_rtx : const0_rtx;
12409 }
12410
12411 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12412 FCODE is the BUILT_IN_* to use.
12413 Return NULL_RTX if we failed; the caller should emit a normal call,
12414 otherwise try to get the result in TARGET, if convenient (and in
12415 mode MODE if that's convenient). */
12416
12417 static rtx
12418 expand_builtin_memory_chk (tree exp, rtx target, enum machine_mode mode,
12419 enum built_in_function fcode)
12420 {
12421 tree dest, src, len, size;
12422
12423 if (!validate_arglist (exp,
12424 POINTER_TYPE,
12425 fcode == BUILT_IN_MEMSET_CHK
12426 ? INTEGER_TYPE : POINTER_TYPE,
12427 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
12428 return NULL_RTX;
12429
12430 dest = CALL_EXPR_ARG (exp, 0);
12431 src = CALL_EXPR_ARG (exp, 1);
12432 len = CALL_EXPR_ARG (exp, 2);
12433 size = CALL_EXPR_ARG (exp, 3);
12434
12435 if (! tree_fits_uhwi_p (size))
12436 return NULL_RTX;
12437
12438 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
12439 {
12440 tree fn;
12441
12442 if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
12443 {
12444 warning_at (tree_nonartificial_location (exp),
12445 0, "%Kcall to %D will always overflow destination buffer",
12446 exp, get_callee_fndecl (exp));
12447 return NULL_RTX;
12448 }
12449
12450 fn = NULL_TREE;
12451 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12452 mem{cpy,pcpy,move,set} is available. */
12453 switch (fcode)
12454 {
12455 case BUILT_IN_MEMCPY_CHK:
12456 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
12457 break;
12458 case BUILT_IN_MEMPCPY_CHK:
12459 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
12460 break;
12461 case BUILT_IN_MEMMOVE_CHK:
12462 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
12463 break;
12464 case BUILT_IN_MEMSET_CHK:
12465 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
12466 break;
12467 default:
12468 break;
12469 }
12470
12471 if (! fn)
12472 return NULL_RTX;
12473
12474 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
12475 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12476 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12477 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12478 }
12479 else if (fcode == BUILT_IN_MEMSET_CHK)
12480 return NULL_RTX;
12481 else
12482 {
12483 unsigned int dest_align = get_pointer_alignment (dest);
12484
12485 /* If DEST is not a pointer type, call the normal function. */
12486 if (dest_align == 0)
12487 return NULL_RTX;
12488
12489 /* If SRC and DEST are the same (and not volatile), do nothing. */
12490 if (operand_equal_p (src, dest, 0))
12491 {
12492 tree expr;
12493
12494 if (fcode != BUILT_IN_MEMPCPY_CHK)
12495 {
12496 /* Evaluate and ignore LEN in case it has side-effects. */
12497 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
12498 return expand_expr (dest, target, mode, EXPAND_NORMAL);
12499 }
12500
12501 expr = fold_build_pointer_plus (dest, len);
12502 return expand_expr (expr, target, mode, EXPAND_NORMAL);
12503 }
12504
12505 /* __memmove_chk special case. */
12506 if (fcode == BUILT_IN_MEMMOVE_CHK)
12507 {
12508 unsigned int src_align = get_pointer_alignment (src);
12509
12510 if (src_align == 0)
12511 return NULL_RTX;
12512
12513 /* If src is categorized for a readonly section we can use
12514 normal __memcpy_chk. */
12515 if (readonly_data_expr (src))
12516 {
12517 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12518 if (!fn)
12519 return NULL_RTX;
12520 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
12521 dest, src, len, size);
12522 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12523 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12524 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12525 }
12526 }
12527 return NULL_RTX;
12528 }
12529 }
12530
12531 /* Emit warning if a buffer overflow is detected at compile time. */
12532
12533 static void
12534 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
12535 {
12536 int is_strlen = 0;
12537 tree len, size;
12538 location_t loc = tree_nonartificial_location (exp);
12539
12540 switch (fcode)
12541 {
12542 case BUILT_IN_STRCPY_CHK:
12543 case BUILT_IN_STPCPY_CHK:
12544 /* For __strcat_chk the warning will be emitted only if overflowing
12545 by at least strlen (dest) + 1 bytes. */
12546 case BUILT_IN_STRCAT_CHK:
12547 len = CALL_EXPR_ARG (exp, 1);
12548 size = CALL_EXPR_ARG (exp, 2);
12549 is_strlen = 1;
12550 break;
12551 case BUILT_IN_STRNCAT_CHK:
12552 case BUILT_IN_STRNCPY_CHK:
12553 case BUILT_IN_STPNCPY_CHK:
12554 len = CALL_EXPR_ARG (exp, 2);
12555 size = CALL_EXPR_ARG (exp, 3);
12556 break;
12557 case BUILT_IN_SNPRINTF_CHK:
12558 case BUILT_IN_VSNPRINTF_CHK:
12559 len = CALL_EXPR_ARG (exp, 1);
12560 size = CALL_EXPR_ARG (exp, 3);
12561 break;
12562 default:
12563 gcc_unreachable ();
12564 }
12565
12566 if (!len || !size)
12567 return;
12568
12569 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
12570 return;
12571
12572 if (is_strlen)
12573 {
12574 len = c_strlen (len, 1);
12575 if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
12576 return;
12577 }
12578 else if (fcode == BUILT_IN_STRNCAT_CHK)
12579 {
12580 tree src = CALL_EXPR_ARG (exp, 1);
12581 if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
12582 return;
12583 src = c_strlen (src, 1);
12584 if (! src || ! tree_fits_uhwi_p (src))
12585 {
12586 warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
12587 exp, get_callee_fndecl (exp));
12588 return;
12589 }
12590 else if (tree_int_cst_lt (src, size))
12591 return;
12592 }
12593 else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
12594 return;
12595
12596 warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
12597 exp, get_callee_fndecl (exp));
12598 }
12599
12600 /* Emit warning if a buffer overflow is detected at compile time
12601 in __sprintf_chk/__vsprintf_chk calls. */
12602
12603 static void
12604 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
12605 {
12606 tree size, len, fmt;
12607 const char *fmt_str;
12608 int nargs = call_expr_nargs (exp);
12609
12610 /* Verify the required arguments in the original call. */
12611
12612 if (nargs < 4)
12613 return;
12614 size = CALL_EXPR_ARG (exp, 2);
12615 fmt = CALL_EXPR_ARG (exp, 3);
12616
12617 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
12618 return;
12619
12620 /* Check whether the format is a literal string constant. */
12621 fmt_str = c_getstr (fmt);
12622 if (fmt_str == NULL)
12623 return;
12624
12625 if (!init_target_chars ())
12626 return;
12627
12628 /* If the format doesn't contain % args or %%, we know its size. */
12629 if (strchr (fmt_str, target_percent) == 0)
12630 len = build_int_cstu (size_type_node, strlen (fmt_str));
12631 /* If the format is "%s" and first ... argument is a string literal,
12632 we know it too. */
12633 else if (fcode == BUILT_IN_SPRINTF_CHK
12634 && strcmp (fmt_str, target_percent_s) == 0)
12635 {
12636 tree arg;
12637
12638 if (nargs < 5)
12639 return;
12640 arg = CALL_EXPR_ARG (exp, 4);
12641 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
12642 return;
12643
12644 len = c_strlen (arg, 1);
12645 if (!len || ! tree_fits_uhwi_p (len))
12646 return;
12647 }
12648 else
12649 return;
12650
12651 if (! tree_int_cst_lt (len, size))
12652 warning_at (tree_nonartificial_location (exp),
12653 0, "%Kcall to %D will always overflow destination buffer",
12654 exp, get_callee_fndecl (exp));
12655 }
12656
12657 /* Emit warning if a free is called with address of a variable. */
12658
12659 static void
12660 maybe_emit_free_warning (tree exp)
12661 {
12662 tree arg = CALL_EXPR_ARG (exp, 0);
12663
12664 STRIP_NOPS (arg);
12665 if (TREE_CODE (arg) != ADDR_EXPR)
12666 return;
12667
12668 arg = get_base_address (TREE_OPERAND (arg, 0));
12669 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
12670 return;
12671
12672 if (SSA_VAR_P (arg))
12673 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12674 "%Kattempt to free a non-heap object %qD", exp, arg);
12675 else
12676 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
12677 "%Kattempt to free a non-heap object", exp);
12678 }
12679
12680 /* Fold a call to __builtin_object_size with arguments PTR and OST,
12681 if possible. */
12682
12683 tree
12684 fold_builtin_object_size (tree ptr, tree ost)
12685 {
12686 unsigned HOST_WIDE_INT bytes;
12687 int object_size_type;
12688
12689 if (!validate_arg (ptr, POINTER_TYPE)
12690 || !validate_arg (ost, INTEGER_TYPE))
12691 return NULL_TREE;
12692
12693 STRIP_NOPS (ost);
12694
12695 if (TREE_CODE (ost) != INTEGER_CST
12696 || tree_int_cst_sgn (ost) < 0
12697 || compare_tree_int (ost, 3) > 0)
12698 return NULL_TREE;
12699
12700 object_size_type = tree_to_shwi (ost);
12701
12702 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
12703 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
12704 and (size_t) 0 for types 2 and 3. */
12705 if (TREE_SIDE_EFFECTS (ptr))
12706 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
12707
12708 if (TREE_CODE (ptr) == ADDR_EXPR)
12709 {
12710 bytes = compute_builtin_object_size (ptr, object_size_type);
12711 if (double_int_fits_to_tree_p (size_type_node,
12712 double_int::from_uhwi (bytes)))
12713 return build_int_cstu (size_type_node, bytes);
12714 }
12715 else if (TREE_CODE (ptr) == SSA_NAME)
12716 {
12717 /* If object size is not known yet, delay folding until
12718 later. Maybe subsequent passes will help determining
12719 it. */
12720 bytes = compute_builtin_object_size (ptr, object_size_type);
12721 if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
12722 && double_int_fits_to_tree_p (size_type_node,
12723 double_int::from_uhwi (bytes)))
12724 return build_int_cstu (size_type_node, bytes);
12725 }
12726
12727 return NULL_TREE;
12728 }
12729
12730 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12731 DEST, SRC, LEN, and SIZE are the arguments to the call.
12732 IGNORE is true if the return value can be ignored. FCODE is the BUILT_IN_*
12733 code of the builtin. If MAXLEN is not NULL, it is the maximum length
12734 passed as the third argument. */
12735
12736 tree
12737 fold_builtin_memory_chk (location_t loc, tree fndecl,
12738 tree dest, tree src, tree len, tree size,
12739 tree maxlen, bool ignore,
12740 enum built_in_function fcode)
12741 {
12742 tree fn;
12743
12744 if (!validate_arg (dest, POINTER_TYPE)
12745 || !validate_arg (src,
12746 (fcode == BUILT_IN_MEMSET_CHK
12747 ? INTEGER_TYPE : POINTER_TYPE))
12748 || !validate_arg (len, INTEGER_TYPE)
12749 || !validate_arg (size, INTEGER_TYPE))
12750 return NULL_TREE;
12751
12752 /* If SRC and DEST are the same (and not volatile), return DEST
12753 (resp. DEST+LEN for __mempcpy_chk). */
12754 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
12755 {
12756 if (fcode != BUILT_IN_MEMPCPY_CHK)
12757 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12758 dest, len);
12759 else
12760 {
12761 tree temp = fold_build_pointer_plus_loc (loc, dest, len);
12762 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), temp);
12763 }
12764 }
12765
12766 if (! tree_fits_uhwi_p (size))
12767 return NULL_TREE;
12768
12769 if (! integer_all_onesp (size))
12770 {
12771 if (! tree_fits_uhwi_p (len))
12772 {
12773 /* If LEN is not constant, try MAXLEN too.
12774 For MAXLEN only allow optimizing into non-_ocs function
12775 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12776 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
12777 {
12778 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
12779 {
12780 /* (void) __mempcpy_chk () can be optimized into
12781 (void) __memcpy_chk (). */
12782 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12783 if (!fn)
12784 return NULL_TREE;
12785
12786 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12787 }
12788 return NULL_TREE;
12789 }
12790 }
12791 else
12792 maxlen = len;
12793
12794 if (tree_int_cst_lt (size, maxlen))
12795 return NULL_TREE;
12796 }
12797
12798 fn = NULL_TREE;
12799 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12800 mem{cpy,pcpy,move,set} is available. */
12801 switch (fcode)
12802 {
12803 case BUILT_IN_MEMCPY_CHK:
12804 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
12805 break;
12806 case BUILT_IN_MEMPCPY_CHK:
12807 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
12808 break;
12809 case BUILT_IN_MEMMOVE_CHK:
12810 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
12811 break;
12812 case BUILT_IN_MEMSET_CHK:
12813 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
12814 break;
12815 default:
12816 break;
12817 }
12818
12819 if (!fn)
12820 return NULL_TREE;
12821
12822 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12823 }
12824
12825 /* Fold a call to the __st[rp]cpy_chk builtin.
12826 DEST, SRC, and SIZE are the arguments to the call.
12827 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
12828 code of the builtin. If MAXLEN is not NULL, it is the maximum length of
12829 the strings passed as the second argument. */
12830
12831 tree
12832 fold_builtin_stxcpy_chk (location_t loc, tree fndecl, tree dest,
12833 tree src, tree size,
12834 tree maxlen, bool ignore,
12835 enum built_in_function fcode)
12836 {
12837 tree len, fn;
12838
12839 if (!validate_arg (dest, POINTER_TYPE)
12840 || !validate_arg (src, POINTER_TYPE)
12841 || !validate_arg (size, INTEGER_TYPE))
12842 return NULL_TREE;
12843
12844 /* If SRC and DEST are the same (and not volatile), return DEST. */
12845 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
12846 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest);
12847
12848 if (! tree_fits_uhwi_p (size))
12849 return NULL_TREE;
12850
12851 if (! integer_all_onesp (size))
12852 {
12853 len = c_strlen (src, 1);
12854 if (! len || ! tree_fits_uhwi_p (len))
12855 {
12856 /* If LEN is not constant, try MAXLEN too.
12857 For MAXLEN only allow optimizing into non-_ocs function
12858 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12859 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
12860 {
12861 if (fcode == BUILT_IN_STPCPY_CHK)
12862 {
12863 if (! ignore)
12864 return NULL_TREE;
12865
12866 /* If return value of __stpcpy_chk is ignored,
12867 optimize into __strcpy_chk. */
12868 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
12869 if (!fn)
12870 return NULL_TREE;
12871
12872 return build_call_expr_loc (loc, fn, 3, dest, src, size);
12873 }
12874
12875 if (! len || TREE_SIDE_EFFECTS (len))
12876 return NULL_TREE;
12877
12878 /* If c_strlen returned something, but not a constant,
12879 transform __strcpy_chk into __memcpy_chk. */
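/* The call becomes __memcpy_chk (dest, src, len + 1, size), where
len is the non-constant strlen (src) computed above. */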
12880 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
12881 if (!fn)
12882 return NULL_TREE;
12883
12884 len = fold_convert_loc (loc, size_type_node, len);
12885 len = size_binop_loc (loc, PLUS_EXPR, len,
12886 build_int_cst (size_type_node, 1));
12887 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)),
12888 build_call_expr_loc (loc, fn, 4,
12889 dest, src, len, size));
12890 }
12891 }
12892 else
12893 maxlen = len;
12894
12895 if (! tree_int_cst_lt (maxlen, size))
12896 return NULL_TREE;
12897 }
12898
12899 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
12900 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
12901 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
12902 if (!fn)
12903 return NULL_TREE;
12904
12905 return build_call_expr_loc (loc, fn, 2, dest, src);
12906 }
12907
12908 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
12909 are the arguments to the call. If MAXLEN is not NULL, it is the maximum
12910 length passed as the third argument. IGNORE is true if the return value
12911 can be ignored. FCODE is the BUILT_IN_* code of the builtin. */
12912
12913 tree
12914 fold_builtin_stxncpy_chk (location_t loc, tree dest, tree src,
12915 tree len, tree size, tree maxlen, bool ignore,
12916 enum built_in_function fcode)
12917 {
12918 tree fn;
12919
12920 if (!validate_arg (dest, POINTER_TYPE)
12921 || !validate_arg (src, POINTER_TYPE)
12922 || !validate_arg (len, INTEGER_TYPE)
12923 || !validate_arg (size, INTEGER_TYPE))
12924 return NULL_TREE;
12925
12926 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
12927 {
12928 /* If return value of __stpncpy_chk is ignored,
12929 optimize into __strncpy_chk. */
12930 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
12931 if (fn)
12932 return build_call_expr_loc (loc, fn, 4, dest, src, len, size);
12933 }
12934
12935 if (! tree_fits_uhwi_p (size))
12936 return NULL_TREE;
12937
12938 if (! integer_all_onesp (size))
12939 {
12940 if (! tree_fits_uhwi_p (len))
12941 {
12942 /* If LEN is not constant, try MAXLEN too.
12943 For MAXLEN only allow optimizing into non-_ocs function
12944 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
12945 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
12946 return NULL_TREE;
12947 }
12948 else
12949 maxlen = len;
12950
12951 if (tree_int_cst_lt (size, maxlen))
12952 return NULL_TREE;
12953 }
12954
12955 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
12956 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
12957 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
12958 if (!fn)
12959 return NULL_TREE;
12960
12961 return build_call_expr_loc (loc, fn, 3, dest, src, len);
12962 }
12963
12964 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
12965 are the arguments to the call. */
12966
12967 static tree
12968 fold_builtin_strcat_chk (location_t loc, tree fndecl, tree dest,
12969 tree src, tree size)
12970 {
12971 tree fn;
12972 const char *p;
12973
12974 if (!validate_arg (dest, POINTER_TYPE)
12975 || !validate_arg (src, POINTER_TYPE)
12976 || !validate_arg (size, INTEGER_TYPE))
12977 return NULL_TREE;
12978
12979 p = c_getstr (src);
12980 /* If the SRC parameter is "", return DEST. */
12981 if (p && *p == '\0')
12982 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
12983
12984 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
12985 return NULL_TREE;
12986
12987 /* If __builtin_strcat_chk is used, assume strcat is available. */
12988 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
12989 if (!fn)
12990 return NULL_TREE;
12991
12992 return build_call_expr_loc (loc, fn, 2, dest, src);
12993 }
12994
12995 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
12996 LEN, and SIZE. */
12997
12998 static tree
12999 fold_builtin_strncat_chk (location_t loc, tree fndecl,
13000 tree dest, tree src, tree len, tree size)
13001 {
13002 tree fn;
13003 const char *p;
13004
13005 if (!validate_arg (dest, POINTER_TYPE)
13006 || !validate_arg (src, POINTER_TYPE)
13007 || !validate_arg (len, INTEGER_TYPE)
13008 || !validate_arg (size, INTEGER_TYPE))
13009 return NULL_TREE;
13010
13011 p = c_getstr (src);
13012 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
13013 if (p && *p == '\0')
13014 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
13015 else if (integer_zerop (len))
13016 return omit_one_operand_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), dest, src);
13017
13018 if (! tree_fits_uhwi_p (size))
13019 return NULL_TREE;
13020
13021 if (! integer_all_onesp (size))
13022 {
13023 tree src_len = c_strlen (src, 1);
13024 if (src_len
13025 && tree_fits_uhwi_p (src_len)
13026 && tree_fits_uhwi_p (len)
13027 && ! tree_int_cst_lt (len, src_len))
13028 {
13029 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
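/* e.g. with a constant SIZE, __strncat_chk (d, "ab", 5, sz) becomes
__strcat_chk (d, "ab", sz). */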
13030 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
13031 if (!fn)
13032 return NULL_TREE;
13033
13034 return build_call_expr_loc (loc, fn, 3, dest, src, size);
13035 }
13036 return NULL_TREE;
13037 }
13038
13039 /* If __builtin_strncat_chk is used, assume strncat is available. */
13040 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
13041 if (!fn)
13042 return NULL_TREE;
13043
13044 return build_call_expr_loc (loc, fn, 3, dest, src, len);
13045 }
13046
13047 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
13048 Return NULL_TREE if a normal call should be emitted rather than
13049 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
13050 or BUILT_IN_VSPRINTF_CHK. */
13051
13052 static tree
13053 fold_builtin_sprintf_chk_1 (location_t loc, int nargs, tree *args,
13054 enum built_in_function fcode)
13055 {
13056 tree dest, size, len, fn, fmt, flag;
13057 const char *fmt_str;
13058
13059 /* Verify the required arguments in the original call. */
13060 if (nargs < 4)
13061 return NULL_TREE;
13062 dest = args[0];
13063 if (!validate_arg (dest, POINTER_TYPE))
13064 return NULL_TREE;
13065 flag = args[1];
13066 if (!validate_arg (flag, INTEGER_TYPE))
13067 return NULL_TREE;
13068 size = args[2];
13069 if (!validate_arg (size, INTEGER_TYPE))
13070 return NULL_TREE;
13071 fmt = args[3];
13072 if (!validate_arg (fmt, POINTER_TYPE))
13073 return NULL_TREE;
13074
13075 if (! tree_fits_uhwi_p (size))
13076 return NULL_TREE;
13077
13078 len = NULL_TREE;
13079
13080 if (!init_target_chars ())
13081 return NULL_TREE;
13082
13083 /* Check whether the format is a literal string constant. */
13084 fmt_str = c_getstr (fmt);
13085 if (fmt_str != NULL)
13086 {
13087 /* If the format doesn't contain % args or %%, we know the size. */
13088 if (strchr (fmt_str, target_percent) == 0)
13089 {
13090 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
13091 len = build_int_cstu (size_type_node, strlen (fmt_str));
13092 }
13093 /* If the format is "%s" and first ... argument is a string literal,
13094 we know the size too. */
13095 else if (fcode == BUILT_IN_SPRINTF_CHK
13096 && strcmp (fmt_str, target_percent_s) == 0)
13097 {
13098 tree arg;
13099
13100 if (nargs == 5)
13101 {
13102 arg = args[4];
13103 if (validate_arg (arg, POINTER_TYPE))
13104 {
13105 len = c_strlen (arg, 1);
13106 if (! len || ! tree_fits_uhwi_p (len))
13107 len = NULL_TREE;
13108 }
13109 }
13110 }
13111 }
13112
13113 if (! integer_all_onesp (size))
13114 {
13115 if (! len || ! tree_int_cst_lt (len, size))
13116 return NULL_TREE;
13117 }
13118
13119 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
13120 or if format doesn't contain % chars or is "%s". */
13121 if (! integer_zerop (flag))
13122 {
13123 if (fmt_str == NULL)
13124 return NULL_TREE;
13125 if (strchr (fmt_str, target_percent) != NULL
13126 && strcmp (fmt_str, target_percent_s))
13127 return NULL_TREE;
13128 }
13129
13130 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
13131 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
13132 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
13133 if (!fn)
13134 return NULL_TREE;
13135
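/* Drop the first four arguments (DEST, FLAG, SIZE, FMT) and rebuild the
call, e.g. __sprintf_chk (s, 0, sz, "%d", i) becomes sprintf (s, "%d", i). */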
13136 return rewrite_call_expr_array (loc, nargs, args, 4, fn, 2, dest, fmt);
13137 }
13138
13139 /* Fold a call EXP to __{,v}sprintf_chk. Return NULL_TREE if
13140 a normal call should be emitted rather than expanding the function
13141 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
13142
13143 static tree
13144 fold_builtin_sprintf_chk (location_t loc, tree exp,
13145 enum built_in_function fcode)
13146 {
13147 return fold_builtin_sprintf_chk_1 (loc, call_expr_nargs (exp),
13148 CALL_EXPR_ARGP (exp), fcode);
13149 }
13150
13151 /* Fold a call EXP to __{,v}snprintf_chk having NARGS passed as ARGS.
13152 Return NULL_TREE if a normal call should be emitted rather than expanding
13153 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13154 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is the maximum length
13155 passed as the second argument. */
13156
13157 static tree
13158 fold_builtin_snprintf_chk_1 (location_t loc, int nargs, tree *args,
13159 tree maxlen, enum built_in_function fcode)
13160 {
13161 tree dest, size, len, fn, fmt, flag;
13162 const char *fmt_str;
13163
13164 /* Verify the required arguments in the original call. */
13165 if (nargs < 5)
13166 return NULL_TREE;
13167 dest = args[0];
13168 if (!validate_arg (dest, POINTER_TYPE))
13169 return NULL_TREE;
13170 len = args[1];
13171 if (!validate_arg (len, INTEGER_TYPE))
13172 return NULL_TREE;
13173 flag = args[2];
13174 if (!validate_arg (flag, INTEGER_TYPE))
13175 return NULL_TREE;
13176 size = args[3];
13177 if (!validate_arg (size, INTEGER_TYPE))
13178 return NULL_TREE;
13179 fmt = args[4];
13180 if (!validate_arg (fmt, POINTER_TYPE))
13181 return NULL_TREE;
13182
13183 if (! tree_fits_uhwi_p (size))
13184 return NULL_TREE;
13185
13186 if (! integer_all_onesp (size))
13187 {
13188 if (! tree_fits_uhwi_p (len))
13189 {
13190 /* If LEN is not constant, try MAXLEN too.
13191 For MAXLEN only allow optimizing into non-_ocs function
13192 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
13193 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
13194 return NULL_TREE;
13195 }
13196 else
13197 maxlen = len;
13198
13199 if (tree_int_cst_lt (size, maxlen))
13200 return NULL_TREE;
13201 }
13202
13203 if (!init_target_chars ())
13204 return NULL_TREE;
13205
13206 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
13207 or if format doesn't contain % chars or is "%s". */
13208 if (! integer_zerop (flag))
13209 {
13210 fmt_str = c_getstr (fmt);
13211 if (fmt_str == NULL)
13212 return NULL_TREE;
13213 if (strchr (fmt_str, target_percent) != NULL
13214 && strcmp (fmt_str, target_percent_s))
13215 return NULL_TREE;
13216 }
13217
13218 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
13219 available. */
13220 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
13221 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
13222 if (!fn)
13223 return NULL_TREE;
13224
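/* Drop the first five arguments (DEST, LEN, FLAG, SIZE, FMT) and rebuild
the call, e.g. __snprintf_chk (s, n, 0, sz, "%d", i) becomes
snprintf (s, n, "%d", i). */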
13225 return rewrite_call_expr_array (loc, nargs, args, 5, fn, 3, dest, len, fmt);
13226 }
13227
13228 /* Fold a call EXP to __{,v}snprintf_chk. Return NULL_TREE if
13229 a normal call should be emitted rather than expanding the function
13230 inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
13231 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is the maximum length
13232 passed as the second argument. */
13233
13234 static tree
13235 fold_builtin_snprintf_chk (location_t loc, tree exp, tree maxlen,
13236 enum built_in_function fcode)
13237 {
13238 return fold_builtin_snprintf_chk_1 (loc, call_expr_nargs (exp),
13239 CALL_EXPR_ARGP (exp), maxlen, fcode);
13240 }
13241
13242 /* Builtins with folding operations that operate on "..." arguments
13243 need special handling; we need to store the arguments in a convenient
13244 data structure before attempting any folding. Fortunately there are
13245 only a few builtins that fall into this category. FNDECL is the
13246 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
13247 result of the function call is ignored. */
13248
13249 static tree
13250 fold_builtin_varargs (location_t loc, tree fndecl, tree exp,
13251 bool ignore ATTRIBUTE_UNUSED)
13252 {
13253 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13254 tree ret = NULL_TREE;
13255
13256 switch (fcode)
13257 {
13258 case BUILT_IN_SPRINTF_CHK:
13259 case BUILT_IN_VSPRINTF_CHK:
13260 ret = fold_builtin_sprintf_chk (loc, exp, fcode);
13261 break;
13262
13263 case BUILT_IN_SNPRINTF_CHK:
13264 case BUILT_IN_VSNPRINTF_CHK:
13265 ret = fold_builtin_snprintf_chk (loc, exp, NULL_TREE, fcode);
13266 break;
13267
13268 case BUILT_IN_FPCLASSIFY:
13269 ret = fold_builtin_fpclassify (loc, exp);
13270 break;
13271
13272 default:
13273 break;
13274 }
13275 if (ret)
13276 {
13277 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13278 SET_EXPR_LOCATION (ret, loc);
13279 TREE_NO_WARNING (ret) = 1;
13280 return ret;
13281 }
13282 return NULL_TREE;
13283 }
13284
13285 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
13286 FMT and ARG are the arguments to the call; we don't fold cases with
13287 more than 2 arguments, and ARG may be null if this is a 1-argument case.
13288
13289 Return NULL_TREE if no simplification was possible, otherwise return the
13290 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13291 code of the function to be simplified. */
13292
13293 static tree
13294 fold_builtin_printf (location_t loc, tree fndecl, tree fmt,
13295 tree arg, bool ignore,
13296 enum built_in_function fcode)
13297 {
13298 tree fn_putchar, fn_puts, newarg, call = NULL_TREE;
13299 const char *fmt_str = NULL;
13300
13301 /* If the return value is used, don't do the transformation. */
13302 if (! ignore)
13303 return NULL_TREE;
13304
13305 /* Verify the required arguments in the original call. */
13306 if (!validate_arg (fmt, POINTER_TYPE))
13307 return NULL_TREE;
13308
13309 /* Check whether the format is a literal string constant. */
13310 fmt_str = c_getstr (fmt);
13311 if (fmt_str == NULL)
13312 return NULL_TREE;
13313
13314 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
13315 {
13316 /* If we're using an unlocked function, assume the other
13317 unlocked functions exist explicitly. */
13318 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
13319 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
13320 }
13321 else
13322 {
13323 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
13324 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
13325 }
13326
13327 if (!init_target_chars ())
13328 return NULL_TREE;
13329
13330 if (strcmp (fmt_str, target_percent_s) == 0
13331 || strchr (fmt_str, target_percent) == NULL)
13332 {
13333 const char *str;
13334
13335 if (strcmp (fmt_str, target_percent_s) == 0)
13336 {
13337 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13338 return NULL_TREE;
13339
13340 if (!arg || !validate_arg (arg, POINTER_TYPE))
13341 return NULL_TREE;
13342
13343 str = c_getstr (arg);
13344 if (str == NULL)
13345 return NULL_TREE;
13346 }
13347 else
13348 {
13349 /* The format specifier doesn't contain any '%' characters. */
13350 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
13351 && arg)
13352 return NULL_TREE;
13353 str = fmt_str;
13354 }
13355
13356 /* If the string was "", printf does nothing. */
13357 if (str[0] == '\0')
13358 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13359
13360 /* If the string has length of 1, call putchar. */
13361 if (str[1] == '\0')
13362 {
13363 /* Given printf("c") (where c is any one character),
13364 convert "c"[0] to an int and pass that to the replacement
13365 function. */
13366 newarg = build_int_cst (integer_type_node, str[0]);
13367 if (fn_putchar)
13368 call = build_call_expr_loc (loc, fn_putchar, 1, newarg);
13369 }
13370 else
13371 {
13372 /* If the string was "string\n", call puts("string"). */
13373 size_t len = strlen (str);
13374 if ((unsigned char)str[len - 1] == target_newline
13375 && (size_t) (int) len == len
13376 && (int) len > 0)
13377 {
13378 char *newstr;
13379 tree offset_node, string_cst;
13380
13381 /* Create a NUL-terminated string that's one char shorter
13382 than the original, stripping off the trailing '\n'. */
13383 newarg = build_string_literal (len, str);
13384 string_cst = string_constant (newarg, &offset_node);
13385 gcc_checking_assert (string_cst
13386 && (TREE_STRING_LENGTH (string_cst)
13387 == (int) len)
13388 && integer_zerop (offset_node)
13389 && (unsigned char)
13390 TREE_STRING_POINTER (string_cst)[len - 1]
13391 == target_newline);
13392 /* build_string_literal creates a new STRING_CST,
13393 modify it in place to avoid double copying. */
13394 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
13395 newstr[len - 1] = '\0';
13396 if (fn_puts)
13397 call = build_call_expr_loc (loc, fn_puts, 1, newarg);
13398 }
13399 else
13400 /* We'd like to arrange to call fputs(string,stdout) here,
13401 but we need stdout and don't have a way to get it yet. */
13402 return NULL_TREE;
13403 }
13404 }
13405
13406 /* The other optimizations can be done only on the non-va_list variants. */
13407 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
13408 return NULL_TREE;
13409
13410 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
13411 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
13412 {
13413 if (!arg || !validate_arg (arg, POINTER_TYPE))
13414 return NULL_TREE;
13415 if (fn_puts)
13416 call = build_call_expr_loc (loc, fn_puts, 1, arg);
13417 }
13418
13419 /* If the format specifier was "%c", call __builtin_putchar(arg). */
13420 else if (strcmp (fmt_str, target_percent_c) == 0)
13421 {
13422 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13423 return NULL_TREE;
13424 if (fn_putchar)
13425 call = build_call_expr_loc (loc, fn_putchar, 1, arg);
13426 }
13427
13428 if (!call)
13429 return NULL_TREE;
13430
13431 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13432 }
13433
13434 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
13435 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
13436 more than 3 arguments, and ARG may be null in the 2-argument case.
13437
13438 Return NULL_TREE if no simplification was possible, otherwise return the
13439 simplified form of the call as a tree. FCODE is the BUILT_IN_*
13440 code of the function to be simplified. */
13441
13442 static tree
13443 fold_builtin_fprintf (location_t loc, tree fndecl, tree fp,
13444 tree fmt, tree arg, bool ignore,
13445 enum built_in_function fcode)
13446 {
13447 tree fn_fputc, fn_fputs, call = NULL_TREE;
13448 const char *fmt_str = NULL;
13449
13450 /* If the return value is used, don't do the transformation. */
13451 if (! ignore)
13452 return NULL_TREE;
13453
13454 /* Verify the required arguments in the original call. */
13455 if (!validate_arg (fp, POINTER_TYPE))
13456 return NULL_TREE;
13457 if (!validate_arg (fmt, POINTER_TYPE))
13458 return NULL_TREE;
13459
13460 /* Check whether the format is a literal string constant. */
13461 fmt_str = c_getstr (fmt);
13462 if (fmt_str == NULL)
13463 return NULL_TREE;
13464
13465 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
13466 {
13467 /* If we're using an unlocked function, assume the other
13468 unlocked functions exist explicitly. */
13469 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
13470 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
13471 }
13472 else
13473 {
13474 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
13475 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
13476 }
13477
13478 if (!init_target_chars ())
13479 return NULL_TREE;
13480
13481   /* If the format doesn't contain % args or %%, use fputs.  */
13482 if (strchr (fmt_str, target_percent) == NULL)
13483 {
13484 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
13485 && arg)
13486 return NULL_TREE;
13487
13488 /* If the format specifier was "", fprintf does nothing. */
13489 if (fmt_str[0] == '\0')
13490 {
13491 /* If FP has side-effects, just wait until gimplification is
13492 done. */
13493 if (TREE_SIDE_EFFECTS (fp))
13494 return NULL_TREE;
13495
13496 return build_int_cst (TREE_TYPE (TREE_TYPE (fndecl)), 0);
13497 }
13498
13499 /* When "string" doesn't contain %, replace all cases of
13500 fprintf (fp, string) with fputs (string, fp). The fputs
13501 builtin will take care of special cases like length == 1. */
13502 if (fn_fputs)
13503 call = build_call_expr_loc (loc, fn_fputs, 2, fmt, fp);
13504 }
13505
13506 /* The other optimizations can be done only on the non-va_list variants. */
13507 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
13508 return NULL_TREE;
13509
13510 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
13511 else if (strcmp (fmt_str, target_percent_s) == 0)
13512 {
13513 if (!arg || !validate_arg (arg, POINTER_TYPE))
13514 return NULL_TREE;
13515 if (fn_fputs)
13516 call = build_call_expr_loc (loc, fn_fputs, 2, arg, fp);
13517 }
13518
13519 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
13520 else if (strcmp (fmt_str, target_percent_c) == 0)
13521 {
13522 if (!arg || !validate_arg (arg, INTEGER_TYPE))
13523 return NULL_TREE;
13524 if (fn_fputc)
13525 call = build_call_expr_loc (loc, fn_fputc, 2, arg, fp);
13526 }
13527
13528 if (!call)
13529 return NULL_TREE;
13530 return fold_convert_loc (loc, TREE_TYPE (TREE_TYPE (fndecl)), call);
13531 }
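
/* The fprintf folding has the same shape as the printf one; the one
   extra wrinkle is that the stream moves from the first fprintf
   argument to the last fputs/fputc argument (a sketch):

     fprintf (fp, "")         becomes   0   (when FP has no side effects)
     fprintf (fp, "hello")    becomes   fputs ("hello", fp)
     fprintf (fp, "%s", arg)  becomes   fputs (arg, fp)
     fprintf (fp, "%c", arg)  becomes   fputc (arg, fp)  */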
13532
13533 /* Initialize format string characters in the target charset. */
13534
13535 static bool
13536 init_target_chars (void)
13537 {
13538 static bool init;
13539 if (!init)
13540 {
13541 target_newline = lang_hooks.to_target_charset ('\n');
13542 target_percent = lang_hooks.to_target_charset ('%');
13543 target_c = lang_hooks.to_target_charset ('c');
13544 target_s = lang_hooks.to_target_charset ('s');
13545 if (target_newline == 0 || target_percent == 0 || target_c == 0
13546 || target_s == 0)
13547 return false;
13548
13549 target_percent_c[0] = target_percent;
13550 target_percent_c[1] = target_c;
13551 target_percent_c[2] = '\0';
13552
13553 target_percent_s[0] = target_percent;
13554 target_percent_s[1] = target_s;
13555 target_percent_s[2] = '\0';
13556
13557 target_percent_s_newline[0] = target_percent;
13558 target_percent_s_newline[1] = target_s;
13559 target_percent_s_newline[2] = target_newline;
13560 target_percent_s_newline[3] = '\0';
13561
13562 init = true;
13563 }
13564 return true;
13565 }
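
/* lang_hooks.to_target_charset evidently returns 0 for a character it
   cannot map, which is why a zero result above makes initialization
   fail.  On a host and target that both use ASCII the mapping is the
   identity and the three strings built above are just "%c", "%s" and
   "%s\n"; on, say, an EBCDIC target, the '%', 'c', 's' and '\n' bytes
   all differ from the host's, so the literal format strings in this
   file cannot be compared against target string constants directly.  */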
13566
13567 /* Helper function for do_mpfr_arg*().  Ensure M is a normal number
13568    and that no overflow/underflow occurred.  INEXACT is true if M was
13569    not exactly calculated.  TYPE is the tree type for the result.
13570    This function assumes that you have cleared the MPFR flags before
13571    calculating M, so that any flag set afterwards and prior to entering
13572    this function was raised by that calculation.  Return NULL_TREE if any checks fail.  */
13573
13574 static tree
13575 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
13576 {
13577 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13578 overflow/underflow occurred. If -frounding-math, proceed iff the
13579 result of calling FUNC was exact. */
13580 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
13581 && (!flag_rounding_math || !inexact))
13582 {
13583 REAL_VALUE_TYPE rr;
13584
13585 real_from_mpfr (&rr, m, type, GMP_RNDN);
13586       /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value;
13587 	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
13588 	 but the mpfr_t is not, then we underflowed in the
13589 	 conversion.  */
13590 if (real_isfinite (&rr)
13591 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
13592 {
13593 REAL_VALUE_TYPE rmode;
13594
13595 real_convert (&rmode, TYPE_MODE (type), &rr);
13596 /* Proceed iff the specified mode can hold the value. */
13597 if (real_identical (&rmode, &rr))
13598 return build_real (type, rmode);
13599 }
13600 }
13601 return NULL_TREE;
13602 }
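
/* The flags protocol this helper relies on can be seen in isolation
   with a standalone host program (a sketch, using the host's MPFR
   directly rather than GCC's REAL_VALUE_TYPE machinery; link with
   -lmpfr -lgmp):

     #include <stdio.h>
     #include <mpfr.h>

     int
     main (void)
     {
       mpfr_t m;
       int inexact;

       mpfr_init2 (m, 53);                    // IEEE double precision
       mpfr_set_d (m, 0.5, GMP_RNDN);
       mpfr_clear_flags ();                   // step 1: clear sticky flags
       inexact = mpfr_sin (m, m, GMP_RNDN);   // step 2: compute
       // step 3: the checks do_mpfr_ckconv performs
       if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ())
         printf ("sin(0.5) = %.17g (inexact=%d)\n",
                 mpfr_get_d (m, GMP_RNDN), inexact);
       mpfr_clear (m);
       return 0;
     }

   Any flag observed after the computation must have been raised by it,
   which is exactly the assumption documented above.  */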
13603
13604 /* Helper function for do_mpc_arg*().  Ensure M is a normal complex
13605    number and that no overflow/underflow occurred.  INEXACT is true if
13606    M was not exactly calculated.  TYPE is the tree type for the result.
13607    This function assumes that you have cleared the MPFR flags before
13608    calculating M, so that any flag set afterwards and prior to entering
13609    this function was raised by that calculation.  Return NULL_TREE if
13610    any checks fail; if FORCE_CONVERT is true, bypass the checks.  */
13611
13612 static tree
13613 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
13614 {
13615 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
13616 overflow/underflow occurred. If -frounding-math, proceed iff the
13617 result of calling FUNC was exact. */
13618 if (force_convert
13619 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
13620 && !mpfr_overflow_p () && !mpfr_underflow_p ()
13621 && (!flag_rounding_math || !inexact)))
13622 {
13623 REAL_VALUE_TYPE re, im;
13624
13625 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
13626 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
13627       /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values;
13628 	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
13629 	 but the mpfr_t is not, then we underflowed in the
13630 	 conversion.  */
13631 if (force_convert
13632 || (real_isfinite (&re) && real_isfinite (&im)
13633 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
13634 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
13635 {
13636 REAL_VALUE_TYPE re_mode, im_mode;
13637
13638 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
13639 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
13640 /* Proceed iff the specified mode can hold the value. */
13641 if (force_convert
13642 || (real_identical (&re_mode, &re)
13643 && real_identical (&im_mode, &im)))
13644 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
13645 build_real (TREE_TYPE (type), im_mode));
13646 }
13647 }
13648 return NULL_TREE;
13649 }
13650
13651 /* If argument ARG is a REAL_CST, call the one-argument mpfr function
13652 FUNC on it and return the resulting value as a tree with type TYPE.
13653 If MIN and/or MAX are not NULL, then the supplied ARG must be
13654 within those bounds. If INCLUSIVE is true, then MIN/MAX are
13655 acceptable values, otherwise they are not. The mpfr precision is
13656 set to the precision of TYPE. We assume that function FUNC returns
13657 zero if the result could be calculated exactly within the requested
13658 precision. */
13659
13660 static tree
13661 do_mpfr_arg1 (tree arg, tree type, int (*func)(mpfr_ptr, mpfr_srcptr, mp_rnd_t),
13662 const REAL_VALUE_TYPE *min, const REAL_VALUE_TYPE *max,
13663 bool inclusive)
13664 {
13665 tree result = NULL_TREE;
13666
13667 STRIP_NOPS (arg);
13668
13669 /* To proceed, MPFR must exactly represent the target floating point
13670 format, which only happens when the target base equals two. */
13671 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13672 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg))
13673 {
13674 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13675
13676 if (real_isfinite (ra)
13677 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min))
13678 && (!max || real_compare (inclusive ? LE_EXPR: LT_EXPR , ra, max)))
13679 {
13680 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13681 const int prec = fmt->p;
13682 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13683 int inexact;
13684 mpfr_t m;
13685
13686 mpfr_init2 (m, prec);
13687 mpfr_from_real (m, ra, GMP_RNDN);
13688 mpfr_clear_flags ();
13689 inexact = func (m, m, rnd);
13690 result = do_mpfr_ckconv (m, type, inexact);
13691 mpfr_clear (m);
13692 }
13693 }
13694
13695 return result;
13696 }
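
/* A sketch of the intended call sites: pass the mpfr entry point plus
   the function's real domain.  For acos, which is only defined on
   [-1, 1] with the endpoints included, a caller would write

     return do_mpfr_arg1 (arg0, type, mpfr_acos,
                          &dconstm1, &dconst1, true);

   while an everywhere-defined function such as sin passes NULL for
   both bounds.  */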
13697
13698 /* If argument ARG is a REAL_CST, call the two-argument mpfr function
13699 FUNC on it and return the resulting value as a tree with type TYPE.
13700 The mpfr precision is set to the precision of TYPE. We assume that
13701 function FUNC returns zero if the result could be calculated
13702 exactly within the requested precision. */
13703
13704 static tree
13705 do_mpfr_arg2 (tree arg1, tree arg2, tree type,
13706 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13707 {
13708 tree result = NULL_TREE;
13709
13710 STRIP_NOPS (arg1);
13711 STRIP_NOPS (arg2);
13712
13713 /* To proceed, MPFR must exactly represent the target floating point
13714 format, which only happens when the target base equals two. */
13715 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13716 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13717 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13718 {
13719 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13720 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13721
13722 if (real_isfinite (ra1) && real_isfinite (ra2))
13723 {
13724 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13725 const int prec = fmt->p;
13726 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13727 int inexact;
13728 mpfr_t m1, m2;
13729
13730 mpfr_inits2 (prec, m1, m2, NULL);
13731 mpfr_from_real (m1, ra1, GMP_RNDN);
13732 mpfr_from_real (m2, ra2, GMP_RNDN);
13733 mpfr_clear_flags ();
13734 inexact = func (m1, m1, m2, rnd);
13735 result = do_mpfr_ckconv (m1, type, inexact);
13736 mpfr_clears (m1, m2, NULL);
13737 }
13738 }
13739
13740 return result;
13741 }
13742
13743 /* If argument ARG is a REAL_CST, call the three-argument mpfr function
13744 FUNC on it and return the resulting value as a tree with type TYPE.
13745 The mpfr precision is set to the precision of TYPE. We assume that
13746 function FUNC returns zero if the result could be calculated
13747 exactly within the requested precision. */
13748
13749 static tree
13750 do_mpfr_arg3 (tree arg1, tree arg2, tree arg3, tree type,
13751 int (*func)(mpfr_ptr, mpfr_srcptr, mpfr_srcptr, mpfr_srcptr, mp_rnd_t))
13752 {
13753 tree result = NULL_TREE;
13754
13755 STRIP_NOPS (arg1);
13756 STRIP_NOPS (arg2);
13757 STRIP_NOPS (arg3);
13758
13759 /* To proceed, MPFR must exactly represent the target floating point
13760 format, which only happens when the target base equals two. */
13761 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13762 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1)
13763 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2)
13764 && TREE_CODE (arg3) == REAL_CST && !TREE_OVERFLOW (arg3))
13765 {
13766 const REAL_VALUE_TYPE *const ra1 = &TREE_REAL_CST (arg1);
13767 const REAL_VALUE_TYPE *const ra2 = &TREE_REAL_CST (arg2);
13768 const REAL_VALUE_TYPE *const ra3 = &TREE_REAL_CST (arg3);
13769
13770 if (real_isfinite (ra1) && real_isfinite (ra2) && real_isfinite (ra3))
13771 {
13772 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13773 const int prec = fmt->p;
13774 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13775 int inexact;
13776 mpfr_t m1, m2, m3;
13777
13778 mpfr_inits2 (prec, m1, m2, m3, NULL);
13779 mpfr_from_real (m1, ra1, GMP_RNDN);
13780 mpfr_from_real (m2, ra2, GMP_RNDN);
13781 mpfr_from_real (m3, ra3, GMP_RNDN);
13782 mpfr_clear_flags ();
13783 inexact = func (m1, m1, m2, m3, rnd);
13784 result = do_mpfr_ckconv (m1, type, inexact);
13785 mpfr_clears (m1, m2, m3, NULL);
13786 }
13787 }
13788
13789 return result;
13790 }
13791
13792 /* If argument ARG is a REAL_CST, call mpfr_sin_cos() on it and set
13793 the pointers *(ARG_SINP) and *(ARG_COSP) to the resulting values.
13794 If ARG_SINP and ARG_COSP are NULL then the result is returned
13795 as a complex value.
13796 The type is taken from the type of ARG and is used for setting the
13797 precision of the calculation and results. */
13798
13799 static tree
13800 do_mpfr_sincos (tree arg, tree arg_sinp, tree arg_cosp)
13801 {
13802 tree const type = TREE_TYPE (arg);
13803 tree result = NULL_TREE;
13804
13805 STRIP_NOPS (arg);
13806
13807 /* To proceed, MPFR must exactly represent the target floating point
13808 format, which only happens when the target base equals two. */
13809 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13810 && TREE_CODE (arg) == REAL_CST
13811 && !TREE_OVERFLOW (arg))
13812 {
13813 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg);
13814
13815 if (real_isfinite (ra))
13816 {
13817 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13818 const int prec = fmt->p;
13819 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13820 tree result_s, result_c;
13821 int inexact;
13822 mpfr_t m, ms, mc;
13823
13824 mpfr_inits2 (prec, m, ms, mc, NULL);
13825 mpfr_from_real (m, ra, GMP_RNDN);
13826 mpfr_clear_flags ();
13827 inexact = mpfr_sin_cos (ms, mc, m, rnd);
13828 result_s = do_mpfr_ckconv (ms, type, inexact);
13829 result_c = do_mpfr_ckconv (mc, type, inexact);
13830 mpfr_clears (m, ms, mc, NULL);
13831 if (result_s && result_c)
13832 {
13833 	      /* If we are to return the result as a complex value, do so.  */
13834 if (!arg_sinp && !arg_cosp)
13835 return build_complex (build_complex_type (type),
13836 result_c, result_s);
13837
13838 /* Dereference the sin/cos pointer arguments. */
13839 arg_sinp = build_fold_indirect_ref (arg_sinp);
13840 arg_cosp = build_fold_indirect_ref (arg_cosp);
13841 	      /* Proceed iff valid pointer types were passed in.  */
13842 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_sinp)) == TYPE_MAIN_VARIANT (type)
13843 && TYPE_MAIN_VARIANT (TREE_TYPE (arg_cosp)) == TYPE_MAIN_VARIANT (type))
13844 {
13845 /* Set the values. */
13846 result_s = fold_build2 (MODIFY_EXPR, type, arg_sinp,
13847 result_s);
13848 TREE_SIDE_EFFECTS (result_s) = 1;
13849 result_c = fold_build2 (MODIFY_EXPR, type, arg_cosp,
13850 result_c);
13851 TREE_SIDE_EFFECTS (result_c) = 1;
13852 /* Combine the assignments into a compound expr. */
13853 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13854 result_s, result_c));
13855 }
13856 }
13857 }
13858 }
13859 return result;
13860 }
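
/* At the source level, the pointer form of this folding turns

     double s, c;
     sincos (1.0, &s, &c);

   into the equivalent of the compound expression

     (s = 0.8414709848078965, c = 0.5403023058681398);

   while a caller that passes NULL pointers (the cexpi-style use) gets
   the same pair back as the complex constant cos(1) + sin(1)*i.
   (A sketch of the effect, with values rounded to double precision,
   not of the tree representation.)  */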
13861
13862 /* If argument ARG1 is an INTEGER_CST and ARG2 is a REAL_CST, call the
13863 two-argument mpfr order N Bessel function FUNC on them and return
13864 the resulting value as a tree with type TYPE. The mpfr precision
13865 is set to the precision of TYPE. We assume that function FUNC
13866 returns zero if the result could be calculated exactly within the
13867 requested precision. */
13868 static tree
13869 do_mpfr_bessel_n (tree arg1, tree arg2, tree type,
13870 int (*func)(mpfr_ptr, long, mpfr_srcptr, mp_rnd_t),
13871 const REAL_VALUE_TYPE *min, bool inclusive)
13872 {
13873 tree result = NULL_TREE;
13874
13875 STRIP_NOPS (arg1);
13876 STRIP_NOPS (arg2);
13877
13878 /* To proceed, MPFR must exactly represent the target floating point
13879 format, which only happens when the target base equals two. */
13880 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13881 && tree_fits_shwi_p (arg1)
13882 && TREE_CODE (arg2) == REAL_CST && !TREE_OVERFLOW (arg2))
13883 {
13884 const HOST_WIDE_INT n = tree_to_shwi (arg1);
13885 const REAL_VALUE_TYPE *const ra = &TREE_REAL_CST (arg2);
13886
13887 if (n == (long)n
13888 && real_isfinite (ra)
13889 && (!min || real_compare (inclusive ? GE_EXPR: GT_EXPR , ra, min)))
13890 {
13891 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13892 const int prec = fmt->p;
13893 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13894 int inexact;
13895 mpfr_t m;
13896
13897 mpfr_init2 (m, prec);
13898 mpfr_from_real (m, ra, GMP_RNDN);
13899 mpfr_clear_flags ();
13900 inexact = func (m, n, m, rnd);
13901 result = do_mpfr_ckconv (m, type, inexact);
13902 mpfr_clear (m);
13903 }
13904 }
13905
13906 return result;
13907 }
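
/* A sketch of the expected callers: jn is defined on the whole real
   line, so no bound is needed, while yn requires a strictly positive
   argument, which the MIN/INCLUSIVE parameters express:

     return do_mpfr_bessel_n (arg0, arg1, type, mpfr_jn, NULL, false);
     return do_mpfr_bessel_n (arg0, arg1, type, mpfr_yn, &dconst0, false);  */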
13908
13909 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
13910 the pointer *(ARG_QUO) and return the result. The type is taken
13911 from the type of ARG0 and is used for setting the precision of the
13912 calculation and results. */
13913
13914 static tree
13915 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
13916 {
13917 tree const type = TREE_TYPE (arg0);
13918 tree result = NULL_TREE;
13919
13920 STRIP_NOPS (arg0);
13921 STRIP_NOPS (arg1);
13922
13923 /* To proceed, MPFR must exactly represent the target floating point
13924 format, which only happens when the target base equals two. */
13925 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
13926 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
13927 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
13928 {
13929 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
13930 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
13931
13932 if (real_isfinite (ra0) && real_isfinite (ra1))
13933 {
13934 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
13935 const int prec = fmt->p;
13936 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
13937 tree result_rem;
13938 long integer_quo;
13939 mpfr_t m0, m1;
13940
13941 mpfr_inits2 (prec, m0, m1, NULL);
13942 mpfr_from_real (m0, ra0, GMP_RNDN);
13943 mpfr_from_real (m1, ra1, GMP_RNDN);
13944 mpfr_clear_flags ();
13945 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
13946 /* Remquo is independent of the rounding mode, so pass
13947 inexact=0 to do_mpfr_ckconv(). */
13948 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
13949 mpfr_clears (m0, m1, NULL);
13950 if (result_rem)
13951 {
13952 /* MPFR calculates quo in the host's long so it may
13953 return more bits in quo than the target int can hold
13954 if sizeof(host long) > sizeof(target int). This can
13955 happen even for native compilers in LP64 mode. In
13956 		 these cases, reduce the quo value modulo the largest
13957 number that the target int can hold while leaving one
13958 bit for the sign. */
13959 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
13960 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
13961
13962 /* Dereference the quo pointer argument. */
13963 arg_quo = build_fold_indirect_ref (arg_quo);
13964 /* Proceed iff a valid pointer type was passed in. */
13965 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
13966 {
13967 /* Set the value. */
13968 tree result_quo
13969 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
13970 build_int_cst (TREE_TYPE (arg_quo),
13971 integer_quo));
13972 TREE_SIDE_EFFECTS (result_quo) = 1;
13973 /* Combine the quo assignment with the rem. */
13974 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
13975 result_quo, result_rem));
13976 }
13977 }
13978 }
13979 }
13980 return result;
13981 }
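
/* The quo clamping above can be exercised in isolation.  A sketch for
   an LP64 host and a 32-bit target int (INT_TYPE_SIZE hard-coded to
   32 for the illustration):

     #include <stdio.h>
     #include <limits.h>

     int
     main (void)
     {
       long quo = 0x123456789abL;               // wider than 32 bits
       if (sizeof (quo) * CHAR_BIT > 32)
         quo %= (long) (1UL << (32 - 1));       // keep 31 bits + sign
       printf ("%ld\n", quo);                   // now fits a 32-bit int
       return 0;
     }

   Because C's % truncates toward zero, the reduced value keeps the
   sign of the original quotient; remquo only guarantees the low-order
   bits of the quotient anyway, so discarding the high bits is
   conforming.  */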
13982
13983 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
13984 resulting value as a tree with type TYPE. The mpfr precision is
13985 set to the precision of TYPE. We assume that this mpfr function
13986 returns zero if the result could be calculated exactly within the
13987 requested precision. In addition, the integer pointer represented
13988 by ARG_SG will be dereferenced and set to the appropriate signgam
13989 (-1,1) value. */
13990
13991 static tree
13992 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
13993 {
13994 tree result = NULL_TREE;
13995
13996 STRIP_NOPS (arg);
13997
13998 /* To proceed, MPFR must exactly represent the target floating point
13999 format, which only happens when the target base equals two. Also
14000 verify ARG is a constant and that ARG_SG is an int pointer. */
14001 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
14002 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
14003 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
14004 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
14005 {
14006 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
14007
14008 /* In addition to NaN and Inf, the argument cannot be zero or a
14009 negative integer. */
14010 if (real_isfinite (ra)
14011 && ra->cl != rvc_zero
14012 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
14013 {
14014 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
14015 const int prec = fmt->p;
14016 const mp_rnd_t rnd = fmt->round_towards_zero? GMP_RNDZ : GMP_RNDN;
14017 int inexact, sg;
14018 mpfr_t m;
14019 tree result_lg;
14020
14021 mpfr_init2 (m, prec);
14022 mpfr_from_real (m, ra, GMP_RNDN);
14023 mpfr_clear_flags ();
14024 inexact = mpfr_lgamma (m, &sg, m, rnd);
14025 result_lg = do_mpfr_ckconv (m, type, inexact);
14026 mpfr_clear (m);
14027 if (result_lg)
14028 {
14029 tree result_sg;
14030
14031 /* Dereference the arg_sg pointer argument. */
14032 arg_sg = build_fold_indirect_ref (arg_sg);
14033 /* Assign the signgam value into *arg_sg. */
14034 result_sg = fold_build2 (MODIFY_EXPR,
14035 TREE_TYPE (arg_sg), arg_sg,
14036 build_int_cst (TREE_TYPE (arg_sg), sg));
14037 TREE_SIDE_EFFECTS (result_sg) = 1;
14038 /* Combine the signgam assignment with the lgamma result. */
14039 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
14040 result_sg, result_lg));
14041 }
14042 }
14043 }
14044
14045 return result;
14046 }
14047
14048 /* If argument ARG is a COMPLEX_CST, call the one-argument mpc
14049 function FUNC on it and return the resulting value as a tree with
14050 type TYPE. The mpfr precision is set to the precision of TYPE. We
14051 assume that function FUNC returns zero if the result could be
14052 calculated exactly within the requested precision. */
14053
14054 static tree
14055 do_mpc_arg1 (tree arg, tree type, int (*func)(mpc_ptr, mpc_srcptr, mpc_rnd_t))
14056 {
14057 tree result = NULL_TREE;
14058
14059 STRIP_NOPS (arg);
14060
14061 /* To proceed, MPFR must exactly represent the target floating point
14062 format, which only happens when the target base equals two. */
14063 if (TREE_CODE (arg) == COMPLEX_CST && !TREE_OVERFLOW (arg)
14064 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE
14065 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg))))->b == 2)
14066 {
14067 const REAL_VALUE_TYPE *const re = TREE_REAL_CST_PTR (TREE_REALPART (arg));
14068 const REAL_VALUE_TYPE *const im = TREE_REAL_CST_PTR (TREE_IMAGPART (arg));
14069
14070 if (real_isfinite (re) && real_isfinite (im))
14071 {
14072 const struct real_format *const fmt =
14073 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
14074 const int prec = fmt->p;
14075 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
14076 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
14077 int inexact;
14078 mpc_t m;
14079
14080 mpc_init2 (m, prec);
14081 mpfr_from_real (mpc_realref (m), re, rnd);
14082 mpfr_from_real (mpc_imagref (m), im, rnd);
14083 mpfr_clear_flags ();
14084 inexact = func (m, m, crnd);
14085 result = do_mpc_ckconv (m, type, inexact, /*force_convert=*/ 0);
14086 mpc_clear (m);
14087 }
14088 }
14089
14090 return result;
14091 }
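
/* MPC_RNDNN and MPC_RNDZZ pair one mpfr rounding mode per component,
   so the real and imaginary parts are rounded the same way here.  A
   sketch of a typical caller, folding ccos of a complex constant:

     return do_mpc_arg1 (arg0, type, mpc_cos);  */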
14092
14093 /* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
14094    mpc function FUNC on them and return the resulting value as a tree
14095 with type TYPE. The mpfr precision is set to the precision of
14096 TYPE. We assume that function FUNC returns zero if the result
14097 could be calculated exactly within the requested precision. If
14098 DO_NONFINITE is true, then fold expressions containing Inf or NaN
14099 in the arguments and/or results. */
14100
14101 tree
14102 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
14103 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
14104 {
14105 tree result = NULL_TREE;
14106
14107 STRIP_NOPS (arg0);
14108 STRIP_NOPS (arg1);
14109
14110 /* To proceed, MPFR must exactly represent the target floating point
14111 format, which only happens when the target base equals two. */
14112 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
14113 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
14114 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
14115 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
14116 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
14117 {
14118 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
14119 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
14120 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
14121 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
14122
14123 if (do_nonfinite
14124 || (real_isfinite (re0) && real_isfinite (im0)
14125 && real_isfinite (re1) && real_isfinite (im1)))
14126 {
14127 const struct real_format *const fmt =
14128 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
14129 const int prec = fmt->p;
14130 const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
14131 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
14132 int inexact;
14133 mpc_t m0, m1;
14134
14135 mpc_init2 (m0, prec);
14136 mpc_init2 (m1, prec);
14137 mpfr_from_real (mpc_realref (m0), re0, rnd);
14138 mpfr_from_real (mpc_imagref (m0), im0, rnd);
14139 mpfr_from_real (mpc_realref (m1), re1, rnd);
14140 mpfr_from_real (mpc_imagref (m1), im1, rnd);
14141 mpfr_clear_flags ();
14142 inexact = func (m0, m0, m1, crnd);
14143 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
14144 mpc_clear (m0);
14145 mpc_clear (m1);
14146 }
14147 }
14148
14149 return result;
14150 }
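
/* Unlike its siblings this helper is not static: besides builtins such
   as cpow, which call it with DO_NONFINITE clear,

     return do_mpc_arg2 (arg0, arg1, type, 0, mpc_pow);

   constant folding of complex multiplication and division also goes
   through it and may pass DO_NONFINITE set, since Annex G assigns
   meaning to Inf/NaN operands there.  (A sketch of the two kinds of
   caller, not an exhaustive list.)  */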
14151
14152 /* Fold a call STMT to __{,v}sprintf_chk. Return NULL_TREE if
14153 a normal call should be emitted rather than expanding the function
14154 inline. FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK. */
14155
14156 static tree
14157 gimple_fold_builtin_sprintf_chk (gimple stmt, enum built_in_function fcode)
14158 {
14159 int nargs = gimple_call_num_args (stmt);
14160
14161 return fold_builtin_sprintf_chk_1 (gimple_location (stmt), nargs,
14162 (nargs > 0
14163 ? gimple_call_arg_ptr (stmt, 0)
14164 : &error_mark_node), fcode);
14165 }
14166
14167 /* Fold a call STMT to __{,v}snprintf_chk.  Return NULL_TREE if
14168    a normal call should be emitted rather than expanding the function
14169    inline.  FCODE is either BUILT_IN_SNPRINTF_CHK or
14170    BUILT_IN_VSNPRINTF_CHK.  If MAXLEN is not NULL, it is the maximum
14171    length passed as the second argument.  */
14172
14173 tree
14174 gimple_fold_builtin_snprintf_chk (gimple stmt, tree maxlen,
14175 enum built_in_function fcode)
14176 {
14177 int nargs = gimple_call_num_args (stmt);
14178
14179 return fold_builtin_snprintf_chk_1 (gimple_location (stmt), nargs,
14180 (nargs > 0
14181 ? gimple_call_arg_ptr (stmt, 0)
14182 : &error_mark_node), maxlen, fcode);
14183 }
14184
14185 /* Builtins with folding operations that operate on "..." arguments
14186 need special handling; we need to store the arguments in a convenient
14187 data structure before attempting any folding. Fortunately there are
14188 only a few builtins that fall into this category. FNDECL is the
14189 function, EXP is the CALL_EXPR for the call, and IGNORE is true if the
14190 result of the function call is ignored. */
14191
14192 static tree
14193 gimple_fold_builtin_varargs (tree fndecl, gimple stmt,
14194 bool ignore ATTRIBUTE_UNUSED)
14195 {
14196 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
14197 tree ret = NULL_TREE;
14198
14199 switch (fcode)
14200 {
14201 case BUILT_IN_SPRINTF_CHK:
14202 case BUILT_IN_VSPRINTF_CHK:
14203 ret = gimple_fold_builtin_sprintf_chk (stmt, fcode);
14204 break;
14205
14206 case BUILT_IN_SNPRINTF_CHK:
14207 case BUILT_IN_VSNPRINTF_CHK:
14208 ret = gimple_fold_builtin_snprintf_chk (stmt, NULL_TREE, fcode);
14209
14210 default:
14211 break;
14212 }
14213 if (ret)
14214 {
14215 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
14216 TREE_NO_WARNING (ret) = 1;
14217 return ret;
14218 }
14219 return NULL_TREE;
14220 }
14221
14222 /* A wrapper function for builtin folding that prevents warnings for
14223    "statement without effect" and the like, caused by removing the
14224    call node before the warning is generated.  */
14225
14226 tree
14227 fold_call_stmt (gimple stmt, bool ignore)
14228 {
14229 tree ret = NULL_TREE;
14230 tree fndecl = gimple_call_fndecl (stmt);
14231 location_t loc = gimple_location (stmt);
14232 if (fndecl
14233 && TREE_CODE (fndecl) == FUNCTION_DECL
14234 && DECL_BUILT_IN (fndecl)
14235 && !gimple_call_va_arg_pack_p (stmt))
14236 {
14237 int nargs = gimple_call_num_args (stmt);
14238 tree *args = (nargs > 0
14239 ? gimple_call_arg_ptr (stmt, 0)
14240 : &error_mark_node);
14241
14242 if (avoid_folding_inline_builtin (fndecl))
14243 return NULL_TREE;
14244 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
14245 {
14246 return targetm.fold_builtin (fndecl, nargs, args, ignore);
14247 }
14248 else
14249 {
14250 if (nargs <= MAX_ARGS_TO_FOLD_BUILTIN)
14251 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
14252 if (!ret)
14253 ret = gimple_fold_builtin_varargs (fndecl, stmt, ignore);
14254 if (ret)
14255 {
14256 /* Propagate location information from original call to
14257 expansion of builtin. Otherwise things like
14258 maybe_emit_chk_warning, that operate on the expansion
14259 of a builtin, will use the wrong location information. */
14260 if (gimple_has_location (stmt))
14261 {
14262 tree realret = ret;
14263 if (TREE_CODE (ret) == NOP_EXPR)
14264 realret = TREE_OPERAND (ret, 0);
14265 if (CAN_HAVE_LOCATION_P (realret)
14266 && !EXPR_HAS_LOCATION (realret))
14267 SET_EXPR_LOCATION (realret, loc);
14268 return realret;
14269 }
14270 return ret;
14271 }
14272 }
14273 }
14274 return NULL_TREE;
14275 }
14276
14277 /* Look up the function in builtin_decl that corresponds to DECL
14278 and set ASMSPEC as its user assembler name. DECL must be a
14279 function decl that declares a builtin. */
14280
14281 void
14282 set_builtin_user_assembler_name (tree decl, const char *asmspec)
14283 {
14284 tree builtin;
14285 gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
14286 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
14287 && asmspec != 0);
14288
14289 builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
14290 set_user_assembler_name (builtin, asmspec);
14291 switch (DECL_FUNCTION_CODE (decl))
14292 {
14293 case BUILT_IN_MEMCPY:
14294 init_block_move_fn (asmspec);
14295 memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
14296 break;
14297 case BUILT_IN_MEMSET:
14298 init_block_clear_fn (asmspec);
14299 memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
14300 break;
14301 case BUILT_IN_MEMMOVE:
14302 memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
14303 break;
14304 case BUILT_IN_MEMCMP:
14305 memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
14306 break;
14307 case BUILT_IN_ABORT:
14308 abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
14309 break;
14310 case BUILT_IN_FFS:
14311 if (INT_TYPE_SIZE < BITS_PER_WORD)
14312 {
14313 set_user_assembler_libfunc ("ffs", asmspec);
14314 set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
14315 MODE_INT, 0), "ffs");
14316 }
14317 break;
14318 default:
14319 break;
14320 }
14321 }
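
/* The usual way this hook fires is a user declaration that renames a
   builtin at the assembler level (a sketch):

     void *memcpy (void *, const void *, __SIZE_TYPE__)
       __asm__ ("my_memcpy");

   After that, explicit memcpy calls and the block moves GCC emits
   internally (via init_block_move_fn and memcpy_libfunc above) both
   resolve to my_memcpy.  */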
14322
14323 /* Return true if DECL is a builtin that expands to a constant or similarly
14324 simple code. */
14325 bool
14326 is_simple_builtin (tree decl)
14327 {
14328 if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
14329 switch (DECL_FUNCTION_CODE (decl))
14330 {
14331 /* Builtins that expand to constants. */
14332 case BUILT_IN_CONSTANT_P:
14333 case BUILT_IN_EXPECT:
14334 case BUILT_IN_OBJECT_SIZE:
14335 case BUILT_IN_UNREACHABLE:
14336 /* Simple register moves or loads from stack. */
14337 case BUILT_IN_ASSUME_ALIGNED:
14338 case BUILT_IN_RETURN_ADDRESS:
14339 case BUILT_IN_EXTRACT_RETURN_ADDR:
14340 case BUILT_IN_FROB_RETURN_ADDR:
14341 case BUILT_IN_RETURN:
14342 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
14343 case BUILT_IN_FRAME_ADDRESS:
14344 case BUILT_IN_VA_END:
14345 case BUILT_IN_STACK_SAVE:
14346 case BUILT_IN_STACK_RESTORE:
14347 /* Exception state returns or moves registers around. */
14348 case BUILT_IN_EH_FILTER:
14349 case BUILT_IN_EH_POINTER:
14350 case BUILT_IN_EH_COPY_VALUES:
14351 return true;
14352
14353 default:
14354 return false;
14355 }
14356
14357 return false;
14358 }
14359
14360 /* Return true if DECL is a builtin that is not expensive, i.e., one
14361    that will most probably be expanded inline into reasonably simple
14362    code.  This is a superset of is_simple_builtin.  */
14363 bool
14364 is_inexpensive_builtin (tree decl)
14365 {
14366 if (!decl)
14367 return false;
14368 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
14369 return true;
14370 else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
14371 switch (DECL_FUNCTION_CODE (decl))
14372 {
14373 case BUILT_IN_ABS:
14374 case BUILT_IN_ALLOCA:
14375 case BUILT_IN_ALLOCA_WITH_ALIGN:
14376 case BUILT_IN_BSWAP16:
14377 case BUILT_IN_BSWAP32:
14378 case BUILT_IN_BSWAP64:
14379 case BUILT_IN_CLZ:
14380 case BUILT_IN_CLZIMAX:
14381 case BUILT_IN_CLZL:
14382 case BUILT_IN_CLZLL:
14383 case BUILT_IN_CTZ:
14384 case BUILT_IN_CTZIMAX:
14385 case BUILT_IN_CTZL:
14386 case BUILT_IN_CTZLL:
14387 case BUILT_IN_FFS:
14388 case BUILT_IN_FFSIMAX:
14389 case BUILT_IN_FFSL:
14390 case BUILT_IN_FFSLL:
14391 case BUILT_IN_IMAXABS:
14392 case BUILT_IN_FINITE:
14393 case BUILT_IN_FINITEF:
14394 case BUILT_IN_FINITEL:
14395 case BUILT_IN_FINITED32:
14396 case BUILT_IN_FINITED64:
14397 case BUILT_IN_FINITED128:
14398 case BUILT_IN_FPCLASSIFY:
14399 case BUILT_IN_ISFINITE:
14400 case BUILT_IN_ISINF_SIGN:
14401 case BUILT_IN_ISINF:
14402 case BUILT_IN_ISINFF:
14403 case BUILT_IN_ISINFL:
14404 case BUILT_IN_ISINFD32:
14405 case BUILT_IN_ISINFD64:
14406 case BUILT_IN_ISINFD128:
14407 case BUILT_IN_ISNAN:
14408 case BUILT_IN_ISNANF:
14409 case BUILT_IN_ISNANL:
14410 case BUILT_IN_ISNAND32:
14411 case BUILT_IN_ISNAND64:
14412 case BUILT_IN_ISNAND128:
14413 case BUILT_IN_ISNORMAL:
14414 case BUILT_IN_ISGREATER:
14415 case BUILT_IN_ISGREATEREQUAL:
14416 case BUILT_IN_ISLESS:
14417 case BUILT_IN_ISLESSEQUAL:
14418 case BUILT_IN_ISLESSGREATER:
14419 case BUILT_IN_ISUNORDERED:
14420 case BUILT_IN_VA_ARG_PACK:
14421 case BUILT_IN_VA_ARG_PACK_LEN:
14422 case BUILT_IN_VA_COPY:
14423 case BUILT_IN_TRAP:
14424 case BUILT_IN_SAVEREGS:
14425 case BUILT_IN_POPCOUNTL:
14426 case BUILT_IN_POPCOUNTLL:
14427 case BUILT_IN_POPCOUNTIMAX:
14428 case BUILT_IN_POPCOUNT:
14429 case BUILT_IN_PARITYL:
14430 case BUILT_IN_PARITYLL:
14431 case BUILT_IN_PARITYIMAX:
14432 case BUILT_IN_PARITY:
14433 case BUILT_IN_LABS:
14434 case BUILT_IN_LLABS:
14435 case BUILT_IN_PREFETCH:
14436 return true;
14437
14438 default:
14439 return is_simple_builtin (decl);
14440 }
14441
14442 return false;
14443 }
14444