1 /* Expand builtin functions.
2 Copyright (C) 1988-2018 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "memmodel.h"
32 #include "gimple.h"
33 #include "predict.h"
34 #include "tm_p.h"
35 #include "stringpool.h"
36 #include "tree-vrp.h"
37 #include "tree-ssanames.h"
38 #include "expmed.h"
39 #include "optabs.h"
40 #include "emit-rtl.h"
41 #include "recog.h"
42 #include "diagnostic-core.h"
43 #include "alias.h"
44 #include "fold-const.h"
45 #include "fold-const-call.h"
46 #include "gimple-ssa-warn-restrict.h"
47 #include "stor-layout.h"
48 #include "calls.h"
49 #include "varasm.h"
50 #include "tree-object-size.h"
51 #include "realmpfr.h"
52 #include "cfgrtl.h"
53 #include "except.h"
54 #include "dojump.h"
55 #include "explow.h"
56 #include "stmt.h"
57 #include "expr.h"
58 #include "libfuncs.h"
59 #include "output.h"
60 #include "typeclass.h"
61 #include "langhooks.h"
62 #include "value-prof.h"
63 #include "builtins.h"
64 #include "stringpool.h"
65 #include "attribs.h"
66 #include "asan.h"
67 #include "tree-chkp.h"
68 #include "rtl-chkp.h"
69 #include "internal-fn.h"
70 #include "case-cfn-macros.h"
71 #include "gimple-fold.h"
72 #include "intl.h"
73 #include "file-prefix-map.h" /* remap_macro_filename() */
74
75 struct target_builtins default_target_builtins;
76 #if SWITCHABLE_TARGET
77 struct target_builtins *this_target_builtins = &default_target_builtins;
78 #endif
79
80 /* Define the names of the builtin function types and codes. */
81 const char *const built_in_class_names[BUILT_IN_LAST]
82 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
83
84 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
85 const char * built_in_names[(int) END_BUILTINS] =
86 {
87 #include "builtins.def"
88 };
89
90 /* Set up an array of builtin_info_type, making sure each element's decl is
91 initialized to NULL_TREE. */
92 builtin_info_type builtin_info[(int)END_BUILTINS];
93
94 /* True if __builtin_constant_p should be folded right away. */
95 bool force_folding_builtin_constant_p;
96
97 static rtx c_readstr (const char *, scalar_int_mode);
98 static int target_char_cast (tree, char *);
99 static rtx get_memory_rtx (tree, tree);
100 static int apply_args_size (void);
101 static int apply_result_size (void);
102 static rtx result_vector (int, rtx);
103 static void expand_builtin_prefetch (tree);
104 static rtx expand_builtin_apply_args (void);
105 static rtx expand_builtin_apply_args_1 (void);
106 static rtx expand_builtin_apply (rtx, rtx, rtx);
107 static void expand_builtin_return (rtx);
108 static enum type_class type_to_class (tree);
109 static rtx expand_builtin_classify_type (tree);
110 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
111 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
112 static rtx expand_builtin_interclass_mathfn (tree, rtx);
113 static rtx expand_builtin_sincos (tree);
114 static rtx expand_builtin_cexpi (tree, rtx);
115 static rtx expand_builtin_int_roundingfn (tree, rtx);
116 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
117 static rtx expand_builtin_next_arg (void);
118 static rtx expand_builtin_va_start (tree);
119 static rtx expand_builtin_va_end (tree);
120 static rtx expand_builtin_va_copy (tree);
121 static rtx expand_builtin_strcmp (tree, rtx);
122 static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
123 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
124 static rtx expand_builtin_memchr (tree, rtx);
125 static rtx expand_builtin_memcpy (tree, rtx);
126 static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
127 static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
128 rtx target, tree exp, int endp);
129 static rtx expand_builtin_memmove (tree, rtx);
130 static rtx expand_builtin_mempcpy (tree, rtx);
131 static rtx expand_builtin_mempcpy_with_bounds (tree, rtx);
132 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, int);
133 static rtx expand_builtin_strcat (tree, rtx);
134 static rtx expand_builtin_strcpy (tree, rtx);
135 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
136 static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
137 static rtx expand_builtin_stpncpy (tree, rtx);
138 static rtx expand_builtin_strncat (tree, rtx);
139 static rtx expand_builtin_strncpy (tree, rtx);
140 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
141 static rtx expand_builtin_memset (tree, rtx, machine_mode);
142 static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
143 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
144 static rtx expand_builtin_bzero (tree);
145 static rtx expand_builtin_strlen (tree, rtx, machine_mode);
146 static rtx expand_builtin_alloca (tree);
147 static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
148 static rtx expand_builtin_frame_address (tree, tree);
149 static tree stabilize_va_list_loc (location_t, tree, int);
150 static rtx expand_builtin_expect (tree, rtx);
151 static tree fold_builtin_constant_p (tree);
152 static tree fold_builtin_classify_type (tree);
153 static tree fold_builtin_strlen (location_t, tree, tree);
154 static tree fold_builtin_inf (location_t, tree, int);
155 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
156 static bool validate_arg (const_tree, enum tree_code code);
157 static rtx expand_builtin_fabs (tree, rtx, rtx);
158 static rtx expand_builtin_signbit (tree, rtx);
159 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
160 static tree fold_builtin_isascii (location_t, tree);
161 static tree fold_builtin_toascii (location_t, tree);
162 static tree fold_builtin_isdigit (location_t, tree);
163 static tree fold_builtin_fabs (location_t, tree, tree);
164 static tree fold_builtin_abs (location_t, tree, tree);
165 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
166 enum tree_code);
167 static tree fold_builtin_0 (location_t, tree);
168 static tree fold_builtin_1 (location_t, tree, tree);
169 static tree fold_builtin_2 (location_t, tree, tree, tree);
170 static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
171 static tree fold_builtin_varargs (location_t, tree, tree*, int);
172
173 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
174 static tree fold_builtin_strspn (location_t, tree, tree);
175 static tree fold_builtin_strcspn (location_t, tree, tree);
176
177 static rtx expand_builtin_object_size (tree);
178 static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
179 enum built_in_function);
180 static void maybe_emit_chk_warning (tree, enum built_in_function);
181 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
182 static void maybe_emit_free_warning (tree);
183 static tree fold_builtin_object_size (tree, tree);
184
185 unsigned HOST_WIDE_INT target_newline;
186 unsigned HOST_WIDE_INT target_percent;
187 static unsigned HOST_WIDE_INT target_c;
188 static unsigned HOST_WIDE_INT target_s;
189 char target_percent_c[3];
190 char target_percent_s[3];
191 char target_percent_s_newline[4];
192 static tree do_mpfr_remquo (tree, tree, tree);
193 static tree do_mpfr_lgamma_r (tree, tree, tree);
194 static void expand_builtin_sync_synchronize (void);
195
196 /* Return true if NAME starts with __builtin_, __sync_ or __atomic_. */
197
198 static bool
199 is_builtin_name (const char *name)
200 {
201 if (strncmp (name, "__builtin_", 10) == 0)
202 return true;
203 if (strncmp (name, "__sync_", 7) == 0)
204 return true;
205 if (strncmp (name, "__atomic_", 9) == 0)
206 return true;
207 return false;
208 }
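
/* For example, "__builtin_memcpy", "__sync_fetch_and_add" and
   "__atomic_load_n" all match the prefixes above, whereas a plain
   "memcpy" does not. */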
209
210
211 /* Return true if DECL is a function symbol representing a built-in. */
212
213 bool
214 is_builtin_fn (tree decl)
215 {
216 return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
217 }
218
219 /* Return true if NODE should be considered for inline expansion regardless
220 of the optimization level. This is the case whenever the function is
221 invoked under its "internal" name, which normally begins with "__builtin". */
222
223 bool
224 called_as_built_in (tree node)
225 {
226 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME, since
227 we want the name used to call the function, not the name it
228 will have. */
229 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
230 return is_builtin_name (name);
231 }
232
233 /* Compute values M and N such that M divides (address of EXP - N) and such
234 that N < M. If these numbers can be determined, store M in *ALIGNP and N in
235 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
236 *ALIGNP and any bit-offset to *BITPOSP.
237
238 Note that the address (and thus the alignment) computed here is based
239 on the address to which a symbol resolves, whereas DECL_ALIGN is based
240 on the address at which an object is actually located. These two
241 addresses are not always the same. For example, on ARM targets,
242 the address &foo of a Thumb function foo() has the lowest bit set,
243 whereas foo() itself starts on an even address.
244
245 If ADDR_P is true we are taking the address of the memory reference EXP
246 and thus cannot rely on the access taking place. */
247
248 static bool
249 get_object_alignment_2 (tree exp, unsigned int *alignp,
250 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
251 {
252 poly_int64 bitsize, bitpos;
253 tree offset;
254 machine_mode mode;
255 int unsignedp, reversep, volatilep;
256 unsigned int align = BITS_PER_UNIT;
257 bool known_alignment = false;
258
259 /* Get the innermost object and the constant (bitpos) and possibly
260 variable (offset) offset of the access. */
261 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
262 &unsignedp, &reversep, &volatilep);
263
264 /* Extract alignment information from the innermost object and
265 possibly adjust bitpos and offset. */
266 if (TREE_CODE (exp) == FUNCTION_DECL)
267 {
268 /* Function addresses can encode extra information besides their
269 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
270 allows the low bit to be used as a virtual bit, we know
271 that the address itself must be at least 2-byte aligned. */
272 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
273 align = 2 * BITS_PER_UNIT;
274 }
275 else if (TREE_CODE (exp) == LABEL_DECL)
276 ;
277 else if (TREE_CODE (exp) == CONST_DECL)
278 {
279 /* The alignment of a CONST_DECL is determined by its initializer. */
280 exp = DECL_INITIAL (exp);
281 align = TYPE_ALIGN (TREE_TYPE (exp));
282 if (CONSTANT_CLASS_P (exp))
283 align = targetm.constant_alignment (exp, align);
284
285 known_alignment = true;
286 }
287 else if (DECL_P (exp))
288 {
289 align = DECL_ALIGN (exp);
290 known_alignment = true;
291 }
292 else if (TREE_CODE (exp) == INDIRECT_REF
293 || TREE_CODE (exp) == MEM_REF
294 || TREE_CODE (exp) == TARGET_MEM_REF)
295 {
296 tree addr = TREE_OPERAND (exp, 0);
297 unsigned ptr_align;
298 unsigned HOST_WIDE_INT ptr_bitpos;
299 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
300
301 /* If the address is explicitly aligned, handle that. */
302 if (TREE_CODE (addr) == BIT_AND_EXPR
303 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
304 {
305 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
306 ptr_bitmask *= BITS_PER_UNIT;
307 align = least_bit_hwi (ptr_bitmask);
308 addr = TREE_OPERAND (addr, 0);
309 }
310
311 known_alignment
312 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
313 align = MAX (ptr_align, align);
314
315 /* Re-apply explicit alignment to the bitpos. */
316 ptr_bitpos &= ptr_bitmask;
317
318 /* The alignment of the pointer operand in a TARGET_MEM_REF
319 has to take the variable offset parts into account. */
320 if (TREE_CODE (exp) == TARGET_MEM_REF)
321 {
322 if (TMR_INDEX (exp))
323 {
324 unsigned HOST_WIDE_INT step = 1;
325 if (TMR_STEP (exp))
326 step = TREE_INT_CST_LOW (TMR_STEP (exp));
327 align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
328 }
329 if (TMR_INDEX2 (exp))
330 align = BITS_PER_UNIT;
331 known_alignment = false;
332 }
333
334 /* When EXP is an actual memory reference then we can use
335 TYPE_ALIGN of a pointer indirection to derive alignment.
336 Do so only if get_pointer_alignment_1 did not reveal absolute
337 alignment knowledge and if using that alignment would
338 improve the situation. */
339 unsigned int talign;
340 if (!addr_p && !known_alignment
341 && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
342 && talign > align)
343 align = talign;
344 else
345 {
346 /* Else adjust bitpos accordingly. */
347 bitpos += ptr_bitpos;
348 if (TREE_CODE (exp) == MEM_REF
349 || TREE_CODE (exp) == TARGET_MEM_REF)
350 bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
351 }
352 }
353 else if (TREE_CODE (exp) == STRING_CST)
354 {
355 /* STRING_CSTs are the only constant objects we allow not to be
356 wrapped inside a CONST_DECL. */
357 align = TYPE_ALIGN (TREE_TYPE (exp));
358 if (CONSTANT_CLASS_P (exp))
359 align = targetm.constant_alignment (exp, align);
360
361 known_alignment = true;
362 }
363
364 /* If there is a non-constant offset part extract the maximum
365 alignment that can prevail. */
366 if (offset)
367 {
368 unsigned int trailing_zeros = tree_ctz (offset);
369 if (trailing_zeros < HOST_BITS_PER_INT)
370 {
371 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
372 if (inner)
373 align = MIN (align, inner);
374 }
375 }
376
377 /* Account for the alignment of runtime coefficients, so that the constant
378 bitpos is guaranteed to be accurate. */
379 unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
380 if (alt_align != 0 && alt_align < align)
381 {
382 align = alt_align;
383 known_alignment = false;
384 }
385
386 *alignp = align;
387 *bitposp = bitpos.coeffs[0] & (align - 1);
388 return known_alignment;
389 }
390
391 /* For a memory reference expression EXP compute values M and N such that M
392 divides (&EXP - N) and such that N < M. If these numbers can be determined,
393 store M in *ALIGNP and N in *BITPOSP and return true. Otherwise return false
394 and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP. */
395
396 bool
397 get_object_alignment_1 (tree exp, unsigned int *alignp,
398 unsigned HOST_WIDE_INT *bitposp)
399 {
400 return get_object_alignment_2 (exp, alignp, bitposp, false);
401 }
402
403 /* Return the alignment in bits of EXP, an object. */
404
405 unsigned int
406 get_object_alignment (tree exp)
407 {
408 unsigned HOST_WIDE_INT bitpos = 0;
409 unsigned int align;
410
411 get_object_alignment_1 (exp, &align, &bitpos);
412
413 /* align and bitpos now specify known low bits of the pointer.
414 ptr & (align - 1) == bitpos. */
415
416 if (bitpos != 0)
417 align = least_bit_hwi (bitpos);
418 return align;
419 }
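
/* For example, if the call above reports align == 32 and bitpos == 16
   (both in bits), the object's address is known to lie 2 bytes past a
   4-byte boundary, so least_bit_hwi yields a guaranteed alignment of
   16 bits (2 bytes). */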
420
421 /* For a pointer valued expression EXP compute values M and N such that M
422 divides (EXP - N) and such that N < M. If these numbers can be determined,
423 store M in *ALIGNP and N in *BITPOSP and return true. Return false if
424 the results are just a conservative approximation.
425
426 If EXP is not a pointer, false is returned too. */
427
428 bool
429 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
430 unsigned HOST_WIDE_INT *bitposp)
431 {
432 STRIP_NOPS (exp);
433
434 if (TREE_CODE (exp) == ADDR_EXPR)
435 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
436 alignp, bitposp, true);
437 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
438 {
439 unsigned int align;
440 unsigned HOST_WIDE_INT bitpos;
441 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
442 &align, &bitpos);
443 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
444 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
445 else
446 {
447 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
448 if (trailing_zeros < HOST_BITS_PER_INT)
449 {
450 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
451 if (inner)
452 align = MIN (align, inner);
453 }
454 }
455 *alignp = align;
456 *bitposp = bitpos & (align - 1);
457 return res;
458 }
459 else if (TREE_CODE (exp) == SSA_NAME
460 && POINTER_TYPE_P (TREE_TYPE (exp)))
461 {
462 unsigned int ptr_align, ptr_misalign;
463 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
464
465 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
466 {
467 *bitposp = ptr_misalign * BITS_PER_UNIT;
468 *alignp = ptr_align * BITS_PER_UNIT;
469 /* Make sure to return a sensible alignment when the multiplication
470 by BITS_PER_UNIT overflowed. */
471 if (*alignp == 0)
472 *alignp = 1u << (HOST_BITS_PER_INT - 1);
473 /* We cannot really tell whether this result is an approximation. */
474 return false;
475 }
476 else
477 {
478 *bitposp = 0;
479 *alignp = BITS_PER_UNIT;
480 return false;
481 }
482 }
483 else if (TREE_CODE (exp) == INTEGER_CST)
484 {
485 *alignp = BIGGEST_ALIGNMENT;
486 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
487 & (BIGGEST_ALIGNMENT - 1));
488 return true;
489 }
490
491 *bitposp = 0;
492 *alignp = BITS_PER_UNIT;
493 return false;
494 }
495
496 /* Return the alignment in bits of EXP, a pointer valued expression.
497 The alignment returned is, by default, the alignment of the thing that
498 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
499
500 Otherwise, look at the expression to see if we can do better, i.e., if the
501 expression is actually pointing at an object whose alignment is tighter. */
502
503 unsigned int
504 get_pointer_alignment (tree exp)
505 {
506 unsigned HOST_WIDE_INT bitpos = 0;
507 unsigned int align;
508
509 get_pointer_alignment_1 (exp, &align, &bitpos);
510
511 /* align and bitpos now specify known low bits of the pointer.
512 ptr & (align - 1) == bitpos. */
513
514 if (bitpos != 0)
515 align = least_bit_hwi (bitpos);
516
517 return align;
518 }
519
520 /* Return the number of leading non-zero elements in the sequence
521 [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
522 ELTSIZE must be 1, 2, or 4 (a power of 2 less than 8). Used by c_strlen. */
523
524 static unsigned
525 string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
526 {
527 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
528
529 unsigned n;
530
531 if (eltsize == 1)
532 {
533 /* Optimize the common case of plain char. */
534 for (n = 0; n < maxelts; n++)
535 {
536 const char *elt = (const char*) ptr + n;
537 if (!*elt)
538 break;
539 }
540 }
541 else
542 {
543 for (n = 0; n < maxelts; n++)
544 {
545 const char *elt = (const char*) ptr + n * eltsize;
546 if (!memcmp (elt, "\0\0\0\0", eltsize))
547 break;
548 }
549 }
550 return n;
551 }
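
/* As an example, for a wide string with ELTSIZE == 2 whose bytes are
   "a\0b\0\0\0" (L"ab" on a little-endian target), string_length returns 2:
   the first element whose ELTSIZE bytes are all zero is the third one. */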
552
553 /* Compute the length of a null-terminated character string or wide
554 character string handling character sizes of 1, 2, and 4 bytes.
555 TREE_STRING_LENGTH is not the right way because it evaluates to
556 the size of the character array in bytes (as opposed to characters)
557 and because it can contain a zero byte in the middle.
558
559 ONLY_VALUE should be nonzero if the result is not going to be emitted
560 into the instruction stream and zero if it is going to be expanded.
561 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
562 is returned, otherwise NULL, since
563 len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
564 evaluate the side-effects.
565
566 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
567 accesses. Note that this implies the result is not going to be emitted
568 into the instruction stream.
569
570 The value returned is of type `ssizetype'.
571
572 Unfortunately, string_constant can't access the values of const char
573 arrays with initializers, so neither can we do so here. */
574
575 tree
576 c_strlen (tree src, int only_value)
577 {
578 STRIP_NOPS (src);
579 if (TREE_CODE (src) == COND_EXPR
580 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
581 {
582 tree len1, len2;
583
584 len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
585 len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
586 if (tree_int_cst_equal (len1, len2))
587 return len1;
588 }
589
590 if (TREE_CODE (src) == COMPOUND_EXPR
591 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
592 return c_strlen (TREE_OPERAND (src, 1), only_value);
593
594 location_t loc = EXPR_LOC_OR_LOC (src, input_location);
595
596 /* Offset from the beginning of the string in bytes. */
597 tree byteoff;
598 src = string_constant (src, &byteoff);
599 if (src == 0)
600 return NULL_TREE;
601
602 /* Determine the size of the string element. */
603 unsigned eltsize
604 = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src))));
605
606 /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
607 length of SRC. */
608 unsigned maxelts = TREE_STRING_LENGTH (src) / eltsize - 1;
609
610 /* PTR can point to the byte representation of any string type, including
611 char* and wchar_t*. */
612 const char *ptr = TREE_STRING_POINTER (src);
613
614 if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
615 {
616 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
617 compute the offset to the following null if we don't know where to
618 start searching for it. */
619 if (string_length (ptr, eltsize, maxelts) < maxelts)
620 {
621 /* Return when an embedded null character is found. */
622 return NULL_TREE;
623 }
624
625 if (!maxelts)
626 return ssize_int (0);
627
628 /* We don't know the starting offset, but we do know that the string
629 has no internal zero bytes. We can assume that the offset falls
630 within the bounds of the string; otherwise, the programmer deserves
631 what he gets. Subtract the offset from the length of the string,
632 and return that. This would perhaps not be valid if we were dealing
633 with named arrays in addition to literal string constants. */
634
635 return size_diffop_loc (loc, size_int (maxelts * eltsize), byteoff);
636 }
637
638 /* Offset from the beginning of the string in elements. */
639 HOST_WIDE_INT eltoff;
640
641 /* We have a known offset into the string. Start searching there for
642 a null character if we can represent it as a single HOST_WIDE_INT. */
643 if (byteoff == 0)
644 eltoff = 0;
645 else if (! tree_fits_shwi_p (byteoff))
646 eltoff = -1;
647 else
648 eltoff = tree_to_shwi (byteoff) / eltsize;
649
650 /* If the offset is known to be out of bounds, warn, and call strlen at
651 runtime. */
652 if (eltoff < 0 || eltoff > maxelts)
653 {
654 /* Suppress multiple warnings for propagated constant strings. */
655 if (only_value != 2
656 && !TREE_NO_WARNING (src))
657 {
658 warning_at (loc, OPT_Warray_bounds,
659 "offset %qwi outside bounds of constant string",
660 eltoff);
661 TREE_NO_WARNING (src) = 1;
662 }
663 return NULL_TREE;
664 }
665
666 /* Use strlen to search for the first zero byte. Since any strings
667 constructed with build_string will have nulls appended, we win even
668 if we get handed something like (char[4])"abcd".
669
670 Since ELTOFF is our starting index into the string, no further
671 calculation is needed. */
672 unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
673 maxelts - eltoff);
674
675 return ssize_int (len);
676 }
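
/* A worked example of the common path: for c_strlen ((char *) "hello" + 2, 1),
   string_constant returns the "hello" constant with BYTEOFF 2, ELTOFF becomes
   2, and the scan from that offset finds the terminating null after 3
   elements, so ssize_int (3) is returned. */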
677
678 /* Return a constant integer corresponding to target reading
679 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
680
681 static rtx
682 c_readstr (const char *str, scalar_int_mode mode)
683 {
684 HOST_WIDE_INT ch;
685 unsigned int i, j;
686 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
687
688 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
689 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
690 / HOST_BITS_PER_WIDE_INT;
691
692 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
693 for (i = 0; i < len; i++)
694 tmp[i] = 0;
695
696 ch = 1;
697 for (i = 0; i < GET_MODE_SIZE (mode); i++)
698 {
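      /* Map the host byte index I to the target byte index J, honoring
	 word order and then byte order within each word, and finally
	 convert J to a bit offset. */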
699 j = i;
700 if (WORDS_BIG_ENDIAN)
701 j = GET_MODE_SIZE (mode) - i - 1;
702 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
703 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
704 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
705 j *= BITS_PER_UNIT;
706
707 if (ch)
708 ch = (unsigned char) str[i];
709 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
710 }
711
712 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
713 return immed_wide_int_const (c, mode);
714 }
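
/* For example, assuming a little-endian target (neither BYTES_BIG_ENDIAN nor
   WORDS_BIG_ENDIAN), c_readstr ("abcd", SImode) produces the constant
   0x64636261, while a big-endian target yields 0x61626364. */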
715
716 /* Cast a target constant CST to target CHAR and if that value fits into
717 host char type, return zero and put that value into variable pointed to by
718 P. */
719
720 static int
721 target_char_cast (tree cst, char *p)
722 {
723 unsigned HOST_WIDE_INT val, hostval;
724
725 if (TREE_CODE (cst) != INTEGER_CST
726 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
727 return 1;
728
729 /* Do not care if it fits or not right here. */
730 val = TREE_INT_CST_LOW (cst);
731
732 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
733 val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
734
735 hostval = val;
736 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
737 hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
738
739 if (val != hostval)
740 return 1;
741
742 *p = hostval;
743 return 0;
744 }
745
746 /* Similar to save_expr, but assumes that arbitrary code is not executed
747 in between the multiple evaluations. In particular, we assume that a
748 non-addressable local variable will not be modified. */
749
750 static tree
751 builtin_save_expr (tree exp)
752 {
753 if (TREE_CODE (exp) == SSA_NAME
754 || (TREE_ADDRESSABLE (exp) == 0
755 && (TREE_CODE (exp) == PARM_DECL
756 || (VAR_P (exp) && !TREE_STATIC (exp)))))
757 return exp;
758
759 return save_expr (exp);
760 }
761
762 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
763 times to get the address of either a higher stack frame, or a return
764 address located within it (depending on FNDECL_CODE). */
765
766 static rtx
767 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
768 {
769 int i;
770 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
771 if (tem == NULL_RTX)
772 {
773 /* For a zero count with __builtin_return_address, we don't care what
774 frame address we return, because target-specific definitions will
775 override us. Therefore frame pointer elimination is OK, and using
776 the soft frame pointer is OK.
777
778 For a nonzero count, or a zero count with __builtin_frame_address,
779 we require a stable offset from the current frame pointer to the
780 previous one, so we must use the hard frame pointer, and
781 we must disable frame pointer elimination. */
782 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
783 tem = frame_pointer_rtx;
784 else
785 {
786 tem = hard_frame_pointer_rtx;
787
788 /* Tell reload not to eliminate the frame pointer. */
789 crtl->accesses_prior_frames = 1;
790 }
791 }
792
793 if (count > 0)
794 SETUP_FRAME_ADDRESSES ();
795
796 /* On the SPARC, the return address is not in the frame, it is in a
797 register. There is no way to access it off of the current frame
798 pointer, but it can be accessed off the previous frame pointer by
799 reading the value from the register window save area. */
800 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
801 count--;
802
803 /* Scan back COUNT frames to the specified frame. */
804 for (i = 0; i < count; i++)
805 {
806 /* Assume the dynamic chain pointer is in the word that the
807 frame address points to, unless otherwise specified. */
808 tem = DYNAMIC_CHAIN_ADDRESS (tem);
809 tem = memory_address (Pmode, tem);
810 tem = gen_frame_mem (Pmode, tem);
811 tem = copy_to_reg (tem);
812 }
813
814 /* For __builtin_frame_address, return what we've got. But, on
815 the SPARC for example, we may have to add a bias. */
816 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
817 return FRAME_ADDR_RTX (tem);
818
819 /* For __builtin_return_address, get the return address from that frame. */
820 #ifdef RETURN_ADDR_RTX
821 tem = RETURN_ADDR_RTX (count, tem);
822 #else
823 tem = memory_address (Pmode,
824 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
825 tem = gen_frame_mem (Pmode, tem);
826 #endif
827 return tem;
828 }
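
/* At the source level this implements, e.g., __builtin_return_address (0)
   and __builtin_frame_address (1); a nonzero COUNT requires walking the
   dynamic chain as above and is only reliable when the frames being walked
   preserve their frame pointers. */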
829
830 /* Alias set used for setjmp buffer. */
831 static alias_set_type setjmp_alias_set = -1;
832
833 /* Construct the leading half of a __builtin_setjmp call. Control will
834 return to RECEIVER_LABEL. This is also called directly by the SJLJ
835 exception handling code. */
836
837 void
838 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
839 {
840 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
841 rtx stack_save;
842 rtx mem;
843
844 if (setjmp_alias_set == -1)
845 setjmp_alias_set = new_alias_set ();
846
847 buf_addr = convert_memory_address (Pmode, buf_addr);
848
849 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
850
851 /* We store the frame pointer and the address of receiver_label in
852 the buffer and use the rest of it for the stack save area, which
853 is machine-dependent. */
854
855 mem = gen_rtx_MEM (Pmode, buf_addr);
856 set_mem_alias_set (mem, setjmp_alias_set);
857 emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
858
859 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
860 GET_MODE_SIZE (Pmode)));
861 set_mem_alias_set (mem, setjmp_alias_set);
862
863 emit_move_insn (validize_mem (mem),
864 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
865
866 stack_save = gen_rtx_MEM (sa_mode,
867 plus_constant (Pmode, buf_addr,
868 2 * GET_MODE_SIZE (Pmode)));
869 set_mem_alias_set (stack_save, setjmp_alias_set);
870 emit_stack_save (SAVE_NONLOCAL, &stack_save);
871
872 /* If there is further processing to do, do it. */
873 if (targetm.have_builtin_setjmp_setup ())
874 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
875
876 /* We have a nonlocal label. */
877 cfun->has_nonlocal_label = 1;
878 }
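
/* To summarize the buffer layout used above: word 0 receives the
   (target-defined) frame value, word 1 the address of RECEIVER_LABEL, and
   the remainder, starting at offset 2 * GET_MODE_SIZE (Pmode), holds the
   machine-dependent nonlocal stack save area. */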
879
880 /* Construct the trailing part of a __builtin_setjmp call. This is
881 also called directly by the SJLJ exception handling code.
882 If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler. */
883
884 void
885 expand_builtin_setjmp_receiver (rtx receiver_label)
886 {
887 rtx chain;
888
889 /* Mark the FP as used when we get here, so we have to make sure it's
890 marked as used by this function. */
891 emit_use (hard_frame_pointer_rtx);
892
893 /* Mark the static chain as clobbered here so life information
894 doesn't get messed up for it. */
895 chain = rtx_for_static_chain (current_function_decl, true);
896 if (chain && REG_P (chain))
897 emit_clobber (chain);
898
899 /* Now put in the code to restore the frame pointer, and argument
900 pointer, if needed. */
901 if (! targetm.have_nonlocal_goto ())
902 {
903 /* First adjust our frame pointer to its actual value. It was
904 previously set to the start of the virtual area corresponding to
905 the stacked variables when we branched here and now needs to be
906 adjusted to the actual hardware fp value.
907
908 Assignments to virtual registers are converted by
909 instantiate_virtual_regs into the corresponding assignment
910 to the underlying register (fp in this case) that makes
911 the original assignment true.
912 So the following insn will actually be decrementing fp by
913 TARGET_STARTING_FRAME_OFFSET. */
914 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
915
916 /* Restoring the frame pointer also modifies the hard frame pointer.
917 Mark it used (so that the previous assignment remains live once
918 the frame pointer is eliminated) and clobbered (to represent the
919 implicit update from the assignment). */
920 emit_use (hard_frame_pointer_rtx);
921 emit_clobber (hard_frame_pointer_rtx);
922 }
923
924 if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
925 {
926 /* If the argument pointer can be eliminated in favor of the
927 frame pointer, we don't need to restore it. We assume here
928 that if such an elimination is present, it can always be used.
929 This is the case on all known machines; if we don't make this
930 assumption, we do unnecessary saving on many machines. */
931 size_t i;
932 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
933
934 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
935 if (elim_regs[i].from == ARG_POINTER_REGNUM
936 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
937 break;
938
939 if (i == ARRAY_SIZE (elim_regs))
940 {
941 /* Now restore our arg pointer from the address at which it
942 was saved in our stack frame. */
943 emit_move_insn (crtl->args.internal_arg_pointer,
944 copy_to_reg (get_arg_pointer_save_area ()));
945 }
946 }
947
948 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
949 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
950 else if (targetm.have_nonlocal_goto_receiver ())
951 emit_insn (targetm.gen_nonlocal_goto_receiver ());
952 else
953 { /* Nothing */ }
954
955 /* We must not allow the code we just generated to be reordered by
956 scheduling. Specifically, the update of the frame pointer must
957 happen immediately, not later. */
958 emit_insn (gen_blockage ());
959 }
960
961 /* __builtin_longjmp is passed a pointer to an array of five words (not
962 all will be used on all machines). It operates similarly to the C
963 library function of the same name, but is more efficient. Much of
964 the code below is copied from the handling of non-local gotos. */
965
966 static void
967 expand_builtin_longjmp (rtx buf_addr, rtx value)
968 {
969 rtx fp, lab, stack;
970 rtx_insn *insn, *last;
971 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
972
973 /* DRAP is needed for stack realign if longjmp is expanded to current
974 function */
975 if (SUPPORTS_STACK_ALIGNMENT)
976 crtl->need_drap = true;
977
978 if (setjmp_alias_set == -1)
979 setjmp_alias_set = new_alias_set ();
980
981 buf_addr = convert_memory_address (Pmode, buf_addr);
982
983 buf_addr = force_reg (Pmode, buf_addr);
984
985 /* We require the user to pass a second argument of 1, because
986 that is what builtin_setjmp will return. */
987 gcc_assert (value == const1_rtx);
988
989 last = get_last_insn ();
990 if (targetm.have_builtin_longjmp ())
991 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
992 else
993 {
994 fp = gen_rtx_MEM (Pmode, buf_addr);
995 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
996 GET_MODE_SIZE (Pmode)));
997
998 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
999 2 * GET_MODE_SIZE (Pmode)));
1000 set_mem_alias_set (fp, setjmp_alias_set);
1001 set_mem_alias_set (lab, setjmp_alias_set);
1002 set_mem_alias_set (stack, setjmp_alias_set);
1003
1004 /* Pick up FP, label, and SP from the block and jump. This code is
1005 from expand_goto in stmt.c; see there for detailed comments. */
1006 if (targetm.have_nonlocal_goto ())
1007 /* We have to pass a value to the nonlocal_goto pattern that will
1008 get copied into the static_chain pointer, but it does not matter
1009 what that value is, because builtin_setjmp does not use it. */
1010 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
1011 else
1012 {
1013 lab = copy_to_reg (lab);
1014
1015 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1016 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1017
1018 emit_move_insn (hard_frame_pointer_rtx, fp);
1019 emit_stack_restore (SAVE_NONLOCAL, stack);
1020
1021 emit_use (hard_frame_pointer_rtx);
1022 emit_use (stack_pointer_rtx);
1023 emit_indirect_jump (lab);
1024 }
1025 }
1026
1027 /* Search backwards and mark the jump insn as a non-local goto.
1028 Note that this precludes the use of __builtin_longjmp to a
1029 __builtin_setjmp target in the same function. However, we've
1030 already cautioned the user that these functions are for
1031 internal exception handling use only. */
1032 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1033 {
1034 gcc_assert (insn != last);
1035
1036 if (JUMP_P (insn))
1037 {
1038 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1039 break;
1040 }
1041 else if (CALL_P (insn))
1042 break;
1043 }
1044 }
1045
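/* Return true if the argument iterator ITER, walking the arguments of a
   CALL_EXPR, has more arguments to visit. */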
1046 static inline bool
1047 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1048 {
1049 return (iter->i < iter->n);
1050 }
1051
1052 /* This function validates the types of a function call argument list
1053 against a specified list of tree_codes. If the last specifier is a 0,
1054 that represents an ellipsis, otherwise the last specifier must be a
1055 VOID_TYPE. */
1056
1057 static bool
1058 validate_arglist (const_tree callexpr, ...)
1059 {
1060 enum tree_code code;
1061 bool res = false;
1062 va_list ap;
1063 const_call_expr_arg_iterator iter;
1064 const_tree arg;
1065
1066 va_start (ap, callexpr);
1067 init_const_call_expr_arg_iterator (callexpr, &iter);
1068
1069 /* Get a bitmap of pointer argument numbers declared attribute nonnull. */
1070 tree fn = CALL_EXPR_FN (callexpr);
1071 bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));
1072
1073 for (unsigned argno = 1; ; ++argno)
1074 {
1075 code = (enum tree_code) va_arg (ap, int);
1076
1077 switch (code)
1078 {
1079 case 0:
1080 /* This signifies an ellipsis; any further arguments are all ok. */
1081 res = true;
1082 goto end;
1083 case VOID_TYPE:
1084 /* This signifies an endlink, if no arguments remain, return
1085 true, otherwise return false. */
1086 res = !more_const_call_expr_args_p (&iter);
1087 goto end;
1088 case POINTER_TYPE:
1089 /* The actual argument must be nonnull when either the whole
1090 called function has been declared nonnull, or when the formal
1091 argument corresponding to the actual argument has been. */
1092 if (argmap
1093 && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
1094 {
1095 arg = next_const_call_expr_arg (&iter);
1096 if (!validate_arg (arg, code) || integer_zerop (arg))
1097 goto end;
1098 break;
1099 }
1100 /* FALLTHRU */
1101 default:
1102 /* If no parameters remain or the parameter's code does not
1103 match the specified code, return false. Otherwise continue
1104 checking any remaining arguments. */
1105 arg = next_const_call_expr_arg (&iter);
1106 if (!validate_arg (arg, code))
1107 goto end;
1108 break;
1109 }
1110 }
1111
1112 /* We need gotos here since we can only have one VA_CLOSE in a
1113 function. */
1114 end: ;
1115 va_end (ap);
1116
1117 BITMAP_FREE (argmap);
1118
1119 return res;
1120 }
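
/* A typical use, as seen later in this file:

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)

   accepts exactly two pointer arguments, while a trailing 0 in place of
   VOID_TYPE would additionally accept any further arguments. */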
1121
1122 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1123 and the address of the save area. */
1124
1125 static rtx
1126 expand_builtin_nonlocal_goto (tree exp)
1127 {
1128 tree t_label, t_save_area;
1129 rtx r_label, r_save_area, r_fp, r_sp;
1130 rtx_insn *insn;
1131
1132 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1133 return NULL_RTX;
1134
1135 t_label = CALL_EXPR_ARG (exp, 0);
1136 t_save_area = CALL_EXPR_ARG (exp, 1);
1137
1138 r_label = expand_normal (t_label);
1139 r_label = convert_memory_address (Pmode, r_label);
1140 r_save_area = expand_normal (t_save_area);
1141 r_save_area = convert_memory_address (Pmode, r_save_area);
1142 /* Copy the address of the save location to a register just in case it was
1143 based on the frame pointer. */
1144 r_save_area = copy_to_reg (r_save_area);
1145 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1146 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1147 plus_constant (Pmode, r_save_area,
1148 GET_MODE_SIZE (Pmode)));
1149
1150 crtl->has_nonlocal_goto = 1;
1151
1152 /* ??? We no longer need to pass the static chain value, afaik. */
1153 if (targetm.have_nonlocal_goto ())
1154 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1155 else
1156 {
1157 r_label = copy_to_reg (r_label);
1158
1159 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1160 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1161
1162 /* Restore frame pointer for containing function. */
1163 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1164 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1165
1166 /* USE of hard_frame_pointer_rtx added for consistency;
1167 not clear if really needed. */
1168 emit_use (hard_frame_pointer_rtx);
1169 emit_use (stack_pointer_rtx);
1170
1171 /* If the architecture is using a GP register, we must
1172 conservatively assume that the target function makes use of it.
1173 The prologue of functions with nonlocal gotos must therefore
1174 initialize the GP register to the appropriate value, and we
1175 must then make sure that this value is live at the point
1176 of the jump. (Note that this doesn't necessarily apply
1177 to targets with a nonlocal_goto pattern; they are free
1178 to implement it in their own way. Note also that this is
1179 a no-op if the GP register is a global invariant.) */
1180 unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
1181 if (regnum != INVALID_REGNUM && fixed_regs[regnum])
1182 emit_use (pic_offset_table_rtx);
1183
1184 emit_indirect_jump (r_label);
1185 }
1186
1187 /* Search backwards to the jump insn and mark it as a
1188 non-local goto. */
1189 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1190 {
1191 if (JUMP_P (insn))
1192 {
1193 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1194 break;
1195 }
1196 else if (CALL_P (insn))
1197 break;
1198 }
1199
1200 return const0_rtx;
1201 }
1202
1203 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1204 (not all will be used on all machines) that was passed to __builtin_setjmp.
1205 It updates the stack pointer in that block to the current value. This is
1206 also called directly by the SJLJ exception handling code. */
1207
1208 void
1209 expand_builtin_update_setjmp_buf (rtx buf_addr)
1210 {
1211 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1212 buf_addr = convert_memory_address (Pmode, buf_addr);
1213 rtx stack_save
1214 = gen_rtx_MEM (sa_mode,
1215 memory_address
1216 (sa_mode,
1217 plus_constant (Pmode, buf_addr,
1218 2 * GET_MODE_SIZE (Pmode))));
1219
1220 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1221 }
1222
1223 /* Expand a call to __builtin_prefetch. For a target that does not support
1224 data prefetch, evaluate the memory address argument in case it has side
1225 effects. */
1226
1227 static void
1228 expand_builtin_prefetch (tree exp)
1229 {
1230 tree arg0, arg1, arg2;
1231 int nargs;
1232 rtx op0, op1, op2;
1233
1234 if (!validate_arglist (exp, POINTER_TYPE, 0))
1235 return;
1236
1237 arg0 = CALL_EXPR_ARG (exp, 0);
1238
1239 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1240 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1241 locality). */
1242 nargs = call_expr_nargs (exp);
1243 if (nargs > 1)
1244 arg1 = CALL_EXPR_ARG (exp, 1);
1245 else
1246 arg1 = integer_zero_node;
1247 if (nargs > 2)
1248 arg2 = CALL_EXPR_ARG (exp, 2);
1249 else
1250 arg2 = integer_three_node;
1251
1252 /* Argument 0 is an address. */
1253 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1254
1255 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1256 if (TREE_CODE (arg1) != INTEGER_CST)
1257 {
1258 error ("second argument to %<__builtin_prefetch%> must be a constant");
1259 arg1 = integer_zero_node;
1260 }
1261 op1 = expand_normal (arg1);
1262 /* Argument 1 must be either zero or one. */
1263 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1264 {
1265 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1266 " using zero");
1267 op1 = const0_rtx;
1268 }
1269
1270 /* Argument 2 (locality) must be a compile-time constant int. */
1271 if (TREE_CODE (arg2) != INTEGER_CST)
1272 {
1273 error ("third argument to %<__builtin_prefetch%> must be a constant");
1274 arg2 = integer_zero_node;
1275 }
1276 op2 = expand_normal (arg2);
1277 /* Argument 2 must be 0, 1, 2, or 3. */
1278 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1279 {
1280 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1281 op2 = const0_rtx;
1282 }
1283
1284 if (targetm.have_prefetch ())
1285 {
1286 struct expand_operand ops[3];
1287
1288 create_address_operand (&ops[0], op0);
1289 create_integer_operand (&ops[1], INTVAL (op1));
1290 create_integer_operand (&ops[2], INTVAL (op2));
1291 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
1292 return;
1293 }
1294
1295 /* Don't do anything with direct references to volatile memory, but
1296 generate code to handle other side effects. */
1297 if (!MEM_P (op0) && side_effects_p (op0))
1298 emit_insn (op0);
1299 }
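
/* As an illustration, a source-level call such as

     __builtin_prefetch (&a[i], 0, 3);

   reaches this function with OP1 == const0_rtx (prefetch for read) and OP2
   equal to a CONST_INT of 3 (maximum temporal locality); targets without a
   prefetch pattern simply evaluate the address for side effects. */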
1300
1301 /* Get a MEM rtx for expression EXP which is the address of an operand
1302 to be used in a string instruction (cmpstrsi, movmemsi, ..). LEN is
1303 the maximum length of the block of memory that might be accessed or
1304 NULL if unknown. */
1305
1306 static rtx
1307 get_memory_rtx (tree exp, tree len)
1308 {
1309 tree orig_exp = exp;
1310 rtx addr, mem;
1311
1312 /* When EXP is a SAVE_EXPR that has not been resolved yet, MEM_ATTRS can
1313 still be derived from its operand; for expr->a.b only <variable>.a.b is recorded. */
1314 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1315 exp = TREE_OPERAND (exp, 0);
1316
1317 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1318 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1319
1320 /* Get an expression we can use to find the attributes to assign to MEM.
1321 First remove any nops. */
1322 while (CONVERT_EXPR_P (exp)
1323 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1324 exp = TREE_OPERAND (exp, 0);
1325
1326 /* Build a MEM_REF representing the whole accessed area as a byte blob,
1327 (as builtin stringops may alias with anything). */
1328 exp = fold_build2 (MEM_REF,
1329 build_array_type (char_type_node,
1330 build_range_type (sizetype,
1331 size_one_node, len)),
1332 exp, build_int_cst (ptr_type_node, 0));
1333
1334 /* If the MEM_REF has no acceptable address, try to get the base object
1335 from the original address we got, and build an all-aliasing
1336 unknown-sized access to that one. */
1337 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1338 set_mem_attributes (mem, exp, 0);
1339 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1340 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1341 0))))
1342 {
1343 exp = build_fold_addr_expr (exp);
1344 exp = fold_build2 (MEM_REF,
1345 build_array_type (char_type_node,
1346 build_range_type (sizetype,
1347 size_zero_node,
1348 NULL)),
1349 exp, build_int_cst (ptr_type_node, 0));
1350 set_mem_attributes (mem, exp, 0);
1351 }
1352 set_mem_alias_set (mem, 0);
1353 return mem;
1354 }
1355
1356 /* Built-in functions to perform an untyped call and return. */
1357
1358 #define apply_args_mode \
1359 (this_target_builtins->x_apply_args_mode)
1360 #define apply_result_mode \
1361 (this_target_builtins->x_apply_result_mode)
1362
1363 /* Return the size required for the block returned by __builtin_apply_args,
1364 and initialize apply_args_mode. */
1365
1366 static int
1367 apply_args_size (void)
1368 {
1369 static int size = -1;
1370 int align;
1371 unsigned int regno;
1372
1373 /* The values computed by this function never change. */
1374 if (size < 0)
1375 {
1376 /* The first value is the incoming arg-pointer. */
1377 size = GET_MODE_SIZE (Pmode);
1378
1379 /* The second value is the structure value address unless this is
1380 passed as an "invisible" first argument. */
1381 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1382 size += GET_MODE_SIZE (Pmode);
1383
1384 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1385 if (FUNCTION_ARG_REGNO_P (regno))
1386 {
1387 fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);
1388
1389 gcc_assert (mode != VOIDmode);
1390
1391 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1392 if (size % align != 0)
1393 size = CEIL (size, align) * align;
1394 size += GET_MODE_SIZE (mode);
1395 apply_args_mode[regno] = mode;
1396 }
1397 else
1398 {
1399 apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1400 }
1401 }
1402 return size;
1403 }
1404
1405 /* Return the size required for the block returned by __builtin_apply,
1406 and initialize apply_result_mode. */
1407
1408 static int
1409 apply_result_size (void)
1410 {
1411 static int size = -1;
1412 int align, regno;
1413
1414 /* The values computed by this function never change. */
1415 if (size < 0)
1416 {
1417 size = 0;
1418
1419 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1420 if (targetm.calls.function_value_regno_p (regno))
1421 {
1422 fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);
1423
1424 gcc_assert (mode != VOIDmode);
1425
1426 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1427 if (size % align != 0)
1428 size = CEIL (size, align) * align;
1429 size += GET_MODE_SIZE (mode);
1430 apply_result_mode[regno] = mode;
1431 }
1432 else
1433 apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1434
1435 /* Allow targets that use untyped_call and untyped_return to override
1436 the size so that machine-specific information can be stored here. */
1437 #ifdef APPLY_RESULT_SIZE
1438 size = APPLY_RESULT_SIZE;
1439 #endif
1440 }
1441 return size;
1442 }
1443
1444 /* Create a vector describing the result block RESULT. If SAVEP is true,
1445 the result block is used to save the values; otherwise it is used to
1446 restore the values. */
1447
1448 static rtx
1449 result_vector (int savep, rtx result)
1450 {
1451 int regno, size, align, nelts;
1452 fixed_size_mode mode;
1453 rtx reg, mem;
1454 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1455
1456 size = nelts = 0;
1457 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1458 if ((mode = apply_result_mode[regno]) != VOIDmode)
1459 {
1460 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1461 if (size % align != 0)
1462 size = CEIL (size, align) * align;
1463 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1464 mem = adjust_address (result, mode, size);
1465 savevec[nelts++] = (savep
1466 ? gen_rtx_SET (mem, reg)
1467 : gen_rtx_SET (reg, mem));
1468 size += GET_MODE_SIZE (mode);
1469 }
1470 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1471 }
1472
1473 /* Save the state required to perform an untyped call with the same
1474 arguments as were passed to the current function. */
1475
1476 static rtx
1477 expand_builtin_apply_args_1 (void)
1478 {
1479 rtx registers, tem;
1480 int size, align, regno;
1481 fixed_size_mode mode;
1482 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1483
1484 /* Create a block where the arg-pointer, structure value address,
1485 and argument registers can be saved. */
1486 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1487
1488 /* Walk past the arg-pointer and structure value address. */
1489 size = GET_MODE_SIZE (Pmode);
1490 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1491 size += GET_MODE_SIZE (Pmode);
1492
1493 /* Save each register used in calling a function to the block. */
1494 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1495 if ((mode = apply_args_mode[regno]) != VOIDmode)
1496 {
1497 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1498 if (size % align != 0)
1499 size = CEIL (size, align) * align;
1500
1501 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1502
1503 emit_move_insn (adjust_address (registers, mode, size), tem);
1504 size += GET_MODE_SIZE (mode);
1505 }
1506
1507 /* Save the arg pointer to the block. */
1508 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1509 /* We need the pointer as the caller actually passed the arguments to us,
1510 not as we might have pretended they were passed. Make sure it's a valid
1511 operand, as emit_move_insn isn't expected to handle a PLUS. */
1512 if (STACK_GROWS_DOWNWARD)
1513 tem
1514 = force_operand (plus_constant (Pmode, tem,
1515 crtl->args.pretend_args_size),
1516 NULL_RTX);
1517 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1518
1519 size = GET_MODE_SIZE (Pmode);
1520
1521 /* Save the structure value address unless this is passed as an
1522 "invisible" first argument. */
1523 if (struct_incoming_value)
1524 {
1525 emit_move_insn (adjust_address (registers, Pmode, size),
1526 copy_to_reg (struct_incoming_value));
1527 size += GET_MODE_SIZE (Pmode);
1528 }
1529
1530 /* Return the address of the block. */
1531 return copy_addr_to_reg (XEXP (registers, 0));
1532 }
1533
1534 /* __builtin_apply_args returns block of memory allocated on
1535 the stack into which is stored the arg pointer, structure
1536 value address, static chain, and all the registers that might
1537 possibly be used in performing a function call. The code is
1538 moved to the start of the function so the incoming values are
1539 saved. */
1540
1541 static rtx
1542 expand_builtin_apply_args (void)
1543 {
1544 /* Don't do __builtin_apply_args more than once in a function.
1545 Save the result of the first call and reuse it. */
1546 if (apply_args_value != 0)
1547 return apply_args_value;
1548 {
1549 /* When this function is called, it means that registers must be
1550 saved on entry to this function. So we migrate the
1551 call to the first insn of this function. */
1552 rtx temp;
1553
1554 start_sequence ();
1555 temp = expand_builtin_apply_args_1 ();
1556 rtx_insn *seq = get_insns ();
1557 end_sequence ();
1558
1559 apply_args_value = temp;
1560
1561 /* Put the insns after the NOTE that starts the function.
1562 If this is inside a start_sequence, make the outer-level insn
1563 chain current, so the code is placed at the start of the
1564 function. If internal_arg_pointer is a non-virtual pseudo,
1565 it needs to be placed after the function that initializes
1566 that pseudo. */
1567 push_topmost_sequence ();
1568 if (REG_P (crtl->args.internal_arg_pointer)
1569 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1570 emit_insn_before (seq, parm_birth_insn);
1571 else
1572 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1573 pop_topmost_sequence ();
1574 return temp;
1575 }
1576 }
1577
1578 /* Perform an untyped call and save the state required to perform an
1579 untyped return of whatever value was returned by the given function. */
1580
1581 static rtx
1582 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1583 {
1584 int size, align, regno;
1585 fixed_size_mode mode;
1586 rtx incoming_args, result, reg, dest, src;
1587 rtx_call_insn *call_insn;
1588 rtx old_stack_level = 0;
1589 rtx call_fusage = 0;
1590 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1591
1592 arguments = convert_memory_address (Pmode, arguments);
1593
1594 /* Create a block where the return registers can be saved. */
1595 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1596
1597 /* Fetch the arg pointer from the ARGUMENTS block. */
1598 incoming_args = gen_reg_rtx (Pmode);
1599 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1600 if (!STACK_GROWS_DOWNWARD)
1601 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1602 incoming_args, 0, OPTAB_LIB_WIDEN);
1603
1604 /* Push a new argument block and copy the arguments. Do not allow
1605 the (potential) memcpy call below to interfere with our stack
1606 manipulations. */
1607 do_pending_stack_adjust ();
1608 NO_DEFER_POP;
1609
1610 /* Save the stack with nonlocal if available. */
1611 if (targetm.have_save_stack_nonlocal ())
1612 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1613 else
1614 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1615
1616 /* Allocate a block of memory onto the stack and copy the memory
1617 arguments to the outgoing arguments address. We can pass TRUE
1618      as the last argument because we just saved the stack pointer
1619 and will restore it right after the call. */
1620 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
1621
1622 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1623 may have already set current_function_calls_alloca to true.
1624 current_function_calls_alloca won't be set if argsize is zero,
1625 so we have to guarantee need_drap is true here. */
1626 if (SUPPORTS_STACK_ALIGNMENT)
1627 crtl->need_drap = true;
1628
1629 dest = virtual_outgoing_args_rtx;
1630 if (!STACK_GROWS_DOWNWARD)
1631 {
1632 if (CONST_INT_P (argsize))
1633 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1634 else
1635 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1636 }
1637 dest = gen_rtx_MEM (BLKmode, dest);
1638 set_mem_align (dest, PARM_BOUNDARY);
1639 src = gen_rtx_MEM (BLKmode, incoming_args);
1640 set_mem_align (src, PARM_BOUNDARY);
1641 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1642
1643 /* Refer to the argument block. */
1644 apply_args_size ();
1645 arguments = gen_rtx_MEM (BLKmode, arguments);
1646 set_mem_align (arguments, PARM_BOUNDARY);
1647
1648 /* Walk past the arg-pointer and structure value address. */
1649 size = GET_MODE_SIZE (Pmode);
1650 if (struct_value)
1651 size += GET_MODE_SIZE (Pmode);
1652
1653 /* Restore each of the registers previously saved. Make USE insns
1654 for each of these registers for use in making the call. */
1655 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1656 if ((mode = apply_args_mode[regno]) != VOIDmode)
1657 {
1658 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1659 if (size % align != 0)
1660 size = CEIL (size, align) * align;
1661 reg = gen_rtx_REG (mode, regno);
1662 emit_move_insn (reg, adjust_address (arguments, mode, size));
1663 use_reg (&call_fusage, reg);
1664 size += GET_MODE_SIZE (mode);
1665 }
1666
1667 /* Restore the structure value address unless this is passed as an
1668 "invisible" first argument. */
1669 size = GET_MODE_SIZE (Pmode);
1670 if (struct_value)
1671 {
1672 rtx value = gen_reg_rtx (Pmode);
1673 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1674 emit_move_insn (struct_value, value);
1675 if (REG_P (struct_value))
1676 use_reg (&call_fusage, struct_value);
1677 size += GET_MODE_SIZE (Pmode);
1678 }
1679
1680 /* All arguments and registers used for the call are set up by now! */
1681 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1682
1683 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1684 and we don't want to load it into a register as an optimization,
1685 because prepare_call_address already did it if it should be done. */
1686 if (GET_CODE (function) != SYMBOL_REF)
1687 function = memory_address (FUNCTION_MODE, function);
1688
1689 /* Generate the actual call instruction and save the return value. */
1690 if (targetm.have_untyped_call ())
1691 {
1692 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1693 emit_call_insn (targetm.gen_untyped_call (mem, result,
1694 result_vector (1, result)));
1695 }
1696 else if (targetm.have_call_value ())
1697 {
1698 rtx valreg = 0;
1699
1700 /* Locate the unique return register. It is not possible to
1701 express a call that sets more than one return register using
1702 call_value; use untyped_call for that. In fact, untyped_call
1703 only needs to save the return registers in the given block. */
1704 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1705 if ((mode = apply_result_mode[regno]) != VOIDmode)
1706 {
1707 gcc_assert (!valreg); /* have_untyped_call required. */
1708
1709 valreg = gen_rtx_REG (mode, regno);
1710 }
1711
1712 emit_insn (targetm.gen_call_value (valreg,
1713 gen_rtx_MEM (FUNCTION_MODE, function),
1714 const0_rtx, NULL_RTX, const0_rtx));
1715
1716 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1717 }
1718 else
1719 gcc_unreachable ();
1720
1721 /* Find the CALL insn we just emitted, and attach the register usage
1722 information. */
1723 call_insn = last_call_insn ();
1724 add_function_usage_to (call_insn, call_fusage);
1725
1726 /* Restore the stack. */
1727 if (targetm.have_save_stack_nonlocal ())
1728 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1729 else
1730 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1731 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1732
1733 OK_DEFER_POP;
1734
1735 /* Return the address of the result block. */
1736 result = copy_addr_to_reg (XEXP (result, 0));
1737 return convert_memory_address (ptr_mode, result);
1738 }
1739
1740 /* Perform an untyped return. */
1741
1742 static void
1743 expand_builtin_return (rtx result)
1744 {
1745 int size, align, regno;
1746 fixed_size_mode mode;
1747 rtx reg;
1748 rtx_insn *call_fusage = 0;
1749
1750 result = convert_memory_address (Pmode, result);
1751
1752 apply_result_size ();
1753 result = gen_rtx_MEM (BLKmode, result);
1754
1755 if (targetm.have_untyped_return ())
1756 {
1757 rtx vector = result_vector (0, result);
1758 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1759 emit_barrier ();
1760 return;
1761 }
1762
1763 /* Restore the return value and note that each value is used. */
1764 size = 0;
1765 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1766 if ((mode = apply_result_mode[regno]) != VOIDmode)
1767 {
1768 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1769 if (size % align != 0)
1770 size = CEIL (size, align) * align;
1771 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1772 emit_move_insn (reg, adjust_address (result, mode, size));
1773
1774 push_to_sequence (call_fusage);
1775 emit_use (reg);
1776 call_fusage = get_insns ();
1777 end_sequence ();
1778 size += GET_MODE_SIZE (mode);
1779 }
1780
1781 /* Put the USE insns before the return. */
1782 emit_insn (call_fusage);
1783
1784   /* Return whatever value was restored by jumping directly to the end
1785 of the function. */
1786 expand_naked_return ();
1787 }
1788
1789 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1790
1791 static enum type_class
1792 type_to_class (tree type)
1793 {
1794 switch (TREE_CODE (type))
1795 {
1796 case VOID_TYPE: return void_type_class;
1797 case INTEGER_TYPE: return integer_type_class;
1798 case ENUMERAL_TYPE: return enumeral_type_class;
1799 case BOOLEAN_TYPE: return boolean_type_class;
1800 case POINTER_TYPE: return pointer_type_class;
1801 case REFERENCE_TYPE: return reference_type_class;
1802 case OFFSET_TYPE: return offset_type_class;
1803 case REAL_TYPE: return real_type_class;
1804 case COMPLEX_TYPE: return complex_type_class;
1805 case FUNCTION_TYPE: return function_type_class;
1806 case METHOD_TYPE: return method_type_class;
1807 case RECORD_TYPE: return record_type_class;
1808 case UNION_TYPE:
1809 case QUAL_UNION_TYPE: return union_type_class;
1810 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1811 ? string_type_class : array_type_class);
1812 case LANG_TYPE: return lang_type_class;
1813 default: return no_type_class;
1814 }
1815 }
1816
1817 /* Expand a call EXP to __builtin_classify_type. */
1818
1819 static rtx
1820 expand_builtin_classify_type (tree exp)
1821 {
1822 if (call_expr_nargs (exp))
1823 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1824 return GEN_INT (no_type_class);
1825 }
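/* For example, __builtin_classify_type (0) folds to integer_type_class
   and __builtin_classify_type ("") to pointer_type_class, since an
   array argument decays to a pointer before it is classified.  */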
1826
1827 /* This helper macro, meant to be used in mathfn_built_in below, determines
1828 which among a set of builtin math functions is appropriate for a given type
1829 mode. The `F' (float) and `L' (long double) are automatically generated
1830 from the 'double' case. If a function supports the _Float<N> and _Float<N>X
1831 types, there are additional types that are considered with 'F32', 'F64',
1832 'F128', etc. suffixes. */
1833 #define CASE_MATHFN(MATHFN) \
1834 CASE_CFN_##MATHFN: \
1835 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1836 fcodel = BUILT_IN_##MATHFN##L ; break;
1837 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1838 types. */
1839 #define CASE_MATHFN_FLOATN(MATHFN) \
1840 CASE_CFN_##MATHFN: \
1841 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1842 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1843 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1844 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1845 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1846 break;
1847 /* Similar to above, but appends _R after any F/L suffix. */
1848 #define CASE_MATHFN_REENT(MATHFN) \
1849 case CFN_BUILT_IN_##MATHFN##_R: \
1850 case CFN_BUILT_IN_##MATHFN##F_R: \
1851 case CFN_BUILT_IN_##MATHFN##L_R: \
1852 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1853 fcodel = BUILT_IN_##MATHFN##L_R ; break;
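/* As a rough illustration, CASE_MATHFN (ACOS) expands to something like
   the following (the CASE_CFN_ACOS macro itself is generated into
   case-cfn-macros.h and covers the built-in as well as the
   internal-function code):

     case CFN_BUILT_IN_ACOS:
     case CFN_ACOS:
       fcode = BUILT_IN_ACOS; fcodef = BUILT_IN_ACOSF;
       fcodel = BUILT_IN_ACOSL; break;  */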
1854
1855 /* Return a function equivalent to FN but operating on floating-point
1856 values of type TYPE, or END_BUILTINS if no such function exists.
1857 This is purely an operation on function codes; it does not guarantee
1858 that the target actually has an implementation of the function. */
1859
1860 static built_in_function
1861 mathfn_built_in_2 (tree type, combined_fn fn)
1862 {
1863 tree mtype;
1864 built_in_function fcode, fcodef, fcodel;
1865 built_in_function fcodef16 = END_BUILTINS;
1866 built_in_function fcodef32 = END_BUILTINS;
1867 built_in_function fcodef64 = END_BUILTINS;
1868 built_in_function fcodef128 = END_BUILTINS;
1869 built_in_function fcodef32x = END_BUILTINS;
1870 built_in_function fcodef64x = END_BUILTINS;
1871 built_in_function fcodef128x = END_BUILTINS;
1872
1873 switch (fn)
1874 {
1875 CASE_MATHFN (ACOS)
1876 CASE_MATHFN (ACOSH)
1877 CASE_MATHFN (ASIN)
1878 CASE_MATHFN (ASINH)
1879 CASE_MATHFN (ATAN)
1880 CASE_MATHFN (ATAN2)
1881 CASE_MATHFN (ATANH)
1882 CASE_MATHFN (CBRT)
1883 CASE_MATHFN_FLOATN (CEIL)
1884 CASE_MATHFN (CEXPI)
1885 CASE_MATHFN_FLOATN (COPYSIGN)
1886 CASE_MATHFN (COS)
1887 CASE_MATHFN (COSH)
1888 CASE_MATHFN (DREM)
1889 CASE_MATHFN (ERF)
1890 CASE_MATHFN (ERFC)
1891 CASE_MATHFN (EXP)
1892 CASE_MATHFN (EXP10)
1893 CASE_MATHFN (EXP2)
1894 CASE_MATHFN (EXPM1)
1895 CASE_MATHFN (FABS)
1896 CASE_MATHFN (FDIM)
1897 CASE_MATHFN_FLOATN (FLOOR)
1898 CASE_MATHFN_FLOATN (FMA)
1899 CASE_MATHFN_FLOATN (FMAX)
1900 CASE_MATHFN_FLOATN (FMIN)
1901 CASE_MATHFN (FMOD)
1902 CASE_MATHFN (FREXP)
1903 CASE_MATHFN (GAMMA)
1904 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
1905 CASE_MATHFN (HUGE_VAL)
1906 CASE_MATHFN (HYPOT)
1907 CASE_MATHFN (ILOGB)
1908 CASE_MATHFN (ICEIL)
1909 CASE_MATHFN (IFLOOR)
1910 CASE_MATHFN (INF)
1911 CASE_MATHFN (IRINT)
1912 CASE_MATHFN (IROUND)
1913 CASE_MATHFN (ISINF)
1914 CASE_MATHFN (J0)
1915 CASE_MATHFN (J1)
1916 CASE_MATHFN (JN)
1917 CASE_MATHFN (LCEIL)
1918 CASE_MATHFN (LDEXP)
1919 CASE_MATHFN (LFLOOR)
1920 CASE_MATHFN (LGAMMA)
1921 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
1922 CASE_MATHFN (LLCEIL)
1923 CASE_MATHFN (LLFLOOR)
1924 CASE_MATHFN (LLRINT)
1925 CASE_MATHFN (LLROUND)
1926 CASE_MATHFN (LOG)
1927 CASE_MATHFN (LOG10)
1928 CASE_MATHFN (LOG1P)
1929 CASE_MATHFN (LOG2)
1930 CASE_MATHFN (LOGB)
1931 CASE_MATHFN (LRINT)
1932 CASE_MATHFN (LROUND)
1933 CASE_MATHFN (MODF)
1934 CASE_MATHFN (NAN)
1935 CASE_MATHFN (NANS)
1936 CASE_MATHFN_FLOATN (NEARBYINT)
1937 CASE_MATHFN (NEXTAFTER)
1938 CASE_MATHFN (NEXTTOWARD)
1939 CASE_MATHFN (POW)
1940 CASE_MATHFN (POWI)
1941 CASE_MATHFN (POW10)
1942 CASE_MATHFN (REMAINDER)
1943 CASE_MATHFN (REMQUO)
1944 CASE_MATHFN_FLOATN (RINT)
1945 CASE_MATHFN_FLOATN (ROUND)
1946 CASE_MATHFN (SCALB)
1947 CASE_MATHFN (SCALBLN)
1948 CASE_MATHFN (SCALBN)
1949 CASE_MATHFN (SIGNBIT)
1950 CASE_MATHFN (SIGNIFICAND)
1951 CASE_MATHFN (SIN)
1952 CASE_MATHFN (SINCOS)
1953 CASE_MATHFN (SINH)
1954 CASE_MATHFN_FLOATN (SQRT)
1955 CASE_MATHFN (TAN)
1956 CASE_MATHFN (TANH)
1957 CASE_MATHFN (TGAMMA)
1958 CASE_MATHFN_FLOATN (TRUNC)
1959 CASE_MATHFN (Y0)
1960 CASE_MATHFN (Y1)
1961 CASE_MATHFN (YN)
1962
1963 default:
1964 return END_BUILTINS;
1965 }
1966
1967 mtype = TYPE_MAIN_VARIANT (type);
1968 if (mtype == double_type_node)
1969 return fcode;
1970 else if (mtype == float_type_node)
1971 return fcodef;
1972 else if (mtype == long_double_type_node)
1973 return fcodel;
1974 else if (mtype == float16_type_node)
1975 return fcodef16;
1976 else if (mtype == float32_type_node)
1977 return fcodef32;
1978 else if (mtype == float64_type_node)
1979 return fcodef64;
1980 else if (mtype == float128_type_node)
1981 return fcodef128;
1982 else if (mtype == float32x_type_node)
1983 return fcodef32x;
1984 else if (mtype == float64x_type_node)
1985 return fcodef64x;
1986 else if (mtype == float128x_type_node)
1987 return fcodef128x;
1988 else
1989 return END_BUILTINS;
1990 }
1991
1992 /* Return a mathematical function equivalent to FN but operating directly on TYPE,
1993 if available. If IMPLICIT_P is true use the implicit builtin declaration,
1994 otherwise use the explicit declaration. If we can't do the conversion,
1995 return null. */
1996
1997 static tree
1998 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
1999 {
2000 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2001 if (fcode2 == END_BUILTINS)
2002 return NULL_TREE;
2003
2004 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2005 return NULL_TREE;
2006
2007 return builtin_decl_explicit (fcode2);
2008 }
2009
2010 /* Like mathfn_built_in_1, but always use the implicit array. */
2011
2012 tree
2013 mathfn_built_in (tree type, combined_fn fn)
2014 {
2015 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2016 }
2017
2018 /* Like mathfn_built_in_1, but take a built_in_function and
2019 always use the implicit array. */
2020
2021 tree
2022 mathfn_built_in (tree type, enum built_in_function fn)
2023 {
2024 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
2025 }
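/* For example, mathfn_built_in (float_type_node, BUILT_IN_SQRT) yields
   the declaration of sqrtf when the implicit builtin declaration is
   available, and NULL_TREE otherwise.  */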
2026
2027 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2028 return its code, otherwise return IFN_LAST. Note that this function
2029 only tests whether the function is defined in internals.def, not whether
2030 it is actually available on the target. */
2031
2032 internal_fn
2033 associated_internal_fn (tree fndecl)
2034 {
2035 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2036 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2037 switch (DECL_FUNCTION_CODE (fndecl))
2038 {
2039 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2040 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2041 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2042 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2043 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2044 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2045 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2046 #include "internal-fn.def"
2047
2048 CASE_FLT_FN (BUILT_IN_POW10):
2049 return IFN_EXP10;
2050
2051 CASE_FLT_FN (BUILT_IN_DREM):
2052 return IFN_REMAINDER;
2053
2054 CASE_FLT_FN (BUILT_IN_SCALBN):
2055 CASE_FLT_FN (BUILT_IN_SCALBLN):
2056 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2057 return IFN_LDEXP;
2058 return IFN_LAST;
2059
2060 default:
2061 return IFN_LAST;
2062 }
2063 }
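/* For example, the decl for sqrtf maps to IFN_SQRT, and scalbn maps to
   IFN_LDEXP when the return type's radix is 2; a builtin with no
   counterpart in internal-fn.def yields IFN_LAST.  */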
2064
2065 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2066 on the current target by a call to an internal function, return the
2067 code of that internal function, otherwise return IFN_LAST. The caller
2068 is responsible for ensuring that any side-effects of the built-in
2069 call are dealt with correctly. E.g. if CALL sets errno, the caller
2070 must decide that the errno result isn't needed or make it available
2071 in some other way. */
2072
2073 internal_fn
2074 replacement_internal_fn (gcall *call)
2075 {
2076 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2077 {
2078 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2079 if (ifn != IFN_LAST)
2080 {
2081 tree_pair types = direct_internal_fn_types (ifn, call);
2082 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2083 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2084 return ifn;
2085 }
2086 }
2087 return IFN_LAST;
2088 }
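/* For instance, a GIMPLE call x = __builtin_sqrtf (y) may be replaced
   by the internal call x = .SQRT (y) when the target implements the
   sqrt optab for SFmode; it is then up to the caller to have dealt
   with the errno effect of sqrtf on negative arguments.  */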
2089
2090 /* Expand a call to the builtin ternary math functions (fma).
2091 Return NULL_RTX if a normal call should be emitted rather than expanding the
2092 function in-line. EXP is the expression that is a call to the builtin
2093 function; if convenient, the result should be placed in TARGET.
2094 SUBTARGET may be used as the target for computing one of EXP's
2095 operands. */
2096
2097 static rtx
2098 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2099 {
2100 optab builtin_optab;
2101 rtx op0, op1, op2, result;
2102 rtx_insn *insns;
2103 tree fndecl = get_callee_fndecl (exp);
2104 tree arg0, arg1, arg2;
2105 machine_mode mode;
2106
2107 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2108 return NULL_RTX;
2109
2110 arg0 = CALL_EXPR_ARG (exp, 0);
2111 arg1 = CALL_EXPR_ARG (exp, 1);
2112 arg2 = CALL_EXPR_ARG (exp, 2);
2113
2114 switch (DECL_FUNCTION_CODE (fndecl))
2115 {
2116 CASE_FLT_FN (BUILT_IN_FMA):
2117 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2118 builtin_optab = fma_optab; break;
2119 default:
2120 gcc_unreachable ();
2121 }
2122
2123 /* Make a suitable register to place result in. */
2124 mode = TYPE_MODE (TREE_TYPE (exp));
2125
2126 /* Before working hard, check whether the instruction is available. */
2127 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2128 return NULL_RTX;
2129
2130 result = gen_reg_rtx (mode);
2131
2132 /* Always stabilize the argument list. */
2133 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2134 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2135 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2136
2137 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2138 op1 = expand_normal (arg1);
2139 op2 = expand_normal (arg2);
2140
2141 start_sequence ();
2142
2143 /* Compute into RESULT.
2144 Set RESULT to wherever the result comes back. */
2145 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2146 result, 0);
2147
2148 /* If we were unable to expand via the builtin, stop the sequence
2149 (without outputting the insns) and call to the library function
2150 with the stabilized argument list. */
2151 if (result == 0)
2152 {
2153 end_sequence ();
2154 return expand_call (exp, target, target == const0_rtx);
2155 }
2156
2157 /* Output the entire sequence. */
2158 insns = get_insns ();
2159 end_sequence ();
2160 emit_insn (insns);
2161
2162 return result;
2163 }
2164
2165 /* Expand a call to the builtin sin and cos math functions.
2166 Return NULL_RTX if a normal call should be emitted rather than expanding the
2167 function in-line. EXP is the expression that is a call to the builtin
2168 function; if convenient, the result should be placed in TARGET.
2169 SUBTARGET may be used as the target for computing one of EXP's
2170 operands. */
2171
2172 static rtx
2173 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2174 {
2175 optab builtin_optab;
2176 rtx op0;
2177 rtx_insn *insns;
2178 tree fndecl = get_callee_fndecl (exp);
2179 machine_mode mode;
2180 tree arg;
2181
2182 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2183 return NULL_RTX;
2184
2185 arg = CALL_EXPR_ARG (exp, 0);
2186
2187 switch (DECL_FUNCTION_CODE (fndecl))
2188 {
2189 CASE_FLT_FN (BUILT_IN_SIN):
2190 CASE_FLT_FN (BUILT_IN_COS):
2191 builtin_optab = sincos_optab; break;
2192 default:
2193 gcc_unreachable ();
2194 }
2195
2196 /* Make a suitable register to place result in. */
2197 mode = TYPE_MODE (TREE_TYPE (exp));
2198
2199   /* Check if the sincos insn is available; otherwise fall back
2200      to the sin or cos insn.  */
2201 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2202 switch (DECL_FUNCTION_CODE (fndecl))
2203 {
2204 CASE_FLT_FN (BUILT_IN_SIN):
2205 builtin_optab = sin_optab; break;
2206 CASE_FLT_FN (BUILT_IN_COS):
2207 builtin_optab = cos_optab; break;
2208 default:
2209 gcc_unreachable ();
2210 }
2211
2212 /* Before working hard, check whether the instruction is available. */
2213 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2214 {
2215 rtx result = gen_reg_rtx (mode);
2216
2217 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2218 need to expand the argument again. This way, we will not perform
2219 	 side-effects more than once.  */
2220 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2221
2222 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2223
2224 start_sequence ();
2225
2226 /* Compute into RESULT.
2227 Set RESULT to wherever the result comes back. */
2228 if (builtin_optab == sincos_optab)
2229 {
2230 int ok;
2231
2232 switch (DECL_FUNCTION_CODE (fndecl))
2233 {
2234 CASE_FLT_FN (BUILT_IN_SIN):
2235 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2236 break;
2237 CASE_FLT_FN (BUILT_IN_COS):
2238 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2239 break;
2240 default:
2241 gcc_unreachable ();
2242 }
2243 gcc_assert (ok);
2244 }
2245 else
2246 result = expand_unop (mode, builtin_optab, op0, result, 0);
2247
2248 if (result != 0)
2249 {
2250 /* Output the entire sequence. */
2251 insns = get_insns ();
2252 end_sequence ();
2253 emit_insn (insns);
2254 return result;
2255 }
2256
2257 /* If we were unable to expand via the builtin, stop the sequence
2258 (without outputting the insns) and call to the library function
2259 with the stabilized argument list. */
2260 end_sequence ();
2261 }
2262
2263 return expand_call (exp, target, target == const0_rtx);
2264 }
2265
2266 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2267 return an RTL instruction code that implements the functionality.
2268 If that isn't possible or available return CODE_FOR_nothing. */
2269
2270 static enum insn_code
2271 interclass_mathfn_icode (tree arg, tree fndecl)
2272 {
2273 bool errno_set = false;
2274 optab builtin_optab = unknown_optab;
2275 machine_mode mode;
2276
2277 switch (DECL_FUNCTION_CODE (fndecl))
2278 {
2279 CASE_FLT_FN (BUILT_IN_ILOGB):
2280 errno_set = true; builtin_optab = ilogb_optab; break;
2281 CASE_FLT_FN (BUILT_IN_ISINF):
2282 builtin_optab = isinf_optab; break;
2283 case BUILT_IN_ISNORMAL:
2284 case BUILT_IN_ISFINITE:
2285 CASE_FLT_FN (BUILT_IN_FINITE):
2286 case BUILT_IN_FINITED32:
2287 case BUILT_IN_FINITED64:
2288 case BUILT_IN_FINITED128:
2289 case BUILT_IN_ISINFD32:
2290 case BUILT_IN_ISINFD64:
2291 case BUILT_IN_ISINFD128:
2292 /* These builtins have no optabs (yet). */
2293 break;
2294 default:
2295 gcc_unreachable ();
2296 }
2297
2298 /* There's no easy way to detect the case we need to set EDOM. */
2299 if (flag_errno_math && errno_set)
2300 return CODE_FOR_nothing;
2301
2302 /* Optab mode depends on the mode of the input argument. */
2303 mode = TYPE_MODE (TREE_TYPE (arg));
2304
2305 if (builtin_optab)
2306 return optab_handler (builtin_optab, mode);
2307 return CODE_FOR_nothing;
2308 }
2309
2310 /* Expand a call to one of the builtin math functions that operate on
2311    a floating-point argument and produce an integer result (ilogb, isinf,
2312    isnan, etc.).
2313 Return 0 if a normal call should be emitted rather than expanding the
2314 function in-line. EXP is the expression that is a call to the builtin
2315 function; if convenient, the result should be placed in TARGET. */
2316
2317 static rtx
2318 expand_builtin_interclass_mathfn (tree exp, rtx target)
2319 {
2320 enum insn_code icode = CODE_FOR_nothing;
2321 rtx op0;
2322 tree fndecl = get_callee_fndecl (exp);
2323 machine_mode mode;
2324 tree arg;
2325
2326 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2327 return NULL_RTX;
2328
2329 arg = CALL_EXPR_ARG (exp, 0);
2330 icode = interclass_mathfn_icode (arg, fndecl);
2331 mode = TYPE_MODE (TREE_TYPE (arg));
2332
2333 if (icode != CODE_FOR_nothing)
2334 {
2335 struct expand_operand ops[1];
2336 rtx_insn *last = get_last_insn ();
2337 tree orig_arg = arg;
2338
2339 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2340 need to expand the argument again. This way, we will not perform
2341 	 side-effects more than once.  */
2342 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2343
2344 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2345
2346 if (mode != GET_MODE (op0))
2347 op0 = convert_to_mode (mode, op0, 0);
2348
2349 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2350 if (maybe_legitimize_operands (icode, 0, 1, ops)
2351 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2352 return ops[0].value;
2353
2354 delete_insns_since (last);
2355 CALL_EXPR_ARG (exp, 0) = orig_arg;
2356 }
2357
2358 return NULL_RTX;
2359 }
2360
2361 /* Expand a call to the builtin sincos math function.
2362 Return NULL_RTX if a normal call should be emitted rather than expanding the
2363 function in-line. EXP is the expression that is a call to the builtin
2364 function. */
2365
2366 static rtx
2367 expand_builtin_sincos (tree exp)
2368 {
2369 rtx op0, op1, op2, target1, target2;
2370 machine_mode mode;
2371 tree arg, sinp, cosp;
2372 int result;
2373 location_t loc = EXPR_LOCATION (exp);
2374 tree alias_type, alias_off;
2375
2376 if (!validate_arglist (exp, REAL_TYPE,
2377 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2378 return NULL_RTX;
2379
2380 arg = CALL_EXPR_ARG (exp, 0);
2381 sinp = CALL_EXPR_ARG (exp, 1);
2382 cosp = CALL_EXPR_ARG (exp, 2);
2383
2384 /* Make a suitable register to place result in. */
2385 mode = TYPE_MODE (TREE_TYPE (arg));
2386
2387 /* Check if sincos insn is available, otherwise emit the call. */
2388 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2389 return NULL_RTX;
2390
2391 target1 = gen_reg_rtx (mode);
2392 target2 = gen_reg_rtx (mode);
2393
2394 op0 = expand_normal (arg);
2395 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2396 alias_off = build_int_cst (alias_type, 0);
2397 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2398 sinp, alias_off));
2399 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2400 cosp, alias_off));
2401
2402 /* Compute into target1 and target2.
2403 Set TARGET to wherever the result comes back. */
2404 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2405 gcc_assert (result);
2406
2407 /* Move target1 and target2 to the memory locations indicated
2408 by op1 and op2. */
2409 emit_move_insn (op1, target1);
2410 emit_move_insn (op2, target2);
2411
2412 return const0_rtx;
2413 }
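/* A call such as sincos (x, &s, &c) is thus expanded into a single
   two-output sincos insn instead of separate sin and cos libcalls,
   with the results stored through the two pointer arguments.  */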
2414
2415 /* Expand a call to the internal cexpi builtin to the sincos math function.
2416 EXP is the expression that is a call to the builtin function; if convenient,
2417 the result should be placed in TARGET. */
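/* cexpi (x) computes exp (i*x) = cos (x) + i*sin (x), which is why it
   can be lowered either to a sincos insn, to a sincos libcall, or as
   a last resort to a call to cexp on 0 + i*x.  */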
2418
2419 static rtx
2420 expand_builtin_cexpi (tree exp, rtx target)
2421 {
2422 tree fndecl = get_callee_fndecl (exp);
2423 tree arg, type;
2424 machine_mode mode;
2425 rtx op0, op1, op2;
2426 location_t loc = EXPR_LOCATION (exp);
2427
2428 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2429 return NULL_RTX;
2430
2431 arg = CALL_EXPR_ARG (exp, 0);
2432 type = TREE_TYPE (arg);
2433 mode = TYPE_MODE (TREE_TYPE (arg));
2434
2435 /* Try expanding via a sincos optab, fall back to emitting a libcall
2436      to sincos or cexp.  We can be sure one of them exists because cexpi
2437      is only generated from a sincos or cexp call, or when one of them is available.  */
2438 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2439 {
2440 op1 = gen_reg_rtx (mode);
2441 op2 = gen_reg_rtx (mode);
2442
2443 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2444
2445 /* Compute into op1 and op2. */
2446 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2447 }
2448 else if (targetm.libc_has_function (function_sincos))
2449 {
2450 tree call, fn = NULL_TREE;
2451 tree top1, top2;
2452 rtx op1a, op2a;
2453
2454 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2455 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2456 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2457 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2458 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2459 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2460 else
2461 gcc_unreachable ();
2462
2463 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2464 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2465 op1a = copy_addr_to_reg (XEXP (op1, 0));
2466 op2a = copy_addr_to_reg (XEXP (op2, 0));
2467 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2468 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2469
2470 /* Make sure not to fold the sincos call again. */
2471 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2472 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2473 call, 3, arg, top1, top2));
2474 }
2475 else
2476 {
2477 tree call, fn = NULL_TREE, narg;
2478 tree ctype = build_complex_type (type);
2479
2480 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2481 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2482 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2483 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2484 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2485 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2486 else
2487 gcc_unreachable ();
2488
2489 /* If we don't have a decl for cexp create one. This is the
2490 friendliest fallback if the user calls __builtin_cexpi
2491 without full target C99 function support. */
2492 if (fn == NULL_TREE)
2493 {
2494 tree fntype;
2495 const char *name = NULL;
2496
2497 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2498 name = "cexpf";
2499 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2500 name = "cexp";
2501 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2502 name = "cexpl";
2503
2504 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2505 fn = build_fn_decl (name, fntype);
2506 }
2507
2508 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2509 build_real (type, dconst0), arg);
2510
2511 /* Make sure not to fold the cexp call again. */
2512 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2513 return expand_expr (build_call_nary (ctype, call, 1, narg),
2514 target, VOIDmode, EXPAND_NORMAL);
2515 }
2516
2517 /* Now build the proper return type. */
2518 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2519 make_tree (TREE_TYPE (arg), op2),
2520 make_tree (TREE_TYPE (arg), op1)),
2521 target, VOIDmode, EXPAND_NORMAL);
2522 }
2523
2524 /* Conveniently construct a function call expression. FNDECL names the
2525 function to be called, N is the number of arguments, and the "..."
2526    parameters are the argument expressions.  Unlike build_call_expr
2527    this doesn't fold the call, so it will always return a CALL_EXPR.  */
2528
2529 static tree
2530 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2531 {
2532 va_list ap;
2533 tree fntype = TREE_TYPE (fndecl);
2534 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2535
2536 va_start (ap, n);
2537 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2538 va_end (ap);
2539 SET_EXPR_LOCATION (fn, loc);
2540 return fn;
2541 }
2542
2543 /* Expand a call to one of the builtin rounding functions gcc defines
2544 as an extension (lfloor and lceil). As these are gcc extensions we
2545 do not need to worry about setting errno to EDOM.
2546    If expanding via the optab fails, lower the expression to (int)floor(x).
2547 EXP is the expression that is a call to the builtin function;
2548 if convenient, the result should be placed in TARGET. */
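/* For example, on a target without an lfloor optab for the modes
   involved, __builtin_lfloor (x) is lowered to the equivalent of
   (long int) floor (x), with expand_fix doing the final truncation.  */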
2549
2550 static rtx
2551 expand_builtin_int_roundingfn (tree exp, rtx target)
2552 {
2553 convert_optab builtin_optab;
2554 rtx op0, tmp;
2555 rtx_insn *insns;
2556 tree fndecl = get_callee_fndecl (exp);
2557 enum built_in_function fallback_fn;
2558 tree fallback_fndecl;
2559 machine_mode mode;
2560 tree arg;
2561
2562 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2563 gcc_unreachable ();
2564
2565 arg = CALL_EXPR_ARG (exp, 0);
2566
2567 switch (DECL_FUNCTION_CODE (fndecl))
2568 {
2569 CASE_FLT_FN (BUILT_IN_ICEIL):
2570 CASE_FLT_FN (BUILT_IN_LCEIL):
2571 CASE_FLT_FN (BUILT_IN_LLCEIL):
2572 builtin_optab = lceil_optab;
2573 fallback_fn = BUILT_IN_CEIL;
2574 break;
2575
2576 CASE_FLT_FN (BUILT_IN_IFLOOR):
2577 CASE_FLT_FN (BUILT_IN_LFLOOR):
2578 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2579 builtin_optab = lfloor_optab;
2580 fallback_fn = BUILT_IN_FLOOR;
2581 break;
2582
2583 default:
2584 gcc_unreachable ();
2585 }
2586
2587 /* Make a suitable register to place result in. */
2588 mode = TYPE_MODE (TREE_TYPE (exp));
2589
2590 target = gen_reg_rtx (mode);
2591
2592 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2593 need to expand the argument again. This way, we will not perform
2594      side-effects more than once.  */
2595 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2596
2597 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2598
2599 start_sequence ();
2600
2601 /* Compute into TARGET. */
2602 if (expand_sfix_optab (target, op0, builtin_optab))
2603 {
2604 /* Output the entire sequence. */
2605 insns = get_insns ();
2606 end_sequence ();
2607 emit_insn (insns);
2608 return target;
2609 }
2610
2611 /* If we were unable to expand via the builtin, stop the sequence
2612 (without outputting the insns). */
2613 end_sequence ();
2614
2615 /* Fall back to floating point rounding optab. */
2616 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2617
2618 /* For non-C99 targets we may end up without a fallback fndecl here
2619 if the user called __builtin_lfloor directly. In this case emit
2620 a call to the floor/ceil variants nevertheless. This should result
2621      in the best user experience on targets without full C99 support.  */
2622 if (fallback_fndecl == NULL_TREE)
2623 {
2624 tree fntype;
2625 const char *name = NULL;
2626
2627 switch (DECL_FUNCTION_CODE (fndecl))
2628 {
2629 case BUILT_IN_ICEIL:
2630 case BUILT_IN_LCEIL:
2631 case BUILT_IN_LLCEIL:
2632 name = "ceil";
2633 break;
2634 case BUILT_IN_ICEILF:
2635 case BUILT_IN_LCEILF:
2636 case BUILT_IN_LLCEILF:
2637 name = "ceilf";
2638 break;
2639 case BUILT_IN_ICEILL:
2640 case BUILT_IN_LCEILL:
2641 case BUILT_IN_LLCEILL:
2642 name = "ceill";
2643 break;
2644 case BUILT_IN_IFLOOR:
2645 case BUILT_IN_LFLOOR:
2646 case BUILT_IN_LLFLOOR:
2647 name = "floor";
2648 break;
2649 case BUILT_IN_IFLOORF:
2650 case BUILT_IN_LFLOORF:
2651 case BUILT_IN_LLFLOORF:
2652 name = "floorf";
2653 break;
2654 case BUILT_IN_IFLOORL:
2655 case BUILT_IN_LFLOORL:
2656 case BUILT_IN_LLFLOORL:
2657 name = "floorl";
2658 break;
2659 default:
2660 gcc_unreachable ();
2661 }
2662
2663 fntype = build_function_type_list (TREE_TYPE (arg),
2664 TREE_TYPE (arg), NULL_TREE);
2665 fallback_fndecl = build_fn_decl (name, fntype);
2666 }
2667
2668 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2669
2670 tmp = expand_normal (exp);
2671 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2672
2673 /* Truncate the result of floating point optab to integer
2674 via expand_fix (). */
2675 target = gen_reg_rtx (mode);
2676 expand_fix (target, tmp, 0);
2677
2678 return target;
2679 }
2680
2681 /* Expand a call to one of the builtin math functions doing integer
2682 conversion (lrint).
2683 Return 0 if a normal call should be emitted rather than expanding the
2684 function in-line. EXP is the expression that is a call to the builtin
2685 function; if convenient, the result should be placed in TARGET. */
2686
2687 static rtx
2688 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2689 {
2690 convert_optab builtin_optab;
2691 rtx op0;
2692 rtx_insn *insns;
2693 tree fndecl = get_callee_fndecl (exp);
2694 tree arg;
2695 machine_mode mode;
2696 enum built_in_function fallback_fn = BUILT_IN_NONE;
2697
2698 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2699 gcc_unreachable ();
2700
2701 arg = CALL_EXPR_ARG (exp, 0);
2702
2703 switch (DECL_FUNCTION_CODE (fndecl))
2704 {
2705 CASE_FLT_FN (BUILT_IN_IRINT):
2706 fallback_fn = BUILT_IN_LRINT;
2707 gcc_fallthrough ();
2708 CASE_FLT_FN (BUILT_IN_LRINT):
2709 CASE_FLT_FN (BUILT_IN_LLRINT):
2710 builtin_optab = lrint_optab;
2711 break;
2712
2713 CASE_FLT_FN (BUILT_IN_IROUND):
2714 fallback_fn = BUILT_IN_LROUND;
2715 gcc_fallthrough ();
2716 CASE_FLT_FN (BUILT_IN_LROUND):
2717 CASE_FLT_FN (BUILT_IN_LLROUND):
2718 builtin_optab = lround_optab;
2719 break;
2720
2721 default:
2722 gcc_unreachable ();
2723 }
2724
2725 /* There's no easy way to detect the case we need to set EDOM. */
2726 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2727 return NULL_RTX;
2728
2729 /* Make a suitable register to place result in. */
2730 mode = TYPE_MODE (TREE_TYPE (exp));
2731
2732   /* Expand inline only when we don't need to worry about setting errno.  */
2733 if (!flag_errno_math)
2734 {
2735 rtx result = gen_reg_rtx (mode);
2736
2737 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2738 need to expand the argument again. This way, we will not perform
2739 	 side-effects more than once.  */
2740 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2741
2742 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2743
2744 start_sequence ();
2745
2746 if (expand_sfix_optab (result, op0, builtin_optab))
2747 {
2748 /* Output the entire sequence. */
2749 insns = get_insns ();
2750 end_sequence ();
2751 emit_insn (insns);
2752 return result;
2753 }
2754
2755 /* If we were unable to expand via the builtin, stop the sequence
2756 (without outputting the insns) and call to the library function
2757 with the stabilized argument list. */
2758 end_sequence ();
2759 }
2760
2761 if (fallback_fn != BUILT_IN_NONE)
2762 {
2763 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2764 targets, (int) round (x) should never be transformed into
2765 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2766 a call to lround in the hope that the target provides at least some
2767 C99 functions. This should result in the best user experience for
2768 	 targets without full C99 support.  */
2769 tree fallback_fndecl = mathfn_built_in_1
2770 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2771
2772 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2773 fallback_fndecl, 1, arg);
2774
2775 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2776 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2777 return convert_to_mode (mode, target, 0);
2778 }
2779
2780 return expand_call (exp, target, target == const0_rtx);
2781 }
2782
2783 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2784 a normal call should be emitted rather than expanding the function
2785 in-line. EXP is the expression that is a call to the builtin
2786 function; if convenient, the result should be placed in TARGET. */
2787
2788 static rtx
2789 expand_builtin_powi (tree exp, rtx target)
2790 {
2791 tree arg0, arg1;
2792 rtx op0, op1;
2793 machine_mode mode;
2794 machine_mode mode2;
2795
2796 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2797 return NULL_RTX;
2798
2799 arg0 = CALL_EXPR_ARG (exp, 0);
2800 arg1 = CALL_EXPR_ARG (exp, 1);
2801 mode = TYPE_MODE (TREE_TYPE (exp));
2802
2803 /* Emit a libcall to libgcc. */
2804
2805 /* Mode of the 2nd argument must match that of an int. */
2806 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
2807
2808 if (target == NULL_RTX)
2809 target = gen_reg_rtx (mode);
2810
2811 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2812 if (GET_MODE (op0) != mode)
2813 op0 = convert_to_mode (mode, op0, 0);
2814 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2815 if (GET_MODE (op1) != mode2)
2816 op1 = convert_to_mode (mode2, op1, 0);
2817
2818 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2819 target, LCT_CONST, mode,
2820 op0, mode, op1, mode2);
2821
2822 return target;
2823 }
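/* The libcall emitted above resolves to libgcc's __powi helpers; e.g.
   __builtin_powi (x, n) with double X becomes a call to __powidf2.  */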
2824
2825 /* Expand expression EXP, which is a call to the strlen builtin.  Return
2826    NULL_RTX if we failed and the caller should emit a normal call; otherwise
2827    try to get the result in TARGET, if convenient.  */
2828
2829 static rtx
2830 expand_builtin_strlen (tree exp, rtx target,
2831 machine_mode target_mode)
2832 {
2833 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2834 return NULL_RTX;
2835
2836 struct expand_operand ops[4];
2837 rtx pat;
2838 tree len;
2839 tree src = CALL_EXPR_ARG (exp, 0);
2840 rtx src_reg;
2841 rtx_insn *before_strlen;
2842 machine_mode insn_mode;
2843 enum insn_code icode = CODE_FOR_nothing;
2844 unsigned int align;
2845
2846 /* If the length can be computed at compile-time, return it. */
2847 len = c_strlen (src, 0);
2848 if (len)
2849 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2850
2851 /* If the length can be computed at compile-time and is constant
2852 integer, but there are side-effects in src, evaluate
2853 src for side-effects, then return len.
2854 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2855 can be optimized into: i++; x = 3; */
2856 len = c_strlen (src, 1);
2857 if (len && TREE_CODE (len) == INTEGER_CST)
2858 {
2859 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2860 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2861 }
2862
2863 align = get_pointer_alignment (src) / BITS_PER_UNIT;
2864
2865 /* If SRC is not a pointer type, don't do this operation inline. */
2866 if (align == 0)
2867 return NULL_RTX;
2868
2869 /* Bail out if we can't compute strlen in the right mode. */
2870 FOR_EACH_MODE_FROM (insn_mode, target_mode)
2871 {
2872 icode = optab_handler (strlen_optab, insn_mode);
2873 if (icode != CODE_FOR_nothing)
2874 break;
2875 }
2876 if (insn_mode == VOIDmode)
2877 return NULL_RTX;
2878
2879 /* Make a place to hold the source address. We will not expand
2880 the actual source until we are sure that the expansion will
2881 not fail -- there are trees that cannot be expanded twice. */
2882 src_reg = gen_reg_rtx (Pmode);
2883
2884 /* Mark the beginning of the strlen sequence so we can emit the
2885 source operand later. */
2886 before_strlen = get_last_insn ();
2887
2888 create_output_operand (&ops[0], target, insn_mode);
2889 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2890 create_integer_operand (&ops[2], 0);
2891 create_integer_operand (&ops[3], align);
2892 if (!maybe_expand_insn (icode, 4, ops))
2893 return NULL_RTX;
2894
2895 /* Check to see if the argument was declared attribute nonstring
2896 and if so, issue a warning since at this point it's not known
2897 to be nul-terminated. */
2898 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
2899
2900 /* Now that we are assured of success, expand the source. */
2901 start_sequence ();
2902 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2903 if (pat != src_reg)
2904 {
2905 #ifdef POINTERS_EXTEND_UNSIGNED
2906 if (GET_MODE (pat) != Pmode)
2907 pat = convert_to_mode (Pmode, pat,
2908 POINTERS_EXTEND_UNSIGNED);
2909 #endif
2910 emit_move_insn (src_reg, pat);
2911 }
2912 pat = get_insns ();
2913 end_sequence ();
2914
2915 if (before_strlen)
2916 emit_insn_after (pat, before_strlen);
2917 else
2918 emit_insn_before (pat, get_insns ());
2919
2920 /* Return the value in the proper mode for this function. */
2921 if (GET_MODE (ops[0].value) == target_mode)
2922 target = ops[0].value;
2923 else if (target != 0)
2924 convert_move (target, ops[0].value, 0);
2925 else
2926 target = convert_to_mode (target_mode, ops[0].value, 0);
2927
2928 return target;
2929 }
2930
2931 /* Callback routine for store_by_pieces.  Read GET_MODE_SIZE (MODE)
2932    bytes from constant string DATA + OFFSET and return it as target
2933 constant. */
2934
2935 static rtx
2936 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
2937 scalar_int_mode mode)
2938 {
2939 const char *str = (const char *) data;
2940
2941 gcc_assert (offset >= 0
2942 && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2943 <= strlen (str) + 1));
2944
2945 return c_readstr (str + offset, mode);
2946 }
2947
2948 /* LEN specifies the length of the block for a memcpy/memset operation.
2949    Figure out its range and store it in MIN_SIZE/MAX_SIZE.
2950    In some cases we can make a very likely guess about the maximum size,
2951    which we then store in PROBABLE_MAX_SIZE.  */
2952
2953 static void
2954 determine_block_size (tree len, rtx len_rtx,
2955 unsigned HOST_WIDE_INT *min_size,
2956 unsigned HOST_WIDE_INT *max_size,
2957 unsigned HOST_WIDE_INT *probable_max_size)
2958 {
2959 if (CONST_INT_P (len_rtx))
2960 {
2961 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
2962 return;
2963 }
2964 else
2965 {
2966 wide_int min, max;
2967 enum value_range_type range_type = VR_UNDEFINED;
2968
2969 /* Determine bounds from the type. */
2970 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
2971 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
2972 else
2973 *min_size = 0;
2974 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
2975 *probable_max_size = *max_size
2976 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
2977 else
2978 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
2979
2980 if (TREE_CODE (len) == SSA_NAME)
2981 range_type = get_range_info (len, &min, &max);
2982 if (range_type == VR_RANGE)
2983 {
2984 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
2985 *min_size = min.to_uhwi ();
2986 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
2987 *probable_max_size = *max_size = max.to_uhwi ();
2988 }
2989 else if (range_type == VR_ANTI_RANGE)
2990 {
2991 	  /* An anti-range 0...N lets us determine that the minimal size is N+1.  */
2992 if (min == 0)
2993 {
2994 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
2995 *min_size = max.to_uhwi () + 1;
2996 }
2997 /* Code like
2998
2999 int n;
3000 if (n < 100)
3001 memcpy (a, b, n)
3002
3003 	     produces an anti-range allowing negative values of N.  We can
3004 	     still use that information and guess that N is not negative.
3005 	     */
3006 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3007 *probable_max_size = min.to_uhwi () - 1;
3008 }
3009 }
3010 gcc_checking_assert (*max_size <=
3011 (unsigned HOST_WIDE_INT)
3012 GET_MODE_MASK (GET_MODE (len_rtx)));
3013 }
3014
3015 /* Try to verify that the sizes and lengths of the arguments to a string
3016 manipulation function given by EXP are within valid bounds and that
3017 the operation does not lead to buffer overflow or read past the end.
3018 Arguments other than EXP may be null. When non-null, the arguments
3019 have the following meaning:
3020 DST is the destination of a copy call or NULL otherwise.
3021 SRC is the source of a copy call or NULL otherwise.
3022 DSTWRITE is the number of bytes written into the destination obtained
3023 from the user-supplied size argument to the function (such as in
3024    memcpy (DST, SRC, DSTWRITE) or strncpy (DST, SRC, DSTWRITE)).
3025 MAXREAD is the user-supplied bound on the length of the source sequence
3026    (such as in strncat (d, s, N)).  It specifies the upper limit on the number
3027 of bytes to write. If NULL, it's taken to be the same as DSTWRITE.
3028 SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
3029 expression EXP is a string function call (as opposed to a memory call
3030 like memcpy). As an exception, SRCSTR can also be an integer denoting
3031 the precomputed size of the source string or object (for functions like
3032 memcpy).
3033 DSTSIZE is the size of the destination object specified by the last
3034 argument to the _chk builtins, typically resulting from the expansion
3035 of __builtin_object_size (such as in __builtin___strcpy_chk(DST, SRC,
3036    DSTSIZE)).
3037
3038 When DSTWRITE is null LEN is checked to verify that it doesn't exceed
3039 SIZE_MAX.
3040
3041 If the call is successfully verified as safe return true, otherwise
3042 return false. */
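/* As a concrete example, given

     char d[3];
     strcpy (d, "abcd");

   the copy writes five bytes into a three-byte object, so check_access
   returns false and a -Wstringop-overflow warning is issued.  */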
3043
3044 static bool
3045 check_access (tree exp, tree, tree, tree dstwrite,
3046 tree maxread, tree srcstr, tree dstsize)
3047 {
3048 int opt = OPT_Wstringop_overflow_;
3049
3050 /* The size of the largest object is half the address space, or
3051 PTRDIFF_MAX. (This is way too permissive.) */
3052 tree maxobjsize = max_object_size ();
3053
3054 /* Either the length of the source string for string functions or
3055 the size of the source object for raw memory functions. */
3056 tree slen = NULL_TREE;
3057
3058 tree range[2] = { NULL_TREE, NULL_TREE };
3059
3060 /* Set to true when the exact number of bytes written by a string
3061 function like strcpy is not known and the only thing that is
3062 known is that it must be at least one (for the terminating nul). */
3063 bool at_least_one = false;
3064 if (srcstr)
3065 {
3066 /* SRCSTR is normally a pointer to string but as a special case
3067 it can be an integer denoting the length of a string. */
3068 if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
3069 {
3070 /* Try to determine the range of lengths the source string
3071 refers to. If it can be determined and is less than
3072 the upper bound given by MAXREAD add one to it for
3073 the terminating nul. Otherwise, set it to one for
3074 the same reason, or to MAXREAD as appropriate. */
3075 get_range_strlen (srcstr, range);
3076 if (range[0] && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
3077 {
3078 if (maxread && tree_int_cst_le (maxread, range[0]))
3079 range[0] = range[1] = maxread;
3080 else
3081 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3082 range[0], size_one_node);
3083
3084 if (maxread && tree_int_cst_le (maxread, range[1]))
3085 range[1] = maxread;
3086 else if (!integer_all_onesp (range[1]))
3087 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3088 range[1], size_one_node);
3089
3090 slen = range[0];
3091 }
3092 else
3093 {
3094 at_least_one = true;
3095 slen = size_one_node;
3096 }
3097 }
3098 else
3099 slen = srcstr;
3100 }
3101
3102 if (!dstwrite && !maxread)
3103 {
3104 /* When the only available piece of data is the object size
3105 there is nothing to do. */
3106 if (!slen)
3107 return true;
3108
3109 /* Otherwise, when the length of the source sequence is known
3110 (as with strlen), set DSTWRITE to it. */
3111 if (!range[0])
3112 dstwrite = slen;
3113 }
3114
3115 if (!dstsize)
3116 dstsize = maxobjsize;
3117
3118 if (dstwrite)
3119 get_size_range (dstwrite, range);
3120
3121 tree func = get_callee_fndecl (exp);
3122
3123 /* First check the number of bytes to be written against the maximum
3124 object size. */
3125 if (range[0] && tree_int_cst_lt (maxobjsize, range[0]))
3126 {
3127 location_t loc = tree_nonartificial_location (exp);
3128 loc = expansion_point_location_if_in_system_header (loc);
3129
3130 if (range[0] == range[1])
3131 warning_at (loc, opt,
3132 "%K%qD specified size %E "
3133 "exceeds maximum object size %E",
3134 exp, func, range[0], maxobjsize);
3135 else
3136 warning_at (loc, opt,
3137 "%K%qD specified size between %E and %E "
3138 "exceeds maximum object size %E",
3139 exp, func,
3140 range[0], range[1], maxobjsize);
3141 return false;
3142 }
3143
3144 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
3145 constant, and in range of unsigned HOST_WIDE_INT. */
3146 bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);
3147
3148 /* Next check the number of bytes to be written against the destination
3149 object size. */
3150 if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
3151 {
3152 if (range[0]
3153 && ((tree_fits_uhwi_p (dstsize)
3154 && tree_int_cst_lt (dstsize, range[0]))
3155 || (tree_fits_uhwi_p (dstwrite)
3156 && tree_int_cst_lt (dstwrite, range[0]))))
3157 {
3158 if (TREE_NO_WARNING (exp))
3159 return false;
3160
3161 location_t loc = tree_nonartificial_location (exp);
3162 loc = expansion_point_location_if_in_system_header (loc);
3163
3164 if (dstwrite == slen && at_least_one)
3165 {
3166 /* This is a call to strcpy with a destination of 0 size
3167 and a source of unknown length. The call will write
3168 at least one byte past the end of the destination. */
3169 warning_at (loc, opt,
3170 "%K%qD writing %E or more bytes into a region "
3171 "of size %E overflows the destination",
3172 exp, func, range[0], dstsize);
3173 }
3174 else if (tree_int_cst_equal (range[0], range[1]))
3175 warning_n (loc, opt, tree_to_uhwi (range[0]),
3176 "%K%qD writing %E byte into a region "
3177 "of size %E overflows the destination",
3178 "%K%qD writing %E bytes into a region "
3179 "of size %E overflows the destination",
3180 exp, func, range[0], dstsize);
3181 else if (tree_int_cst_sign_bit (range[1]))
3182 {
3183 /* Avoid printing the upper bound if it's invalid. */
3184 warning_at (loc, opt,
3185 "%K%qD writing %E or more bytes into a region "
3186 "of size %E overflows the destination",
3187 exp, func, range[0], dstsize);
3188 }
3189 else
3190 warning_at (loc, opt,
3191 "%K%qD writing between %E and %E bytes into "
3192 "a region of size %E overflows the destination",
3193 exp, func, range[0], range[1],
3194 dstsize);
3195
3196 /* Return error when an overflow has been detected. */
3197 return false;
3198 }
3199 }
3200
3201 /* Check the maximum length of the source sequence against the size
3202 of the destination object if known, or against the maximum size
3203 of an object. */
3204 if (maxread)
3205 {
3206 get_size_range (maxread, range);
3207
3208 /* Use the lower end for MAXREAD from now on. */
3209 if (range[0])
3210 maxread = range[0];
3211
3212 if (range[0] && dstsize && tree_fits_uhwi_p (dstsize))
3213 {
3214 location_t loc = tree_nonartificial_location (exp);
3215 loc = expansion_point_location_if_in_system_header (loc);
3216
3217 if (tree_int_cst_lt (maxobjsize, range[0]))
3218 {
3219 if (TREE_NO_WARNING (exp))
3220 return false;
3221
3222 /* Warn about crazy big sizes first since that's more
3223 likely to be meaningful than saying that the bound
3224 is greater than the object size if both are big. */
3225 if (range[0] == range[1])
3226 warning_at (loc, opt,
3227 "%K%qD specified bound %E "
3228 "exceeds maximum object size %E",
3229 exp, func,
3230 range[0], maxobjsize);
3231 else
3232 warning_at (loc, opt,
3233 "%K%qD specified bound between %E and %E "
3234 "exceeds maximum object size %E",
3235 exp, func,
3236 range[0], range[1], maxobjsize);
3237
3238 return false;
3239 }
3240
3241 if (dstsize != maxobjsize && tree_int_cst_lt (dstsize, range[0]))
3242 {
3243 if (TREE_NO_WARNING (exp))
3244 return false;
3245
3246 if (tree_int_cst_equal (range[0], range[1]))
3247 warning_at (loc, opt,
3248 "%K%qD specified bound %E "
3249 "exceeds destination size %E",
3250 exp, func,
3251 range[0], dstsize);
3252 else
3253 warning_at (loc, opt,
3254 "%K%qD specified bound between %E and %E "
3255 "exceeds destination size %E",
3256 exp, func,
3257 range[0], range[1], dstsize);
3258 return false;
3259 }
3260 }
3261 }
3262
3263 /* Check for reading past the end of SRC. */
3264 if (slen
3265 && slen == srcstr
3266 && dstwrite && range[0]
3267 && tree_int_cst_lt (slen, range[0]))
3268 {
3269 if (TREE_NO_WARNING (exp))
3270 return false;
3271
3272 location_t loc = tree_nonartificial_location (exp);
3273
3274 if (tree_int_cst_equal (range[0], range[1]))
3275 warning_n (loc, opt, tree_to_uhwi (range[0]),
3276 "%K%qD reading %E byte from a region of size %E",
3277 "%K%qD reading %E bytes from a region of size %E",
3278 exp, func, range[0], slen);
3279 else if (tree_int_cst_sign_bit (range[1]))
3280 {
3281 /* Avoid printing the upper bound if it's invalid. */
3282 warning_at (loc, opt,
3283 "%K%qD reading %E or more bytes from a region "
3284 "of size %E",
3285 exp, func, range[0], slen);
3286 }
3287 else
3288 warning_at (loc, opt,
3289 "%K%qD reading between %E and %E bytes from a region "
3290 "of size %E",
3291 exp, func, range[0], range[1], slen);
3292 return false;
3293 }
3294
3295 return true;
3296 }
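
/* An illustrative sketch (not part of GCC) of the kind of call the
   checks above diagnose, assuming -Wstringop-overflow is enabled:

     char d[4];
     memcpy (d, s, 8);

   Here DSTWRITE is 8, DSTSIZE is 4, and the singular/plural/range
   form of the "writing N bytes into a region of size M" warning is
   chosen from RANGE[] before returning false.  */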
3297
3298 /* Helper to compute the size of the object referenced by the DEST
3299 expression which must have pointer type, using Object Size type
3300 OSTYPE (only the least significant 2 bits are used). Return
3301 an estimate of the size of the object if successful or NULL when
3302 the size cannot be determined. When the referenced object involves
3303 a non-constant offset in some range the returned value represents
3304 the largest size given the smallest non-negative offset in the
3305 range. The function is intended for diagnostics and should not
3306 be used to influence code generation or optimization. */
3307
3308 tree
3309 compute_objsize (tree dest, int ostype)
3310 {
3311 unsigned HOST_WIDE_INT size;
3312
3313 /* Only the two least significant bits are meaningful. */
3314 ostype &= 3;
3315
3316 if (compute_builtin_object_size (dest, ostype, &size))
3317 return build_int_cst (sizetype, size);
3318
3319 if (TREE_CODE (dest) == SSA_NAME)
3320 {
3321 gimple *stmt = SSA_NAME_DEF_STMT (dest);
3322 if (!is_gimple_assign (stmt))
3323 return NULL_TREE;
3324
3325 dest = gimple_assign_rhs1 (stmt);
3326
3327 tree_code code = gimple_assign_rhs_code (stmt);
3328 if (code == POINTER_PLUS_EXPR)
3329 {
3330 /* compute_builtin_object_size fails for addresses with
3331 non-constant offsets. Try to determine the range of
3332 such an offset here and use it to adjust the constant
3333 size. */
3334 tree off = gimple_assign_rhs2 (stmt);
3335 if (TREE_CODE (off) == SSA_NAME
3336 && INTEGRAL_TYPE_P (TREE_TYPE (off)))
3337 {
3338 wide_int min, max;
3339 enum value_range_type rng = get_range_info (off, &min, &max);
3340
3341 if (rng == VR_RANGE)
3342 {
3343 if (tree size = compute_objsize (dest, ostype))
3344 {
3345 wide_int wisiz = wi::to_wide (size);
3346
3347 /* Ignore negative offsets for now. For others,
3348 use the lower bound as the most optimistic
3349 estimate of the (remaining) size. */
3350 if (wi::sign_mask (min))
3351 ;
3352 else if (wi::ltu_p (min, wisiz))
3353 return wide_int_to_tree (TREE_TYPE (size),
3354 wi::sub (wisiz, min));
3355 else
3356 return size_zero_node;
3357 }
3358 }
3359 }
3360 }
3361 else if (code != ADDR_EXPR)
3362 return NULL_TREE;
3363 }
3364
3365 /* Unless computing the largest size (for memcpy and other raw memory
3366 functions), try to determine the size of the object from its type. */
3367 if (!ostype)
3368 return NULL_TREE;
3369
3370 if (TREE_CODE (dest) != ADDR_EXPR)
3371 return NULL_TREE;
3372
3373 tree type = TREE_TYPE (dest);
3374 if (TREE_CODE (type) == POINTER_TYPE)
3375 type = TREE_TYPE (type);
3376
3377 type = TYPE_MAIN_VARIANT (type);
3378
3379 if (TREE_CODE (type) == ARRAY_TYPE
3380 && !array_at_struct_end_p (TREE_OPERAND (dest, 0)))
3381 {
3382 /* Return the constant size unless it's zero (that's a zero-length
3383 array likely at the end of a struct). */
3384 tree size = TYPE_SIZE_UNIT (type);
3385 if (size && TREE_CODE (size) == INTEGER_CST
3386 && !integer_zerop (size))
3387 return size;
3388 }
3389
3390 return NULL_TREE;
3391 }
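
/* An illustrative sketch (not part of GCC) of the POINTER_PLUS_EXPR
   handling above: given

     char a[8];
     char *p = a + i;

   where VRP has determined that I is in the range [2, 6],
   compute_objsize (p, 1) takes the size of A (8 bytes), subtracts the
   smallest non-negative offset (2), and returns 6 -- the most
   optimistic estimate of the space remaining past P.  */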
3392
3393 /* Helper to determine and check the sizes of the source and the destination
3394 of calls to __builtin_{bzero,memcpy,mempcpy,memset}. EXP is the
3395 call expression, DEST is the destination argument, SRC is the source
3396 argument or null, and SIZE is the number of bytes. Use Object Size type-0
3397 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
3398 (no overflow or invalid sizes), false otherwise. */
3399
3400 static bool
3401 check_memop_access (tree exp, tree dest, tree src, tree size)
3402 {
3403 /* For functions like memset and memcpy that operate on raw memory
3404 try to determine the size of the largest source and destination
3405 object using type-0 Object Size regardless of the object size
3406 type specified by the option. */
3407 tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
3408 tree dstsize = compute_objsize (dest, 0);
3409
3410 return check_access (exp, dest, src, size, /*maxread=*/NULL_TREE,
3411 srcsize, dstsize);
3412 }
3413
3414 /* Validate memchr arguments without performing any expansion.
3415 Return NULL_RTX. */
3416
3417 static rtx
3418 expand_builtin_memchr (tree exp, rtx)
3419 {
3420 if (!validate_arglist (exp,
3421 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3422 return NULL_RTX;
3423
3424 tree arg1 = CALL_EXPR_ARG (exp, 0);
3425 tree len = CALL_EXPR_ARG (exp, 2);
3426
3427 /* Diagnose calls where the specified length exceeds the size
3428 of the object. */
3429 if (warn_stringop_overflow)
3430 {
3431 tree size = compute_objsize (arg1, 0);
3432 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
3433 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
3434 }
3435
3436 return NULL_RTX;
3437 }
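
/* For example (a sketch, not part of GCC):

     char buf[4];
     memchr (buf, c, 16);

   is diagnosed by the check above because the bound 16 exceeds the
   4-byte object, but the call itself is still emitted as a normal
   library call since this function always returns NULL_RTX.  */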
3438
3439 /* Expand a call EXP to the memcpy builtin.
3440 Return NULL_RTX if we failed; the caller should emit a normal call,
3441 otherwise try to get the result in TARGET, if convenient (and in
3442 mode MODE if that's convenient). */
3443
3444 static rtx
3445 expand_builtin_memcpy (tree exp, rtx target)
3446 {
3447 if (!validate_arglist (exp,
3448 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3449 return NULL_RTX;
3450
3451 tree dest = CALL_EXPR_ARG (exp, 0);
3452 tree src = CALL_EXPR_ARG (exp, 1);
3453 tree len = CALL_EXPR_ARG (exp, 2);
3454
3455 check_memop_access (exp, dest, src, len);
3456
3457 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3458 /*endp=*/ 0);
3459 }
3460
3461 /* Check a call EXP to the memmove built-in for validity.
3462 Return NULL_RTX on both success and failure. */
3463
3464 static rtx
3465 expand_builtin_memmove (tree exp, rtx)
3466 {
3467 if (!validate_arglist (exp,
3468 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3469 return NULL_RTX;
3470
3471 tree dest = CALL_EXPR_ARG (exp, 0);
3472 tree src = CALL_EXPR_ARG (exp, 1);
3473 tree len = CALL_EXPR_ARG (exp, 2);
3474
3475 check_memop_access (exp, dest, src, len);
3476
3477 return NULL_RTX;
3478 }
3479
3480 /* Expand an instrumented call EXP to the memcpy builtin.
3481 Return NULL_RTX if we failed; the caller should emit a normal call,
3482 otherwise try to get the result in TARGET, if convenient (and in
3483 mode MODE if that's convenient). */
3484
3485 static rtx
3486 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3487 {
3488 if (!validate_arglist (exp,
3489 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3490 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3491 INTEGER_TYPE, VOID_TYPE))
3492 return NULL_RTX;
3493 else
3494 {
3495 tree dest = CALL_EXPR_ARG (exp, 0);
3496 tree src = CALL_EXPR_ARG (exp, 2);
3497 tree len = CALL_EXPR_ARG (exp, 4);
3498 rtx res = expand_builtin_memory_copy_args (dest, src, len, target, exp,
3499 /*end_p=*/ 0);
3500
3501 /* Return src bounds with the result. */
3502 if (res)
3503 {
3504 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3505 expand_normal (CALL_EXPR_ARG (exp, 1)));
3506 res = chkp_join_splitted_slot (res, bnd);
3507 }
3508 return res;
3509 }
3510 }
3511
3512 /* Expand a call EXP to the mempcpy builtin.
3513 Return NULL_RTX if we failed; the caller should emit a normal call,
3514 otherwise try to get the result in TARGET, if convenient (and in
3515 mode MODE if that's convenient). If ENDP is 0 return the
3516 destination pointer, if ENDP is 1 return the end pointer ala
3517 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3518 stpcpy. */
3519
3520 static rtx
3521 expand_builtin_mempcpy (tree exp, rtx target)
3522 {
3523 if (!validate_arglist (exp,
3524 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3525 return NULL_RTX;
3526
3527 tree dest = CALL_EXPR_ARG (exp, 0);
3528 tree src = CALL_EXPR_ARG (exp, 1);
3529 tree len = CALL_EXPR_ARG (exp, 2);
3530
3531 /* Policy does not generally allow using compute_objsize (which
3532 is used internally by check_memop_access) to change code generation
3533 or drive optimization decisions.
3534
3535 In this instance it is safe because the code we generate has
3536 the same semantics regardless of the return value of
3537 check_memop_access. Exactly the same amount of data is copied
3538 and the return value is exactly the same in both cases.
3539
3540 Furthermore, check_memop_access always uses mode 0 for the call to
3541 compute_objsize, so the imprecise nature of compute_objsize is
3542 avoided. */
3543
3544 /* Avoid expanding mempcpy into memcpy when the call is determined
3545 to overflow the buffer. This also prevents the same overflow
3546 from being diagnosed again when expanding memcpy. */
3547 if (!check_memop_access (exp, dest, src, len))
3548 return NULL_RTX;
3549
3550 return expand_builtin_mempcpy_args (dest, src, len,
3551 target, exp, /*endp=*/ 1);
3552 }
3553
3554 /* Expand an instrumented call EXP to the mempcpy builtin.
3555 Return NULL_RTX if we failed; the caller should emit a normal call,
3556 otherwise try to get the result in TARGET, if convenient (and in
3557 mode MODE if that's convenient). */
3558
3559 static rtx
3560 expand_builtin_mempcpy_with_bounds (tree exp, rtx target)
3561 {
3562 if (!validate_arglist (exp,
3563 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3564 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3565 INTEGER_TYPE, VOID_TYPE))
3566 return NULL_RTX;
3567 else
3568 {
3569 tree dest = CALL_EXPR_ARG (exp, 0);
3570 tree src = CALL_EXPR_ARG (exp, 2);
3571 tree len = CALL_EXPR_ARG (exp, 4);
3572 rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3573 exp, 1);
3574
3575 /* Return src bounds with the result. */
3576 if (res)
3577 {
3578 rtx bnd = force_reg (targetm.chkp_bound_mode (),
3579 expand_normal (CALL_EXPR_ARG (exp, 1)));
3580 res = chkp_join_splitted_slot (res, bnd);
3581 }
3582 return res;
3583 }
3584 }
3585
3586 /* Helper function to do the actual work for expanding the memory copy
3587 family of functions (memcpy, mempcpy, stpcpy). The expansion copies
3588 LEN bytes of memory from SRC to DEST and assigns the result to
3589 TARGET if convenient. If ENDP is 0 return the
3590 destination pointer, if ENDP is 1 return the end pointer ala
3591 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3592 stpcpy. */
3593
3594 static rtx
3595 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3596 rtx target, tree exp, int endp)
3597 {
3598 const char *src_str;
3599 unsigned int src_align = get_pointer_alignment (src);
3600 unsigned int dest_align = get_pointer_alignment (dest);
3601 rtx dest_mem, src_mem, dest_addr, len_rtx;
3602 HOST_WIDE_INT expected_size = -1;
3603 unsigned int expected_align = 0;
3604 unsigned HOST_WIDE_INT min_size;
3605 unsigned HOST_WIDE_INT max_size;
3606 unsigned HOST_WIDE_INT probable_max_size;
3607
3608 /* If DEST is not a pointer type, call the normal function. */
3609 if (dest_align == 0)
3610 return NULL_RTX;
3611
3612 /* If SRC is not a pointer type, don't do this
3613 operation in-line. */
3614 if (src_align == 0)
3615 return NULL_RTX;
3616
3617 if (currently_expanding_gimple_stmt)
3618 stringop_block_profile (currently_expanding_gimple_stmt,
3619 &expected_align, &expected_size);
3620
3621 if (expected_align < dest_align)
3622 expected_align = dest_align;
3623 dest_mem = get_memory_rtx (dest, len);
3624 set_mem_align (dest_mem, dest_align);
3625 len_rtx = expand_normal (len);
3626 determine_block_size (len, len_rtx, &min_size, &max_size,
3627 &probable_max_size);
3628 src_str = c_getstr (src);
3629
3630 /* If SRC is a string constant and block move would be done
3631 by pieces, we can avoid loading the string from memory
3632 and only store the computed constants. */
3633 if (src_str
3634 && CONST_INT_P (len_rtx)
3635 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3636 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3637 CONST_CAST (char *, src_str),
3638 dest_align, false))
3639 {
3640 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3641 builtin_memcpy_read_str,
3642 CONST_CAST (char *, src_str),
3643 dest_align, false, endp);
3644 dest_mem = force_operand (XEXP (dest_mem, 0), target);
3645 dest_mem = convert_memory_address (ptr_mode, dest_mem);
3646 return dest_mem;
3647 }
3648
3649 src_mem = get_memory_rtx (src, len);
3650 set_mem_align (src_mem, src_align);
3651
3652 /* Copy word part most expediently. */
3653 enum block_op_methods method = BLOCK_OP_NORMAL;
3654 if (CALL_EXPR_TAILCALL (exp) && (endp == 0 || target == const0_rtx))
3655 method = BLOCK_OP_TAILCALL;
3656 if (endp == 1 && target != const0_rtx)
3657 method = BLOCK_OP_NO_LIBCALL_RET;
3658 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
3659 expected_align, expected_size,
3660 min_size, max_size, probable_max_size);
3661 if (dest_addr == pc_rtx)
3662 return NULL_RTX;
3663
3664 if (dest_addr == 0)
3665 {
3666 dest_addr = force_operand (XEXP (dest_mem, 0), target);
3667 dest_addr = convert_memory_address (ptr_mode, dest_addr);
3668 }
3669
3670 if (endp && target != const0_rtx)
3671 {
3672 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3673 /* stpcpy returns a pointer to the last byte. */
3674 if (endp == 2)
3675 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
3676 }
3677
3678 return dest_addr;
3679 }
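
/* An illustrative summary (not part of GCC) of the ENDP convention
   used above, for a copy of N bytes to DEST:

     endp == 0   return DEST              (memcpy)
     endp == 1   return DEST + N          (mempcpy)
     endp == 2   return DEST + N - 1      (stpcpy, N counting the nul)  */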
3680
3681 static rtx
3682 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3683 rtx target, tree orig_exp, int endp)
3684 {
3685 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3686 endp);
3687 }
3688
3689 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
3690 we failed, the caller should emit a normal call, otherwise try to
3691 get the result in TARGET, if convenient. If ENDP is 0 return the
3692 destination pointer, if ENDP is 1 return the end pointer ala
3693 mempcpy, and if ENDP is 2 return the end pointer minus one ala
3694 stpcpy. */
3695
3696 static rtx
3697 expand_movstr (tree dest, tree src, rtx target, int endp)
3698 {
3699 struct expand_operand ops[3];
3700 rtx dest_mem;
3701 rtx src_mem;
3702
3703 if (!targetm.have_movstr ())
3704 return NULL_RTX;
3705
3706 dest_mem = get_memory_rtx (dest, NULL);
3707 src_mem = get_memory_rtx (src, NULL);
3708 if (!endp)
3709 {
3710 target = force_reg (Pmode, XEXP (dest_mem, 0));
3711 dest_mem = replace_equiv_address (dest_mem, target);
3712 }
3713
3714 create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3715 create_fixed_operand (&ops[1], dest_mem);
3716 create_fixed_operand (&ops[2], src_mem);
3717 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3718 return NULL_RTX;
3719
3720 if (endp && target != const0_rtx)
3721 {
3722 target = ops[0].value;
3723 /* movstr is supposed to set end to the address of the NUL
3724 terminator. If the caller requested a mempcpy-like return value,
3725 adjust it. */
3726 if (endp == 1)
3727 {
3728 rtx tem = plus_constant (GET_MODE (target),
3729 gen_lowpart (GET_MODE (target), target), 1);
3730 emit_move_insn (target, force_operand (tem, NULL_RTX));
3731 }
3732 }
3733 return target;
3734 }
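
/* A sketch (not part of GCC) of the adjustment above: movstr leaves
   the address of the terminating nul in OPS[0].  A caller with
   mempcpy-like semantics (ENDP == 1) wants nul + 1, hence the
   plus_constant of 1, while stpcpy (ENDP == 2) can use the movstr
   result unchanged.  */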
3735
3736 /* Do some very basic size validation of a call to the strcat builtin
3737 given by EXP. Return NULL_RTX to have the built-in expand to a call
3738 to the library function. */
3739
3740 static rtx
3741 expand_builtin_strcat (tree exp, rtx)
3742 {
3743 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
3744 || !warn_stringop_overflow)
3745 return NULL_RTX;
3746
3747 tree dest = CALL_EXPR_ARG (exp, 0);
3748 tree src = CALL_EXPR_ARG (exp, 1);
3749
3750 /* There is no way here to determine the length of the string in
3751 the destination to which the SRC string is being appended so
3752 just diagnose cases when the source string is longer than
3753 the destination object. */
3754
3755 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3756
3757 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, src,
3758 destsize);
3759
3760 return NULL_RTX;
3761 }
3762
3763 /* Expand expression EXP, which is a call to the strcpy builtin. Return
3764 NULL_RTX if we failed; the caller should emit a normal call, otherwise
3765 try to get the result in TARGET, if convenient (and in mode MODE if that's
3766 convenient). */
3767
3768 static rtx
3769 expand_builtin_strcpy (tree exp, rtx target)
3770 {
3771 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3772 return NULL_RTX;
3773
3774 tree dest = CALL_EXPR_ARG (exp, 0);
3775 tree src = CALL_EXPR_ARG (exp, 1);
3776
3777 if (warn_stringop_overflow)
3778 {
3779 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3780 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
3781 src, destsize);
3782 }
3783
3784 return expand_builtin_strcpy_args (dest, src, target);
3785 }
3786
3787 /* Helper function to do the actual work for expand_builtin_strcpy. The
3788 arguments to the builtin_strcpy call DEST and SRC are broken out
3789 so that this can also be called without constructing an actual CALL_EXPR.
3790 The other arguments and return value are the same as for
3791 expand_builtin_strcpy. */
3792
3793 static rtx
3794 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3795 {
3796 return expand_movstr (dest, src, target, /*endp=*/0);
3797 }
3798
3799 /* Expand a call EXP to the stpcpy builtin.
3800 Return NULL_RTX if we failed; the caller should emit a normal call,
3801 otherwise try to get the result in TARGET, if convenient (and in
3802 mode MODE if that's convenient). */
3803
3804 static rtx
3805 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3806 {
3807 tree dst, src;
3808 location_t loc = EXPR_LOCATION (exp);
3809
3810 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3811 return NULL_RTX;
3812
3813 dst = CALL_EXPR_ARG (exp, 0);
3814 src = CALL_EXPR_ARG (exp, 1);
3815
3816 if (warn_stringop_overflow)
3817 {
3818 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
3819 check_access (exp, dst, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
3820 src, destsize);
3821 }
3822
3823 /* If return value is ignored, transform stpcpy into strcpy. */
3824 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3825 {
3826 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3827 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3828 return expand_expr (result, target, mode, EXPAND_NORMAL);
3829 }
3830 else
3831 {
3832 tree len, lenp1;
3833 rtx ret;
3834
3835 /* Ensure we get an actual string whose length can be evaluated at
3836 compile-time, not an expression containing a string. This is
3837 because the latter will potentially produce pessimized code
3838 when used to produce the return value. */
3839 if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3840 return expand_movstr (dst, src, target, /*endp=*/2);
3841
3842 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3843 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3844 target, exp, /*endp=*/2);
3845
3846 if (ret)
3847 return ret;
3848
3849 if (TREE_CODE (len) == INTEGER_CST)
3850 {
3851 rtx len_rtx = expand_normal (len);
3852
3853 if (CONST_INT_P (len_rtx))
3854 {
3855 ret = expand_builtin_strcpy_args (dst, src, target);
3856
3857 if (ret)
3858 {
3859 if (! target)
3860 {
3861 if (mode != VOIDmode)
3862 target = gen_reg_rtx (mode);
3863 else
3864 target = gen_reg_rtx (GET_MODE (ret));
3865 }
3866 if (GET_MODE (target) != GET_MODE (ret))
3867 ret = gen_lowpart (GET_MODE (target), ret);
3868
3869 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3870 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3871 gcc_assert (ret);
3872
3873 return target;
3874 }
3875 }
3876 }
3877
3878 return expand_movstr (dst, src, target, /*endp=*/2);
3879 }
3880 }
3881
3882 /* Check a call EXP to the stpncpy built-in for validity.
3883 Return NULL_RTX on both success and failure. */
3884
3885 static rtx
3886 expand_builtin_stpncpy (tree exp, rtx)
3887 {
3888 if (!validate_arglist (exp,
3889 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3890 || !warn_stringop_overflow)
3891 return NULL_RTX;
3892
3893 /* The source and destination of the call. */
3894 tree dest = CALL_EXPR_ARG (exp, 0);
3895 tree src = CALL_EXPR_ARG (exp, 1);
3896
3897 /* The exact number of bytes to write (not the maximum). */
3898 tree len = CALL_EXPR_ARG (exp, 2);
3899
3900 /* The size of the destination object. */
3901 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3902
3903 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src, destsize);
3904
3905 return NULL_RTX;
3906 }
3907
3908 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
3909 bytes from constant string DATA + OFFSET and return it as target
3910 constant. */
3911
3912 rtx
3913 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3914 scalar_int_mode mode)
3915 {
3916 const char *str = (const char *) data;
3917
3918 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3919 return const0_rtx;
3920
3921 return c_readstr (str + offset, mode);
3922 }
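
/* For instance (a sketch, not part of GCC), with DATA == "ab" and a
   4-byte MODE, offset 0 yields the bytes 'a', 'b', 0, 0 (c_readstr
   zero-fills past the terminating nul), and any OFFSET greater than
   strlen ("ab") yields const0_rtx outright.  This is what gives
   strncpy its required zero padding when stored by pieces.  */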
3923
3924 /* Helper to check the sizes of sequences and the destination of calls
3925 to __builtin_strncat and __builtin___strncat_chk. Returns true on
3926 success (no overflow or invalid sizes), false otherwise. */
3927
3928 static bool
3929 check_strncat_sizes (tree exp, tree objsize)
3930 {
3931 tree dest = CALL_EXPR_ARG (exp, 0);
3932 tree src = CALL_EXPR_ARG (exp, 1);
3933 tree maxread = CALL_EXPR_ARG (exp, 2);
3934
3935 /* Try to determine the range of lengths that the source expression
3936 refers to. */
3937 tree lenrange[2];
3938 get_range_strlen (src, lenrange);
3939
3940 /* Try to verify that the destination is big enough for the shortest
3941 string. */
3942
3943 if (!objsize && warn_stringop_overflow)
3944 {
3945 /* If it hasn't been provided by __strncat_chk, try to determine
3946 the size of the destination object into which the source is
3947 being copied. */
3948 objsize = compute_objsize (dest, warn_stringop_overflow - 1);
3949 }
3950
3951 /* Add one for the terminating nul. */
3952 tree srclen = (lenrange[0]
3953 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
3954 size_one_node)
3955 : NULL_TREE);
3956
3957 /* The strncat function copies at most MAXREAD bytes and always appends
3958 the terminating nul so the specified upper bound should never be equal
3959 to (or greater than) the size of the destination. */
3960 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
3961 && tree_int_cst_equal (objsize, maxread))
3962 {
3963 location_t loc = tree_nonartificial_location (exp);
3964 loc = expansion_point_location_if_in_system_header (loc);
3965
3966 warning_at (loc, OPT_Wstringop_overflow_,
3967 "%K%qD specified bound %E equals destination size",
3968 exp, get_callee_fndecl (exp), maxread);
3969
3970 return false;
3971 }
3972
3973 if (!srclen
3974 || (maxread && tree_fits_uhwi_p (maxread)
3975 && tree_fits_uhwi_p (srclen)
3976 && tree_int_cst_lt (maxread, srclen)))
3977 srclen = maxread;
3978
3979 /* The number of bytes to write is LEN but check_access will also
3980 check SRCLEN if LEN's value isn't known. */
3981 return check_access (exp, dest, src, /*size=*/NULL_TREE, maxread, srclen,
3982 objsize);
3983 }
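
/* The classic mistake diagnosed above (an illustrative sketch, not
   part of GCC):

     char d[8];
     strncat (d, s, sizeof d);

   Since strncat appends a nul after at most MAXREAD bytes, a bound
   equal to the destination size always leaves no room for it; a
   correct bound here would be sizeof d - strlen (d) - 1.  */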
3984
3985 /* Similar to expand_builtin_strcat, do some very basic size validation
3986 of a call to the strncat builtin given by EXP. Return NULL_RTX to have
3987 the built-in expand to a call to the library function. */
3988
3989 static rtx
3990 expand_builtin_strncat (tree exp, rtx)
3991 {
3992 if (!validate_arglist (exp,
3993 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3994 || !warn_stringop_overflow)
3995 return NULL_RTX;
3996
3997 tree dest = CALL_EXPR_ARG (exp, 0);
3998 tree src = CALL_EXPR_ARG (exp, 1);
3999 /* The upper bound on the number of bytes to write. */
4000 tree maxread = CALL_EXPR_ARG (exp, 2);
4001 /* The length of the source sequence. */
4002 tree slen = c_strlen (src, 1);
4003
4004 /* Try to determine the range of lengths that the source expression
4005 refers to. */
4006 tree lenrange[2];
4007 if (slen)
4008 lenrange[0] = lenrange[1] = slen;
4009 else
4010 get_range_strlen (src, lenrange);
4011
4012 /* Try to verify that the destination is big enough for the shortest
4013 string. First try to determine the size of the destination object
4014 into which the source is being copied. */
4015 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4016
4017 /* Add one for the terminating nul. */
4018 tree srclen = (lenrange[0]
4019 ? fold_build2 (PLUS_EXPR, size_type_node, lenrange[0],
4020 size_one_node)
4021 : NULL_TREE);
4022
4023 /* The strncat function copies at most MAXREAD bytes and always appends
4024 the terminating nul so the specified upper bound should never be equal
4025 to (or greater than) the size of the destination. */
4026 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
4027 && tree_int_cst_equal (destsize, maxread))
4028 {
4029 location_t loc = tree_nonartificial_location (exp);
4030 loc = expansion_point_location_if_in_system_header (loc);
4031
4032 warning_at (loc, OPT_Wstringop_overflow_,
4033 "%K%qD specified bound %E equals destination size",
4034 exp, get_callee_fndecl (exp), maxread);
4035
4036 return NULL_RTX;
4037 }
4038
4039 if (!srclen
4040 || (maxread && tree_fits_uhwi_p (maxread)
4041 && tree_fits_uhwi_p (srclen)
4042 && tree_int_cst_lt (maxread, srclen)))
4043 srclen = maxread;
4044
4045 /* The number of bytes to write is SRCLEN. */
4046 check_access (exp, dest, src, NULL_TREE, maxread, srclen, destsize);
4047
4048 return NULL_RTX;
4049 }
4050
4051 /* Expand expression EXP, which is a call to the strncpy builtin. Return
4052 NULL_RTX if we failed; the caller should emit a normal call. */
4053
4054 static rtx
4055 expand_builtin_strncpy (tree exp, rtx target)
4056 {
4057 location_t loc = EXPR_LOCATION (exp);
4058
4059 if (validate_arglist (exp,
4060 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4061 {
4062 tree dest = CALL_EXPR_ARG (exp, 0);
4063 tree src = CALL_EXPR_ARG (exp, 1);
4064 /* The number of bytes to write (not the maximum). */
4065 tree len = CALL_EXPR_ARG (exp, 2);
4066 /* The length of the source sequence. */
4067 tree slen = c_strlen (src, 1);
4068
4069 if (warn_stringop_overflow)
4070 {
4071 tree destsize = compute_objsize (dest,
4072 warn_stringop_overflow - 1);
4073
4074 /* The number of bytes to write is LEN but check_access will also
4075 check SLEN if LEN's value isn't known. */
4076 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src,
4077 destsize);
4078 }
4079
4080 /* We must be passed a constant len and src parameter. */
4081 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
4082 return NULL_RTX;
4083
4084 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
4085
4086 /* We're required to pad with trailing zeros if the requested
4087 len is greater than strlen(s2)+1. In that case try to
4088 use store_by_pieces, if it fails, punt. */
4089 if (tree_int_cst_lt (slen, len))
4090 {
4091 unsigned int dest_align = get_pointer_alignment (dest);
4092 const char *p = c_getstr (src);
4093 rtx dest_mem;
4094
4095 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
4096 || !can_store_by_pieces (tree_to_uhwi (len),
4097 builtin_strncpy_read_str,
4098 CONST_CAST (char *, p),
4099 dest_align, false))
4100 return NULL_RTX;
4101
4102 dest_mem = get_memory_rtx (dest, len);
4103 store_by_pieces (dest_mem, tree_to_uhwi (len),
4104 builtin_strncpy_read_str,
4105 CONST_CAST (char *, p), dest_align, false, 0);
4106 dest_mem = force_operand (XEXP (dest_mem, 0), target);
4107 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4108 return dest_mem;
4109 }
4110 }
4111 return NULL_RTX;
4112 }
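
/* An illustrative sketch (not part of GCC) of the padding case above:

     char d[8];
     strncpy (d, "ab", sizeof d);

   has constant LEN 8 greater than SLEN 3, so the whole store -- 'a',
   'b', and six trailing nuls -- is emitted by store_by_pieces from
   the string constant, with no runtime call.  */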
4113
4114 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
4115 bytes from constant string DATA + OFFSET and return it as target
4116 constant. */
4117
4118 rtx
4119 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4120 scalar_int_mode mode)
4121 {
4122 const char *c = (const char *) data;
4123 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
4124
4125 memset (p, *c, GET_MODE_SIZE (mode));
4126
4127 return c_readstr (p, mode);
4128 }
4129
4130 /* Callback routine for store_by_pieces. Return the RTL of a register
4131 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4132 char value given in the RTL register data. For example, if mode is
4133 4 bytes wide, return the RTL for 0x01010101*data. */
4134
4135 static rtx
4136 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4137 scalar_int_mode mode)
4138 {
4139 rtx target, coeff;
4140 size_t size;
4141 char *p;
4142
4143 size = GET_MODE_SIZE (mode);
4144 if (size == 1)
4145 return (rtx) data;
4146
4147 p = XALLOCAVEC (char, size);
4148 memset (p, 1, size);
4149 coeff = c_readstr (p, mode);
4150
4151 target = convert_to_mode (mode, (rtx) data, 1);
4152 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4153 return force_reg (mode, target);
4154 }
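
/* A worked instance (not part of GCC) of the multiplication above:
   for a 4-byte MODE the coefficient read from "\1\1\1\1" is
   0x01010101, so a runtime byte value of, say, 0xAB becomes

     0xAB * 0x01010101 == 0xABABABAB

   i.e. the byte replicated into every position of the word.  */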
4155
4156 /* Expand expression EXP, which is a call to the memset builtin. Return
4157 NULL_RTX if we failed; the caller should emit a normal call, otherwise
4158 try to get the result in TARGET, if convenient (and in mode MODE if that's
4159 convenient). */
4160
4161 static rtx
4162 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4163 {
4164 if (!validate_arglist (exp,
4165 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4166 return NULL_RTX;
4167
4168 tree dest = CALL_EXPR_ARG (exp, 0);
4169 tree val = CALL_EXPR_ARG (exp, 1);
4170 tree len = CALL_EXPR_ARG (exp, 2);
4171
4172 check_memop_access (exp, dest, NULL_TREE, len);
4173
4174 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4175 }
4176
4177 /* Expand expression EXP, which is an instrumented call to the memset builtin.
4178 Return NULL_RTX if we failed; the caller should emit a normal call, otherwise
4179 try to get the result in TARGET, if convenient (and in mode MODE if that's
4180 convenient). */
4181
4182 static rtx
4183 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
4184 {
4185 if (!validate_arglist (exp,
4186 POINTER_TYPE, POINTER_BOUNDS_TYPE,
4187 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4188 return NULL_RTX;
4189 else
4190 {
4191 tree dest = CALL_EXPR_ARG (exp, 0);
4192 tree val = CALL_EXPR_ARG (exp, 2);
4193 tree len = CALL_EXPR_ARG (exp, 3);
4194 rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
4195
4196 /* Return src bounds with the result. */
4197 if (res)
4198 {
4199 rtx bnd = force_reg (targetm.chkp_bound_mode (),
4200 expand_normal (CALL_EXPR_ARG (exp, 1)));
4201 res = chkp_join_splitted_slot (res, bnd);
4202 }
4203 return res;
4204 }
4205 }
4206
4207 /* Helper function to do the actual work for expand_builtin_memset. The
4208 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4209 so that this can also be called without constructing an actual CALL_EXPR.
4210 The other arguments and return value are the same as for
4211 expand_builtin_memset. */
4212
4213 static rtx
4214 expand_builtin_memset_args (tree dest, tree val, tree len,
4215 rtx target, machine_mode mode, tree orig_exp)
4216 {
4217 tree fndecl, fn;
4218 enum built_in_function fcode;
4219 machine_mode val_mode;
4220 char c;
4221 unsigned int dest_align;
4222 rtx dest_mem, dest_addr, len_rtx;
4223 HOST_WIDE_INT expected_size = -1;
4224 unsigned int expected_align = 0;
4225 unsigned HOST_WIDE_INT min_size;
4226 unsigned HOST_WIDE_INT max_size;
4227 unsigned HOST_WIDE_INT probable_max_size;
4228
4229 dest_align = get_pointer_alignment (dest);
4230
4231 /* If DEST is not a pointer type, don't do this operation in-line. */
4232 if (dest_align == 0)
4233 return NULL_RTX;
4234
4235 if (currently_expanding_gimple_stmt)
4236 stringop_block_profile (currently_expanding_gimple_stmt,
4237 &expected_align, &expected_size);
4238
4239 if (expected_align < dest_align)
4240 expected_align = dest_align;
4241
4242 /* If the LEN parameter is zero, return DEST. */
4243 if (integer_zerop (len))
4244 {
4245 /* Evaluate and ignore VAL in case it has side-effects. */
4246 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4247 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4248 }
4249
4250 /* Stabilize the arguments in case we fail. */
4251 dest = builtin_save_expr (dest);
4252 val = builtin_save_expr (val);
4253 len = builtin_save_expr (len);
4254
4255 len_rtx = expand_normal (len);
4256 determine_block_size (len, len_rtx, &min_size, &max_size,
4257 &probable_max_size);
4258 dest_mem = get_memory_rtx (dest, len);
4259 val_mode = TYPE_MODE (unsigned_char_type_node);
4260
4261 if (TREE_CODE (val) != INTEGER_CST)
4262 {
4263 rtx val_rtx;
4264
4265 val_rtx = expand_normal (val);
4266 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4267
4268 /* Assume that we can memset by pieces if we can store
4269 the coefficients by pieces (in the required modes).
4270 We can't pass builtin_memset_gen_str as that emits RTL. */
4271 c = 1;
4272 if (tree_fits_uhwi_p (len)
4273 && can_store_by_pieces (tree_to_uhwi (len),
4274 builtin_memset_read_str, &c, dest_align,
4275 true))
4276 {
4277 val_rtx = force_reg (val_mode, val_rtx);
4278 store_by_pieces (dest_mem, tree_to_uhwi (len),
4279 builtin_memset_gen_str, val_rtx, dest_align,
4280 true, 0);
4281 }
4282 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4283 dest_align, expected_align,
4284 expected_size, min_size, max_size,
4285 probable_max_size))
4286 goto do_libcall;
4287
4288 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4289 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4290 return dest_mem;
4291 }
4292
4293 if (target_char_cast (val, &c))
4294 goto do_libcall;
4295
4296 if (c)
4297 {
4298 if (tree_fits_uhwi_p (len)
4299 && can_store_by_pieces (tree_to_uhwi (len),
4300 builtin_memset_read_str, &c, dest_align,
4301 true))
4302 store_by_pieces (dest_mem, tree_to_uhwi (len),
4303 builtin_memset_read_str, &c, dest_align, true, 0);
4304 else if (!set_storage_via_setmem (dest_mem, len_rtx,
4305 gen_int_mode (c, val_mode),
4306 dest_align, expected_align,
4307 expected_size, min_size, max_size,
4308 probable_max_size))
4309 goto do_libcall;
4310
4311 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4312 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4313 return dest_mem;
4314 }
4315
4316 set_mem_align (dest_mem, dest_align);
4317 dest_addr = clear_storage_hints (dest_mem, len_rtx,
4318 CALL_EXPR_TAILCALL (orig_exp)
4319 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4320 expected_align, expected_size,
4321 min_size, max_size,
4322 probable_max_size);
4323
4324 if (dest_addr == 0)
4325 {
4326 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4327 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4328 }
4329
4330 return dest_addr;
4331
4332 do_libcall:
4333 fndecl = get_callee_fndecl (orig_exp);
4334 fcode = DECL_FUNCTION_CODE (fndecl);
4335 if (fcode == BUILT_IN_MEMSET
4336 || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
4337 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4338 dest, val, len);
4339 else if (fcode == BUILT_IN_BZERO)
4340 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4341 dest, len);
4342 else
4343 gcc_unreachable ();
4344 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4345 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4346 return expand_call (fn, target, target == const0_rtx);
4347 }
4348
4349 /* Expand expression EXP, which is a call to the bzero builtin. Return
4350 NULL_RTX if we failed; the caller should emit a normal call. */
4351
4352 static rtx
4353 expand_builtin_bzero (tree exp)
4354 {
4355 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4356 return NULL_RTX;
4357
4358 tree dest = CALL_EXPR_ARG (exp, 0);
4359 tree size = CALL_EXPR_ARG (exp, 1);
4360
4361 check_memop_access (exp, dest, NULL_TREE, size);
4362
4363 /* New argument list transforming bzero(ptr x, int y) to
4364 memset(ptr x, int 0, size_t y). This is done this way
4365 so that if it isn't expanded inline, we fall back to
4366 calling bzero instead of memset. */
4367
4368 location_t loc = EXPR_LOCATION (exp);
4369
4370 return expand_builtin_memset_args (dest, integer_zero_node,
4371 fold_convert_loc (loc,
4372 size_type_node, size),
4373 const0_rtx, VOIDmode, exp);
4374 }
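
/* In other words (an illustrative sketch, not part of GCC):

     bzero (p, n);

   is expanded as if it were memset (p, 0, (size_t) n), but with the
   original bzero CALL_EXPR kept as ORIG_EXP so that any library
   fallback taken in expand_builtin_memset_args calls bzero, not
   memset.  */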
4375
4376 /* Try to expand cmpstr operation ICODE with the given operands.
4377 Return the result rtx on success, otherwise return null. */
4378
4379 static rtx
4380 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4381 HOST_WIDE_INT align)
4382 {
4383 machine_mode insn_mode = insn_data[icode].operand[0].mode;
4384
4385 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4386 target = NULL_RTX;
4387
4388 struct expand_operand ops[4];
4389 create_output_operand (&ops[0], target, insn_mode);
4390 create_fixed_operand (&ops[1], arg1_rtx);
4391 create_fixed_operand (&ops[2], arg2_rtx);
4392 create_integer_operand (&ops[3], align);
4393 if (maybe_expand_insn (icode, 4, ops))
4394 return ops[0].value;
4395 return NULL_RTX;
4396 }
4397
4398 /* Expand expression EXP, which is a call to the memcmp built-in function.
4399 Return NULL_RTX if we failed and the caller should emit a normal call,
4400 otherwise try to get the result in TARGET, if convenient.
4401 RESULT_EQ is true if we can relax the returned value to be either zero
4402 or nonzero, without caring about the sign. */
4403
4404 static rtx
4405 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4406 {
4407 if (!validate_arglist (exp,
4408 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4409 return NULL_RTX;
4410
4411 tree arg1 = CALL_EXPR_ARG (exp, 0);
4412 tree arg2 = CALL_EXPR_ARG (exp, 1);
4413 tree len = CALL_EXPR_ARG (exp, 2);
4414
4415 /* Diagnose calls where the specified length exceeds the size of either
4416 object. */
4417 if (warn_stringop_overflow)
4418 {
4419 tree size = compute_objsize (arg1, 0);
4420 if (check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
4421 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE))
4422 {
4423 size = compute_objsize (arg2, 0);
4424 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
4425 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
4426 }
4427 }
4428
4429 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4430 location_t loc = EXPR_LOCATION (exp);
4431
4432 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4433 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4434
4435 /* If we don't have POINTER_TYPE, call the function. */
4436 if (arg1_align == 0 || arg2_align == 0)
4437 return NULL_RTX;
4438
4439 rtx arg1_rtx = get_memory_rtx (arg1, len);
4440 rtx arg2_rtx = get_memory_rtx (arg2, len);
4441 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4442
4443 /* Set MEM_SIZE as appropriate. */
4444 if (CONST_INT_P (len_rtx))
4445 {
4446 set_mem_size (arg1_rtx, INTVAL (len_rtx));
4447 set_mem_size (arg2_rtx, INTVAL (len_rtx));
4448 }
4449
4450 by_pieces_constfn constfn = NULL;
4451
4452 const char *src_str = c_getstr (arg2);
4453 if (result_eq && src_str == NULL)
4454 {
4455 src_str = c_getstr (arg1);
4456 if (src_str != NULL)
4457 std::swap (arg1_rtx, arg2_rtx);
4458 }
4459
4460 /* If SRC is a string constant and the block comparison would be done
4461 by pieces, we can avoid loading the string from memory
4462 and only store the computed constants. */
4463 if (src_str
4464 && CONST_INT_P (len_rtx)
4465 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
4466 constfn = builtin_memcpy_read_str;
4467
4468 rtx result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4469 TREE_TYPE (len), target,
4470 result_eq, constfn,
4471 CONST_CAST (char *, src_str));
4472
4473 if (result)
4474 {
4475 /* Return the value in the proper mode for this function. */
4476 if (GET_MODE (result) == mode)
4477 return result;
4478
4479 if (target != 0)
4480 {
4481 convert_move (target, result, 0);
4482 return target;
4483 }
4484
4485 return convert_to_mode (mode, result, 0);
4486 }
4487
4488 return NULL_RTX;
4489 }
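
/* RESULT_EQ reflects how the caller uses the value (a sketch, not
   part of GCC):

     if (memcmp (a, b, n) == 0)     only zero/nonzero matters
     if (memcmp (a, b, n) < 0)      the sign matters too

   In the first case RESULT_EQ is true and emit_block_cmp_hints is
   free to use a cheaper equality-only sequence.  */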
4490
4491 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
4492 if we failed; the caller should emit a normal call, otherwise try to get
4493 the result in TARGET, if convenient. */
4494
4495 static rtx
4496 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4497 {
4498 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4499 return NULL_RTX;
4500
4501 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4502 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4503 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
4504 return NULL_RTX;
4505
4506 tree arg1 = CALL_EXPR_ARG (exp, 0);
4507 tree arg2 = CALL_EXPR_ARG (exp, 1);
4508
4509 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4510 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4511
4512 /* If we don't have POINTER_TYPE, call the function. */
4513 if (arg1_align == 0 || arg2_align == 0)
4514 return NULL_RTX;
4515
4516 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
4517 arg1 = builtin_save_expr (arg1);
4518 arg2 = builtin_save_expr (arg2);
4519
4520 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
4521 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
4522
4523 rtx result = NULL_RTX;
4524 /* Try to call cmpstrsi. */
4525 if (cmpstr_icode != CODE_FOR_nothing)
4526 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4527 MIN (arg1_align, arg2_align));
4528
4529 /* Try to determine at least one length and call cmpstrnsi. */
4530 if (!result && cmpstrn_icode != CODE_FOR_nothing)
4531 {
4532 tree len;
4533 rtx arg3_rtx;
4534
4535 tree len1 = c_strlen (arg1, 1);
4536 tree len2 = c_strlen (arg2, 1);
4537
4538 if (len1)
4539 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4540 if (len2)
4541 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4542
4543 /* If we don't have a constant length for the first, use the length
4544 of the second, if we know it. We don't require a constant for
4545 this case; some cost analysis could be done if both are available
4546 but neither is constant. For now, assume they're equally cheap,
4547 unless one has side effects. If both strings have constant lengths,
4548 use the smaller. */
4549
4550 if (!len1)
4551 len = len2;
4552 else if (!len2)
4553 len = len1;
4554 else if (TREE_SIDE_EFFECTS (len1))
4555 len = len2;
4556 else if (TREE_SIDE_EFFECTS (len2))
4557 len = len1;
4558 else if (TREE_CODE (len1) != INTEGER_CST)
4559 len = len2;
4560 else if (TREE_CODE (len2) != INTEGER_CST)
4561 len = len1;
4562 else if (tree_int_cst_lt (len1, len2))
4563 len = len1;
4564 else
4565 len = len2;
4566
4567 /* If both arguments have side effects, we cannot optimize. */
4568 if (len && !TREE_SIDE_EFFECTS (len))
4569 {
4570 arg3_rtx = expand_normal (len);
4571 result = expand_cmpstrn_or_cmpmem
4572 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4573 arg3_rtx, MIN (arg1_align, arg2_align));
4574 }
4575 }
4576
4577 /* Check to see if the argument was declared attribute nonstring
4578 and if so, issue a warning since at this point it's not known
4579 to be nul-terminated. */
4580 tree fndecl = get_callee_fndecl (exp);
4581 maybe_warn_nonstring_arg (fndecl, exp);
4582
4583 if (result)
4584 {
4585 /* Return the value in the proper mode for this function. */
4586 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4587 if (GET_MODE (result) == mode)
4588 return result;
4589 if (target == 0)
4590 return convert_to_mode (mode, result, 0);
4591 convert_move (target, result, 0);
4592 return target;
4593 }
4594
4595 /* Expand the library call ourselves using a stabilized argument
4596 list to avoid re-evaluating the function's arguments twice. */
4597 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4598 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4599 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4600 return expand_call (fn, target, target == const0_rtx);
4601 }
4602
4603 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4604 NULL_RTX if we failed; the caller should emit a normal call, otherwise try to get
4605 the result in TARGET, if convenient. */
4606
4607 static rtx
4608 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4609 ATTRIBUTE_UNUSED machine_mode mode)
4610 {
4611 if (!validate_arglist (exp,
4612 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4613 return NULL_RTX;
4614
4615 /* If c_strlen can determine an expression for one of the string
4616 lengths, and it doesn't have side effects, then emit cmpstrnsi
4617 using length MIN(strlen(string)+1, arg3). */
4618 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4619 if (cmpstrn_icode == CODE_FOR_nothing)
4620 return NULL_RTX;
4621
4622 tree len;
4623
4624 tree arg1 = CALL_EXPR_ARG (exp, 0);
4625 tree arg2 = CALL_EXPR_ARG (exp, 1);
4626 tree arg3 = CALL_EXPR_ARG (exp, 2);
4627
4628 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4629 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4630
4631 tree len1 = c_strlen (arg1, 1);
4632 tree len2 = c_strlen (arg2, 1);
4633
4634 location_t loc = EXPR_LOCATION (exp);
4635
4636 if (len1)
4637 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4638 if (len2)
4639 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4640
4641 tree len3 = fold_convert_loc (loc, sizetype, arg3);
4642
4643 /* If we don't have a constant length for the first, use the length
4644 of the second, if we know it. If neither string is constant length,
4645 use the given length argument. We don't require a constant for
4646 this case; some cost analysis could be done if both are available
4647 but neither is constant. For now, assume they're equally cheap,
4648 unless one has side effects. If both strings have constant lengths,
4649 use the smaller. */
4650
4651 if (!len1 && !len2)
4652 len = len3;
4653 else if (!len1)
4654 len = len2;
4655 else if (!len2)
4656 len = len1;
4657 else if (TREE_SIDE_EFFECTS (len1))
4658 len = len2;
4659 else if (TREE_SIDE_EFFECTS (len2))
4660 len = len1;
4661 else if (TREE_CODE (len1) != INTEGER_CST)
4662 len = len2;
4663 else if (TREE_CODE (len2) != INTEGER_CST)
4664 len = len1;
4665 else if (tree_int_cst_lt (len1, len2))
4666 len = len1;
4667 else
4668 len = len2;
4669
4670 /* If we are not using the given length, we must incorporate it here.
4671 The actual new length parameter will be MIN(len,arg3) in this case. */
4672 if (len != len3)
4673 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
4674 rtx arg1_rtx = get_memory_rtx (arg1, len);
4675 rtx arg2_rtx = get_memory_rtx (arg2, len);
4676 rtx arg3_rtx = expand_normal (len);
4677 rtx result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4678 arg2_rtx, TREE_TYPE (len), arg3_rtx,
4679 MIN (arg1_align, arg2_align));
4680
4681 /* Check to see if the argument was declared attribute nonstring
4682 and if so, issue a warning since at this point it's not known
4683 to be nul-terminated. */
4684 tree fndecl = get_callee_fndecl (exp);
4685 maybe_warn_nonstring_arg (fndecl, exp);
4686
4687 if (result)
4688 {
4689 /* Return the value in the proper mode for this function. */
4690 mode = TYPE_MODE (TREE_TYPE (exp));
4691 if (GET_MODE (result) == mode)
4692 return result;
4693 if (target == 0)
4694 return convert_to_mode (mode, result, 0);
4695 convert_move (target, result, 0);
4696 return target;
4697 }
4698
4699 /* Expand the library call ourselves using a stabilized argument
4700 list to avoid re-evaluating the function's arguments twice. */
4701 tree fn = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
4702 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4703 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4704 return expand_call (fn, target, target == const0_rtx);
4705 }
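
/* A worked instance (not part of GCC) of the length selection above:

     strncmp ("hello", s, 100)

   has LEN1 == 6 (strlen plus the nul) and no constant LEN2, so LEN
   becomes MIN (6, 100) == 6 and the cmpstrn expansion never examines
   more than six bytes of either argument.  */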
4706
4707 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4708 if that's convenient. */
4709
4710 rtx
4711 expand_builtin_saveregs (void)
4712 {
4713 rtx val;
4714 rtx_insn *seq;
4715
4716 /* Don't do __builtin_saveregs more than once in a function.
4717 Save the result of the first call and reuse it. */
4718 if (saveregs_value != 0)
4719 return saveregs_value;
4720
4721 /* When this function is called, it means that registers must be
4722 saved on entry to this function. So we migrate the call to the
4723 first insn of this function. */
4724
4725 start_sequence ();
4726
4727 /* Do whatever the machine needs done in this case. */
4728 val = targetm.calls.expand_builtin_saveregs ();
4729
4730 seq = get_insns ();
4731 end_sequence ();
4732
4733 saveregs_value = val;
4734
4735 /* Put the insns after the NOTE that starts the function. If this
4736 is inside a start_sequence, make the outer-level insn chain current, so
4737 the code is placed at the start of the function. */
4738 push_topmost_sequence ();
4739 emit_insn_after (seq, entry_of_function ());
4740 pop_topmost_sequence ();
4741
4742 return val;
4743 }
4744
4745 /* Expand a call to __builtin_next_arg. */
4746
4747 static rtx
4748 expand_builtin_next_arg (void)
4749 {
4750 /* Checking arguments is already done in fold_builtin_next_arg
4751 that must be called before this function. */
4752 return expand_binop (ptr_mode, add_optab,
4753 crtl->args.internal_arg_pointer,
4754 crtl->args.arg_offset_rtx,
4755 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4756 }
4757
4758 /* Make it easier for the backends by protecting the valist argument
4759 from multiple evaluations. */
4760
4761 static tree
4762 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4763 {
4764 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4765
4766 /* The current way of determining the type of valist is completely
4767 bogus. We should have the information on the va builtin instead. */
4768 if (!vatype)
4769 vatype = targetm.fn_abi_va_list (cfun->decl);
4770
4771 if (TREE_CODE (vatype) == ARRAY_TYPE)
4772 {
4773 if (TREE_SIDE_EFFECTS (valist))
4774 valist = save_expr (valist);
4775
4776 /* For this case, the backends will be expecting a pointer to
4777 vatype, but it's possible we've actually been given an array
4778 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4779 So fix it. */
4780 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4781 {
4782 tree p1 = build_pointer_type (TREE_TYPE (vatype));
4783 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4784 }
4785 }
4786 else
4787 {
4788 tree pt = build_pointer_type (vatype);
4789
4790 if (! needs_lvalue)
4791 {
4792 if (! TREE_SIDE_EFFECTS (valist))
4793 return valist;
4794
4795 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4796 TREE_SIDE_EFFECTS (valist) = 1;
4797 }
4798
4799 if (TREE_SIDE_EFFECTS (valist))
4800 valist = save_expr (valist);
4801 valist = fold_build2_loc (loc, MEM_REF,
4802 vatype, valist, build_int_cst (pt, 0));
4803 }
4804
4805 return valist;
4806 }
4807
4808 /* The "standard" definition of va_list is void*. */
4809
4810 tree
4811 std_build_builtin_va_list (void)
4812 {
4813 return ptr_type_node;
4814 }
4815
4816 /* The "standard" abi va_list is va_list_type_node. */
4817
4818 tree
4819 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4820 {
4821 return va_list_type_node;
4822 }
4823
4824 /* The "standard" type of va_list is va_list_type_node. */
4825
4826 tree
4827 std_canonical_va_list_type (tree type)
4828 {
4829 tree wtype, htype;
4830
4831 wtype = va_list_type_node;
4832 htype = type;
4833
4834 if (TREE_CODE (wtype) == ARRAY_TYPE)
4835 {
4836 /* If va_list is an array type, the argument may have decayed
4837 to a pointer type, e.g. by being passed to another function.
4838 In that case, unwrap both types so that we can compare the
4839 underlying records. */
4840 if (TREE_CODE (htype) == ARRAY_TYPE
4841 || POINTER_TYPE_P (htype))
4842 {
4843 wtype = TREE_TYPE (wtype);
4844 htype = TREE_TYPE (htype);
4845 }
4846 }
4847 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4848 return va_list_type_node;
4849
4850 return NULL_TREE;
4851 }
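
/* For example (a sketch, not part of GCC), on a target whose ABI
   declares something like

     typedef struct __va_list_tag va_list[1];

   an argument that reaches here with type struct __va_list_tag *
   (the array decayed by an intervening function call) still compares
   equal once both types are unwrapped to the underlying record.  */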
4852
4853 /* The "standard" implementation of va_start: just assign `nextarg' to
4854 the variable. */
4855
4856 void
4857 std_expand_builtin_va_start (tree valist, rtx nextarg)
4858 {
4859 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4860 convert_move (va_r, nextarg, 0);
4861
4862 /* We do not have any valid bounds for the pointer, so
4863 just store zero bounds for it. */
4864 if (chkp_function_instrumented_p (current_function_decl))
4865 chkp_expand_bounds_reset_for_mem (valist,
4866 make_tree (TREE_TYPE (valist),
4867 nextarg));
4868 }
4869
4870 /* Expand EXP, a call to __builtin_va_start. */
4871
4872 static rtx
4873 expand_builtin_va_start (tree exp)
4874 {
4875 rtx nextarg;
4876 tree valist;
4877 location_t loc = EXPR_LOCATION (exp);
4878
4879 if (call_expr_nargs (exp) < 2)
4880 {
4881 error_at (loc, "too few arguments to function %<va_start%>");
4882 return const0_rtx;
4883 }
4884
4885 if (fold_builtin_next_arg (exp, true))
4886 return const0_rtx;
4887
4888 nextarg = expand_builtin_next_arg ();
4889 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4890
4891 if (targetm.expand_builtin_va_start)
4892 targetm.expand_builtin_va_start (valist, nextarg);
4893 else
4894 std_expand_builtin_va_start (valist, nextarg);
4895
4896 return const0_rtx;
4897 }
4898
4899 /* Expand EXP, a call to __builtin_va_end. */
4900
4901 static rtx
4902 expand_builtin_va_end (tree exp)
4903 {
4904 tree valist = CALL_EXPR_ARG (exp, 0);
4905
4906 /* Evaluate for side effects, if needed. I hate macros that don't
4907 do that. */
4908 if (TREE_SIDE_EFFECTS (valist))
4909 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4910
4911 return const0_rtx;
4912 }
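/* The side-effect evaluation matters in uses like this, for illustration
   (get_ap being some hypothetical accessor):

     va_end (*get_ap ());   // get_ap () must still be called exactly once

   even though va_end itself expands to nothing here.  */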
4913
4914 /* Expand EXP, a call to __builtin_va_copy. We do this as a
4915 builtin rather than just as an assignment in stdarg.h because of the
4916 nastiness of array-type va_list types. */
4917
4918 static rtx
4919 expand_builtin_va_copy (tree exp)
4920 {
4921 tree dst, src, t;
4922 location_t loc = EXPR_LOCATION (exp);
4923
4924 dst = CALL_EXPR_ARG (exp, 0);
4925 src = CALL_EXPR_ARG (exp, 1);
4926
4927 dst = stabilize_va_list_loc (loc, dst, 1);
4928 src = stabilize_va_list_loc (loc, src, 0);
4929
4930 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4931
4932 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4933 {
4934 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4935 TREE_SIDE_EFFECTS (t) = 1;
4936 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4937 }
4938 else
4939 {
4940 rtx dstb, srcb, size;
4941
4942 /* Evaluate to pointers. */
4943 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4944 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4945 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4946 NULL_RTX, VOIDmode, EXPAND_NORMAL);
4947
4948 dstb = convert_memory_address (Pmode, dstb);
4949 srcb = convert_memory_address (Pmode, srcb);
4950
4951 /* "Dereference" to BLKmode memories. */
4952 dstb = gen_rtx_MEM (BLKmode, dstb);
4953 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4954 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4955 srcb = gen_rtx_MEM (BLKmode, srcb);
4956 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4957 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4958
4959 /* Copy. */
4960 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4961 }
4962
4963 return const0_rtx;
4964 }
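/* For illustration: with an array-type va_list, in

     va_copy (dst, src);

   both arguments decay to pointers, so the copy must be a block move of
   the full va_list record -- the BLKmode emit_block_move path above --
   rather than a scalar assignment.  */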
4965
4966 /* Expand a call to one of the builtin functions __builtin_frame_address or
4967 __builtin_return_address. */
4968
4969 static rtx
4970 expand_builtin_frame_address (tree fndecl, tree exp)
4971 {
4972 /* The argument must be a nonnegative integer constant.
4973 It counts the number of frames to scan up the stack.
4974 The value is either the frame pointer value or the return
4975 address saved in that frame. */
4976 if (call_expr_nargs (exp) == 0)
4977 /* Warning about missing arg was already issued. */
4978 return const0_rtx;
4979 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4980 {
4981 error ("invalid argument to %qD", fndecl);
4982 return const0_rtx;
4983 }
4984 else
4985 {
4986 /* Number of frames to scan up the stack. */
4987 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4988
4989 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4990
4991 /* Some ports cannot access arbitrary stack frames. */
4992 if (tem == NULL)
4993 {
4994 warning (0, "unsupported argument to %qD", fndecl);
4995 return const0_rtx;
4996 }
4997
4998 if (count)
4999 {
5000 /* Warn since no effort is made to ensure that any frame
5001 beyond the current one exists or can be safely reached. */
5002 warning (OPT_Wframe_address, "calling %qD with "
5003 "a nonzero argument is unsafe", fndecl);
5004 }
5005
5006 /* For __builtin_frame_address, return what we've got. */
5007 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5008 return tem;
5009
5010 if (!REG_P (tem)
5011 && ! CONSTANT_P (tem))
5012 tem = copy_addr_to_reg (tem);
5013 return tem;
5014 }
5015 }
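/* Typical uses, for illustration:

     void *fp = __builtin_frame_address (0);    // current frame
     void *ra = __builtin_return_address (0);   // our return address
     void *up = __builtin_frame_address (2);    // warns with -Wframe-address

   Only a nonnegative integer constant is accepted as the count.  */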
5016
5017 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5018 failed and the caller should emit a normal call. */
5019
5020 static rtx
5021 expand_builtin_alloca (tree exp)
5022 {
5023 rtx op0;
5024 rtx result;
5025 unsigned int align;
5026 tree fndecl = get_callee_fndecl (exp);
5027 HOST_WIDE_INT max_size;
5028 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5029 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
5030 bool valid_arglist
5031 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5032 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
5033 VOID_TYPE)
5034 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
5035 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
5036 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
5037
5038 if (!valid_arglist)
5039 return NULL_RTX;
5040
5041 if ((alloca_for_var && !warn_vla_limit)
5042 || (!alloca_for_var && !warn_alloca_limit))
5043 {
5044 /* -Walloca-larger-than and -Wvla-larger-than settings override
5045 the more general -Walloc-size-larger-than, so unless either of
5046 the former options is specified, check the alloca arguments for
5047 overflow. */
5048 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
5049 int idx[] = { 0, -1 };
5050 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
5051 }
5052
5053 /* Compute the argument. */
5054 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5055
5056 /* Compute the alignment. */
5057 align = (fcode == BUILT_IN_ALLOCA
5058 ? BIGGEST_ALIGNMENT
5059 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
5060
5061 /* Compute the maximum size. */
5062 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5063 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
5064 : -1);
5065
5066 /* Allocate the desired space. If the allocation stems from the declaration
5067 of a variable-sized object, it cannot accumulate. */
5068 result
5069 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
5070 result = convert_memory_address (ptr_mode, result);
5071
5072 return result;
5073 }
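/* Usage sketch: the alignment argument of __builtin_alloca_with_align is
   given in bits, e.g.

     void *p = __builtin_alloca_with_align (n, 256);   // 32-byte aligned

   while plain __builtin_alloca gets BIGGEST_ALIGNMENT, as computed
   above.  */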
5074
5075 /* Emit a call to __asan_allocas_unpoison for EXP. Add to the second argument
5076 of the call virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
5077 STACK_DYNAMIC_OFFSET value. See the motivation for this in the comment to
5078 the handle_builtin_stack_restore function. */
5079
5080 static rtx
5081 expand_asan_emit_allocas_unpoison (tree exp)
5082 {
5083 tree arg0 = CALL_EXPR_ARG (exp, 0);
5084 tree arg1 = CALL_EXPR_ARG (exp, 1);
5085 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5086 rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5087 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
5088 stack_pointer_rtx, NULL_RTX, 0,
5089 OPTAB_LIB_WIDEN);
5090 off = convert_modes (ptr_mode, Pmode, off, 0);
5091 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
5092 OPTAB_LIB_WIDEN);
5093 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
5094 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
5095 top, ptr_mode, bot, ptr_mode);
5096 return ret;
5097 }
5098
5099 /* Expand a call to bswap builtin in EXP.
5100 Return NULL_RTX if a normal call should be emitted rather than expanding the
5101 function in-line. If convenient, the result should be placed in TARGET.
5102 SUBTARGET may be used as the target for computing one of EXP's operands. */
5103
5104 static rtx
5105 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
5106 rtx subtarget)
5107 {
5108 tree arg;
5109 rtx op0;
5110
5111 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5112 return NULL_RTX;
5113
5114 arg = CALL_EXPR_ARG (exp, 0);
5115 op0 = expand_expr (arg,
5116 subtarget && GET_MODE (subtarget) == target_mode
5117 ? subtarget : NULL_RTX,
5118 target_mode, EXPAND_NORMAL);
5119 if (GET_MODE (op0) != target_mode)
5120 op0 = convert_to_mode (target_mode, op0, 1);
5121
5122 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
5123
5124 gcc_assert (target);
5125
5126 return convert_to_mode (target_mode, target, 1);
5127 }
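/* For example:

     __builtin_bswap32 (0x12345678) == 0x78563412
     __builtin_bswap16 (0xabcd) == 0xcdab

   i.e. the operand's bytes reversed end to end.  */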
5128
5129 /* Expand a call to a unary builtin in EXP.
5130 Return NULL_RTX if a normal call should be emitted rather than expanding the
5131 function in-line. If convenient, the result should be placed in TARGET.
5132 SUBTARGET may be used as the target for computing one of EXP's operands. */
5133
5134 static rtx
5135 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
5136 rtx subtarget, optab op_optab)
5137 {
5138 rtx op0;
5139
5140 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5141 return NULL_RTX;
5142
5143 /* Compute the argument. */
5144 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5145 (subtarget
5146 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5147 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5148 VOIDmode, EXPAND_NORMAL);
5149 /* Compute op, into TARGET if possible.
5150 Set TARGET to wherever the result comes back. */
5151 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5152 op_optab, op0, target, op_optab != clrsb_optab);
5153 gcc_assert (target);
5154
5155 return convert_to_mode (target_mode, target, 0);
5156 }
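/* This path serves builtins such as, for example:

     __builtin_popcount (0xf0) == 4
     __builtin_clrsb (0) == 31        // with 32-bit int

   clrsb is the only signed operation here, hence the
   op_optab != clrsb_optab argument to expand_unop above.  */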
5157
5158 /* Expand a call to __builtin_expect. We just return our argument,
5159 as the builtin_expect semantics should already have been applied by
5160 the tree branch prediction pass. */
5161
5162 static rtx
5163 expand_builtin_expect (tree exp, rtx target)
5164 {
5165 tree arg;
5166
5167 if (call_expr_nargs (exp) < 2)
5168 return const0_rtx;
5169 arg = CALL_EXPR_ARG (exp, 0);
5170
5171 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5172 /* When guessing was done, the hints should be already stripped away. */
5173 gcc_assert (!flag_guess_branch_prob
5174 || optimize == 0 || seen_error ());
5175 return target;
5176 }
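/* Typical use, for illustration:

     if (__builtin_expect (err != 0, 0))   // the error path is unlikely
       abort ();

   By expansion time the hint has been consumed, so only the first
   argument survives.  */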
5177
5178 /* Expand a call to __builtin_assume_aligned. We just return our first
5179 argument, as the builtin_assume_aligned semantics should already have
5180 been applied by CCP. */
5181
5182 static rtx
5183 expand_builtin_assume_aligned (tree exp, rtx target)
5184 {
5185 if (call_expr_nargs (exp) < 2)
5186 return const0_rtx;
5187 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5188 EXPAND_NORMAL);
5189 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5190 && (call_expr_nargs (exp) < 3
5191 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5192 return target;
5193 }
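/* Typical uses, for illustration:

     p = __builtin_assume_aligned (p, 64);       // p is 64-byte aligned
     q = __builtin_assume_aligned (q, 64, 16);   // q - 16 is 64-byte aligned

   CCP has already consumed the alignment, so only the pointer itself
   remains to be expanded.  */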
5194
5195 void
5196 expand_builtin_trap (void)
5197 {
5198 if (targetm.have_trap ())
5199 {
5200 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5201 /* For trap insns, when not accumulating outgoing args, force a
5202 REG_ARGS_SIZE note to prevent crossjumping of calls with
5203 different arg sizes. */
5204 if (!ACCUMULATE_OUTGOING_ARGS)
5205 add_args_size_note (insn, stack_pointer_delta);
5206 }
5207 else
5208 {
5209 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5210 tree call_expr = build_call_expr (fn, 0);
5211 expand_call (call_expr, NULL_RTX, false);
5212 }
5213
5214 emit_barrier ();
5215 }
5216
5217 /* Expand a call to __builtin_unreachable. We do nothing except emit
5218 a barrier saying that control flow will not pass here.
5219
5220 It is the responsibility of the program being compiled to ensure
5221 that control flow never reaches __builtin_unreachable. */
5222 static void
5223 expand_builtin_unreachable (void)
5224 {
5225 emit_barrier ();
5226 }
5227
5228 /* Expand EXP, a call to fabs, fabsf or fabsl.
5229 Return NULL_RTX if a normal call should be emitted rather than expanding
5230 the function inline. If convenient, the result should be placed
5231 in TARGET. SUBTARGET may be used as the target for computing
5232 the operand. */
5233
5234 static rtx
5235 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5236 {
5237 machine_mode mode;
5238 tree arg;
5239 rtx op0;
5240
5241 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5242 return NULL_RTX;
5243
5244 arg = CALL_EXPR_ARG (exp, 0);
5245 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5246 mode = TYPE_MODE (TREE_TYPE (arg));
5247 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5248 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5249 }
5250
5251 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5252 Return NULL if a normal call should be emitted rather than expanding the
5253 function inline. If convenient, the result should be placed in TARGET.
5254 SUBTARGET may be used as the target for computing the operand. */
5255
5256 static rtx
5257 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5258 {
5259 rtx op0, op1;
5260 tree arg;
5261
5262 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5263 return NULL_RTX;
5264
5265 arg = CALL_EXPR_ARG (exp, 0);
5266 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5267
5268 arg = CALL_EXPR_ARG (exp, 1);
5269 op1 = expand_normal (arg);
5270
5271 return expand_copysign (op0, op1, target);
5272 }
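/* For example:

     copysign (3.0, -0.5) == -3.0
     copysign (-2.0, 1.0) == 2.0

   i.e. the magnitude of the first argument with the sign of the
   second.  */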
5273
5274 /* Expand a call to __builtin___clear_cache. */
5275
5276 static rtx
5277 expand_builtin___clear_cache (tree exp)
5278 {
5279 if (!targetm.code_for_clear_cache)
5280 {
5281 #ifdef CLEAR_INSN_CACHE
5282 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5283 does something. Just do the default expansion to a call to
5284 __clear_cache(). */
5285 return NULL_RTX;
5286 #else
5287 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5288 does nothing. There is no need to call it. Do nothing. */
5289 return const0_rtx;
5290 #endif /* CLEAR_INSN_CACHE */
5291 }
5292
5293 /* We have a "clear_cache" insn, and it will handle everything. */
5294 tree begin, end;
5295 rtx begin_rtx, end_rtx;
5296
5297 /* We must not expand to a library call. If we did, any
5298 fallback library function in libgcc that might contain a call to
5299 __builtin___clear_cache() would recurse infinitely. */
5300 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5301 {
5302 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5303 return const0_rtx;
5304 }
5305
5306 if (targetm.have_clear_cache ())
5307 {
5308 struct expand_operand ops[2];
5309
5310 begin = CALL_EXPR_ARG (exp, 0);
5311 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5312
5313 end = CALL_EXPR_ARG (exp, 1);
5314 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5315
5316 create_address_operand (&ops[0], begin_rtx);
5317 create_address_operand (&ops[1], end_rtx);
5318 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5319 return const0_rtx;
5320 }
5321 return const0_rtx;
5322 }
5323
5324 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
5325
5326 static rtx
5327 round_trampoline_addr (rtx tramp)
5328 {
5329 rtx temp, addend, mask;
5330
5331 /* If we don't need too much alignment, we'll have been guaranteed
5332 proper alignment by get_trampoline_type. */
5333 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5334 return tramp;
5335
5336 /* Round address up to desired boundary. */
5337 temp = gen_reg_rtx (Pmode);
5338 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5339 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5340
5341 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
5342 temp, 0, OPTAB_LIB_WIDEN);
5343 tramp = expand_simple_binop (Pmode, AND, temp, mask,
5344 temp, 0, OPTAB_LIB_WIDEN);
5345
5346 return tramp;
5347 }
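/* Worked example: with TRAMPOLINE_ALIGNMENT of 128 bits (16 bytes),
   ADDEND is 15 and MASK is -16, so an address of 0x1003 is rounded as

     (0x1003 + 15) & -16 == 0x1010

   the next 16-byte boundary at or above the original address.  */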
5348
5349 static rtx
5350 expand_builtin_init_trampoline (tree exp, bool onstack)
5351 {
5352 tree t_tramp, t_func, t_chain;
5353 rtx m_tramp, r_tramp, r_chain, tmp;
5354
5355 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5356 POINTER_TYPE, VOID_TYPE))
5357 return NULL_RTX;
5358
5359 t_tramp = CALL_EXPR_ARG (exp, 0);
5360 t_func = CALL_EXPR_ARG (exp, 1);
5361 t_chain = CALL_EXPR_ARG (exp, 2);
5362
5363 r_tramp = expand_normal (t_tramp);
5364 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5365 MEM_NOTRAP_P (m_tramp) = 1;
5366
5367 /* If ONSTACK, the TRAMP argument should be the address of a field
5368 within the local function's FRAME decl. Either way, let's see if
5369 we can fill in the MEM_ATTRs for this memory. */
5370 if (TREE_CODE (t_tramp) == ADDR_EXPR)
5371 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5372
5373 /* Creator of a heap trampoline is responsible for making sure the
5374 address is aligned to at least STACK_BOUNDARY. Normally malloc
5375 will ensure this anyhow. */
5376 tmp = round_trampoline_addr (r_tramp);
5377 if (tmp != r_tramp)
5378 {
5379 m_tramp = change_address (m_tramp, BLKmode, tmp);
5380 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5381 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5382 }
5383
5384 /* The FUNC argument should be the address of the nested function.
5385 Extract the actual function decl to pass to the hook. */
5386 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5387 t_func = TREE_OPERAND (t_func, 0);
5388 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5389
5390 r_chain = expand_normal (t_chain);
5391
5392 /* Generate insns to initialize the trampoline. */
5393 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5394
5395 if (onstack)
5396 {
5397 trampolines_created = 1;
5398
5399 if (targetm.calls.custom_function_descriptors != 0)
5400 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5401 "trampoline generated for nested function %qD", t_func);
5402 }
5403
5404 return const0_rtx;
5405 }
5406
5407 static rtx
5408 expand_builtin_adjust_trampoline (tree exp)
5409 {
5410 rtx tramp;
5411
5412 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5413 return NULL_RTX;
5414
5415 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5416 tramp = round_trampoline_addr (tramp);
5417 if (targetm.calls.trampoline_adjust_address)
5418 tramp = targetm.calls.trampoline_adjust_address (tramp);
5419
5420 return tramp;
5421 }
5422
5423 /* Expand a call to the builtin descriptor initialization routine.
5424 A descriptor is made up of a couple of pointers to the static
5425 chain and the code entry in this order. */
5426
5427 static rtx
5428 expand_builtin_init_descriptor (tree exp)
5429 {
5430 tree t_descr, t_func, t_chain;
5431 rtx m_descr, r_descr, r_func, r_chain;
5432
5433 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5434 VOID_TYPE))
5435 return NULL_RTX;
5436
5437 t_descr = CALL_EXPR_ARG (exp, 0);
5438 t_func = CALL_EXPR_ARG (exp, 1);
5439 t_chain = CALL_EXPR_ARG (exp, 2);
5440
5441 r_descr = expand_normal (t_descr);
5442 m_descr = gen_rtx_MEM (BLKmode, r_descr);
5443 MEM_NOTRAP_P (m_descr) = 1;
5444
5445 r_func = expand_normal (t_func);
5446 r_chain = expand_normal (t_chain);
5447
5448 /* Generate insns to initialize the descriptor. */
5449 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5450 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5451 POINTER_SIZE / BITS_PER_UNIT), r_func);
5452
5453 return const0_rtx;
5454 }
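/* The resulting descriptor layout, for illustration with 64-bit
   pointers:

     offset 0: static chain value
     offset 8: code entry point

   i.e. the two pointers in the order documented above.  */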
5455
5456 /* Expand a call to the builtin descriptor adjustment routine. */
5457
5458 static rtx
5459 expand_builtin_adjust_descriptor (tree exp)
5460 {
5461 rtx tramp;
5462
5463 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5464 return NULL_RTX;
5465
5466 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5467
5468 /* Unalign the descriptor to allow runtime identification. */
5469 tramp = plus_constant (ptr_mode, tramp,
5470 targetm.calls.custom_function_descriptors);
5471
5472 return force_operand (tramp, NULL_RTX);
5473 }
5474
5475 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5476 function. The function first checks whether the back end provides
5477 an insn to implement signbit for the respective mode. If not, it
5478 checks whether the floating point format of the value is such that
5479 the sign bit can be extracted. If that is not the case, error out.
5480 EXP is the expression that is a call to the builtin function; if
5481 convenient, the result should be placed in TARGET. */
5482 static rtx
5483 expand_builtin_signbit (tree exp, rtx target)
5484 {
5485 const struct real_format *fmt;
5486 scalar_float_mode fmode;
5487 scalar_int_mode rmode, imode;
5488 tree arg;
5489 int word, bitpos;
5490 enum insn_code icode;
5491 rtx temp;
5492 location_t loc = EXPR_LOCATION (exp);
5493
5494 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5495 return NULL_RTX;
5496
5497 arg = CALL_EXPR_ARG (exp, 0);
5498 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
5499 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5500 fmt = REAL_MODE_FORMAT (fmode);
5501
5502 arg = builtin_save_expr (arg);
5503
5504 /* Expand the argument yielding a RTX expression. */
5505 temp = expand_normal (arg);
5506
5507 /* Check if the back end provides an insn that handles signbit for the
5508 argument's mode. */
5509 icode = optab_handler (signbit_optab, fmode);
5510 if (icode != CODE_FOR_nothing)
5511 {
5512 rtx_insn *last = get_last_insn ();
5513 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5514 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5515 return target;
5516 delete_insns_since (last);
5517 }
5518
5519 /* For floating point formats without a sign bit, implement signbit
5520 as "ARG < 0.0". */
5521 bitpos = fmt->signbit_ro;
5522 if (bitpos < 0)
5523 {
5524 /* But we can't do this if the format supports signed zero. */
5525 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5526
5527 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5528 build_real (TREE_TYPE (arg), dconst0));
5529 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5530 }
5531
5532 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5533 {
5534 imode = int_mode_for_mode (fmode).require ();
5535 temp = gen_lowpart (imode, temp);
5536 }
5537 else
5538 {
5539 imode = word_mode;
5540 /* Handle targets with different FP word orders. */
5541 if (FLOAT_WORDS_BIG_ENDIAN)
5542 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5543 else
5544 word = bitpos / BITS_PER_WORD;
5545 temp = operand_subword_force (temp, word, fmode);
5546 bitpos = bitpos % BITS_PER_WORD;
5547 }
5548
5549 /* Force the intermediate word_mode (or narrower) result into a
5550 register. This avoids attempting to create paradoxical SUBREGs
5551 of floating point modes below. */
5552 temp = force_reg (imode, temp);
5553
5554 /* If the bitpos is within the "result mode" lowpart, the operation
5555 can be implemented with a single bitwise AND. Otherwise, we need
5556 a right shift and an AND. */
5557
5558 if (bitpos < GET_MODE_BITSIZE (rmode))
5559 {
5560 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5561
5562 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5563 temp = gen_lowpart (rmode, temp);
5564 temp = expand_binop (rmode, and_optab, temp,
5565 immed_wide_int_const (mask, rmode),
5566 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5567 }
5568 else
5569 {
5570 /* Perform a logical right shift to place the signbit in the least
5571 significant bit, then truncate the result to the desired mode
5572 and mask just this bit. */
5573 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5574 temp = gen_lowpart (rmode, temp);
5575 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5576 NULL_RTX, 1, OPTAB_LIB_WIDEN);
5577 }
5578
5579 return temp;
5580 }
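/* For example, with IEEE formats:

     signbit (-0.0) != 0   // sign bit set although -0.0 == 0.0
     signbit (1.0) == 0

   which is why the "ARG < 0.0" fallback above is only valid for formats
   without signed zeros.  */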
5581
5582 /* Expand fork or exec calls. TARGET is the desired target of the
5583 call. EXP is the call. FN is the
5584 identifier of the actual function. IGNORE is nonzero if the
5585 value is to be ignored. */
5586
5587 static rtx
5588 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5589 {
5590 tree id, decl;
5591 tree call;
5592
5593 /* If we are not profiling, just call the function. */
5594 if (!profile_arc_flag)
5595 return NULL_RTX;
5596
5597 /* Otherwise call the wrapper. This should be equivalent for the rest of the
5598 compiler, so the code does not diverge, and the wrapper may run the
5599 code necessary for keeping the profiling sane. */
5600
5601 switch (DECL_FUNCTION_CODE (fn))
5602 {
5603 case BUILT_IN_FORK:
5604 id = get_identifier ("__gcov_fork");
5605 break;
5606
5607 case BUILT_IN_EXECL:
5608 id = get_identifier ("__gcov_execl");
5609 break;
5610
5611 case BUILT_IN_EXECV:
5612 id = get_identifier ("__gcov_execv");
5613 break;
5614
5615 case BUILT_IN_EXECLP:
5616 id = get_identifier ("__gcov_execlp");
5617 break;
5618
5619 case BUILT_IN_EXECLE:
5620 id = get_identifier ("__gcov_execle");
5621 break;
5622
5623 case BUILT_IN_EXECVP:
5624 id = get_identifier ("__gcov_execvp");
5625 break;
5626
5627 case BUILT_IN_EXECVE:
5628 id = get_identifier ("__gcov_execve");
5629 break;
5630
5631 default:
5632 gcc_unreachable ();
5633 }
5634
5635 decl = build_decl (DECL_SOURCE_LOCATION (fn),
5636 FUNCTION_DECL, id, TREE_TYPE (fn));
5637 DECL_EXTERNAL (decl) = 1;
5638 TREE_PUBLIC (decl) = 1;
5639 DECL_ARTIFICIAL (decl) = 1;
5640 TREE_NOTHROW (decl) = 1;
5641 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5642 DECL_VISIBILITY_SPECIFIED (decl) = 1;
5643 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5644 return expand_call (call, target, ignore);
5645 }
5646
5647
5648
5649 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
5650 the pointer in these functions is void*, the tree optimizers may remove
5651 casts. The mode computed in expand_builtin isn't reliable either, due
5652 to __sync_bool_compare_and_swap.
5653
5654 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5655 group of builtins. This gives us log2 of the mode size. */
5656
5657 static inline machine_mode
5658 get_builtin_sync_mode (int fcode_diff)
5659 {
5660 /* The size is not negotiable, so ask not to get BLKmode in return
5661 if the target indicates that a smaller size would be better. */
5662 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
5663 }
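/* For example, for the __sync_fetch_and_add family:

     fcode_diff 0 (..._1) -> 8-bit integer mode (QImode)
     fcode_diff 1 (..._2) -> 16-bit integer mode (HImode)
     fcode_diff 2 (..._4) -> 32-bit integer mode (SImode)
     fcode_diff 3 (..._8) -> 64-bit integer mode (DImode)

   using the usual mode names.  */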
5664
5665 /* Expand the memory expression LOC and return the appropriate memory operand
5666 for the builtin_sync operations. */
5667
5668 static rtx
5669 get_builtin_sync_mem (tree loc, machine_mode mode)
5670 {
5671 rtx addr, mem;
5672
5673 addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
5674 addr = convert_memory_address (Pmode, addr);
5675
5676 /* Note that we explicitly do not want any alias information for this
5677 memory, so that we kill all other live memories. Otherwise we don't
5678 satisfy the full barrier semantics of the intrinsic. */
5679 mem = validize_mem (gen_rtx_MEM (mode, addr));
5680
5681 /* The memory needs to be at least as aligned as the mode requires. */
5682 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5683 get_pointer_alignment (loc)));
5684 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5685 MEM_VOLATILE_P (mem) = 1;
5686
5687 return mem;
5688 }
5689
5690 /* Make sure an argument is in the right mode.
5691 EXP is the tree argument.
5692 MODE is the mode it should be in. */
5693
5694 static rtx
5695 expand_expr_force_mode (tree exp, machine_mode mode)
5696 {
5697 rtx val;
5698 machine_mode old_mode;
5699
5700 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5701 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
5702 of CONST_INTs, where we know the old_mode only from the call argument. */
5703
5704 old_mode = GET_MODE (val);
5705 if (old_mode == VOIDmode)
5706 old_mode = TYPE_MODE (TREE_TYPE (exp));
5707 val = convert_modes (mode, old_mode, val, 1);
5708 return val;
5709 }
5710
5711
5712 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5713 EXP is the CALL_EXPR. CODE is the rtx code
5714 that corresponds to the arithmetic or logical operation from the name;
5715 an exception here is that NOT actually means NAND. TARGET is an optional
5716 place for us to store the results; AFTER is true if this is the
5717 fetch_and_xxx form. */
5718
5719 static rtx
5720 expand_builtin_sync_operation (machine_mode mode, tree exp,
5721 enum rtx_code code, bool after,
5722 rtx target)
5723 {
5724 rtx val, mem;
5725 location_t loc = EXPR_LOCATION (exp);
5726
5727 if (code == NOT && warn_sync_nand)
5728 {
5729 tree fndecl = get_callee_fndecl (exp);
5730 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5731
5732 static bool warned_f_a_n, warned_n_a_f;
5733
5734 switch (fcode)
5735 {
5736 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5737 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5738 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5739 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5740 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5741 if (warned_f_a_n)
5742 break;
5743
5744 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5745 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5746 warned_f_a_n = true;
5747 break;
5748
5749 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5750 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5751 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5752 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5753 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5754 if (warned_n_a_f)
5755 break;
5756
5757 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5758 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5759 warned_n_a_f = true;
5760 break;
5761
5762 default:
5763 gcc_unreachable ();
5764 }
5765 }
5766
5767 /* Expand the operands. */
5768 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5769 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5770
5771 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5772 after);
5773 }
5774
5775 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5776 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
5777 true if this is the boolean form. TARGET is a place for us to store the
5778 results; this is NOT optional if IS_BOOL is true. */
5779
5780 static rtx
5781 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5782 bool is_bool, rtx target)
5783 {
5784 rtx old_val, new_val, mem;
5785 rtx *pbool, *poval;
5786
5787 /* Expand the operands. */
5788 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5789 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5790 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5791
5792 pbool = poval = NULL;
5793 if (target != const0_rtx)
5794 {
5795 if (is_bool)
5796 pbool = &target;
5797 else
5798 poval = &target;
5799 }
5800 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5801 false, MEMMODEL_SYNC_SEQ_CST,
5802 MEMMODEL_SYNC_SEQ_CST))
5803 return NULL_RTX;
5804
5805 return target;
5806 }
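/* For illustration, the two source forms handled here:

     bool ok = __sync_bool_compare_and_swap (&v, oldv, newv);   // IS_BOOL
     int old = __sync_val_compare_and_swap (&v, oldv, newv);    // !IS_BOOL

   both with full sequentially consistent barrier semantics.  */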
5807
5808 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
5809 general form is actually an atomic exchange, and some targets only
5810 support a reduced form with the second argument being a constant 1.
5811 EXP is the CALL_EXPR; TARGET is an optional place for us to store
5812 the results. */
5813
5814 static rtx
5815 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5816 rtx target)
5817 {
5818 rtx val, mem;
5819
5820 /* Expand the operands. */
5821 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5822 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5823
5824 return expand_sync_lock_test_and_set (target, mem, val);
5825 }
5826
5827 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
5828
5829 static void
5830 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5831 {
5832 rtx mem;
5833
5834 /* Expand the operands. */
5835 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5836
5837 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5838 }
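/* For illustration, this pairs with __sync_lock_test_and_set as a
   minimal spinlock:

     while (__sync_lock_test_and_set (&lock, 1))   // acquire
       ;
     // ... critical section ...
     __sync_lock_release (&lock);                  // release: store 0
*/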
5839
5840 /* Given an integer representing an ``enum memmodel'', verify its
5841 correctness and return the memory model enum. */
5842
5843 static enum memmodel
5844 get_memmodel (tree exp)
5845 {
5846 rtx op;
5847 unsigned HOST_WIDE_INT val;
5848 source_location loc
5849 = expansion_point_location_if_in_system_header (input_location);
5850
5851 /* If the parameter is not a constant, it's a run time value so we'll just
5852 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
5853 if (TREE_CODE (exp) != INTEGER_CST)
5854 return MEMMODEL_SEQ_CST;
5855
5856 op = expand_normal (exp);
5857
5858 val = INTVAL (op);
5859 if (targetm.memmodel_check)
5860 val = targetm.memmodel_check (val);
5861 else if (val & ~MEMMODEL_MASK)
5862 {
5863 warning_at (loc, OPT_Winvalid_memory_model,
5864 "unknown architecture specifier in memory model to builtin");
5865 return MEMMODEL_SEQ_CST;
5866 }
5867
5868 /* Should never see a user explicit SYNC memory model, so >= LAST works. */
5869 if (memmodel_base (val) >= MEMMODEL_LAST)
5870 {
5871 warning_at (loc, OPT_Winvalid_memory_model,
5872 "invalid memory model argument to builtin");
5873 return MEMMODEL_SEQ_CST;
5874 }
5875
5876 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5877 be conservative and promote consume to acquire. */
5878 if (val == MEMMODEL_CONSUME)
5879 val = MEMMODEL_ACQUIRE;
5880
5881 return (enum memmodel) val;
5882 }
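/* For reference, the source-level model values this accepts:

     __ATOMIC_RELAXED 0   __ATOMIC_CONSUME 1   __ATOMIC_ACQUIRE 2
     __ATOMIC_RELEASE 3   __ATOMIC_ACQ_REL 4   __ATOMIC_SEQ_CST 5

   Non-constant or out-of-range arguments degrade to MEMMODEL_SEQ_CST,
   the latter with a warning, as above.  */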
5883
5884 /* Expand the __atomic_exchange intrinsic:
5885 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5886 EXP is the CALL_EXPR.
5887 TARGET is an optional place for us to store the results. */
5888
5889 static rtx
5890 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5891 {
5892 rtx val, mem;
5893 enum memmodel model;
5894
5895 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5896
5897 if (!flag_inline_atomics)
5898 return NULL_RTX;
5899
5900 /* Expand the operands. */
5901 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5902 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5903
5904 return expand_atomic_exchange (target, mem, val, model);
5905 }
5906
5907 /* Expand the __atomic_compare_exchange intrinsic:
5908 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5909 TYPE desired, BOOL weak,
5910 enum memmodel success,
5911 enum memmodel failure)
5912 EXP is the CALL_EXPR.
5913 TARGET is an optional place for us to store the results. */
5914
5915 static rtx
5916 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5917 rtx target)
5918 {
5919 rtx expect, desired, mem, oldval;
5920 rtx_code_label *label;
5921 enum memmodel success, failure;
5922 tree weak;
5923 bool is_weak;
5924 source_location loc
5925 = expansion_point_location_if_in_system_header (input_location);
5926
5927 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5928 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5929
5930 if (failure > success)
5931 {
5932 warning_at (loc, OPT_Winvalid_memory_model,
5933 "failure memory model cannot be stronger than success "
5934 "memory model for %<__atomic_compare_exchange%>");
5935 success = MEMMODEL_SEQ_CST;
5936 }
5937
5938 if (is_mm_release (failure) || is_mm_acq_rel (failure))
5939 {
5940 warning_at (loc, OPT_Winvalid_memory_model,
5941 "invalid failure memory model for "
5942 "%<__atomic_compare_exchange%>");
5943 failure = MEMMODEL_SEQ_CST;
5944 success = MEMMODEL_SEQ_CST;
5945 }
5946
5947
5948 if (!flag_inline_atomics)
5949 return NULL_RTX;
5950
5951 /* Expand the operands. */
5952 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5953
5954 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5955 expect = convert_memory_address (Pmode, expect);
5956 expect = gen_rtx_MEM (mode, expect);
5957 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5958
5959 weak = CALL_EXPR_ARG (exp, 3);
5960 is_weak = false;
5961 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5962 is_weak = true;
5963
5964 if (target == const0_rtx)
5965 target = NULL;
5966
5967 /* Lest the rtl backend create a race condition with an improper store
5968 to memory, always create a new pseudo for OLDVAL. */
5969 oldval = NULL;
5970
5971 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5972 is_weak, success, failure))
5973 return NULL_RTX;
5974
5975 /* Conditionally store back to EXPECT, lest we create a race condition
5976 with an improper store to memory. */
5977 /* ??? With a rearrangement of atomics at the gimple level, we can handle
5978 the normal case where EXPECT is totally private, i.e. a register. At
5979 which point the store can be unconditional. */
5980 label = gen_label_rtx ();
5981 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5982 GET_MODE (target), 1, label);
5983 emit_move_insn (expect, oldval);
5984 emit_label (label);
5985
5986 return target;
5987 }
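/* Typical source form, for illustration:

     int expected = old;
     if (__atomic_compare_exchange_n (&v, &expected, desired, 0,
                                      __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST))
       ;   // swapped; otherwise expected now holds the value actually seen

   The conditional store back to EXPECT above implements that
   "otherwise" update.  */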
5988
5989 /* Helper function for expand_ifn_atomic_compare_exchange - expand an
5990 internal ATOMIC_COMPARE_EXCHANGE call into a __atomic_compare_exchange_N
5991 call. The weak parameter must be dropped to match the expected parameter
5992 list, and the expected argument must be changed from a value to a pointer
5993 to a memory slot. */
5994
5995 static void
5996 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
5997 {
5998 unsigned int z;
5999 vec<tree, va_gc> *vec;
6000
6001 vec_alloc (vec, 5);
6002 vec->quick_push (gimple_call_arg (call, 0));
6003 tree expected = gimple_call_arg (call, 1);
6004 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
6005 TREE_TYPE (expected));
6006 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
6007 if (expd != x)
6008 emit_move_insn (x, expd);
6009 tree v = make_tree (TREE_TYPE (expected), x);
6010 vec->quick_push (build1 (ADDR_EXPR,
6011 build_pointer_type (TREE_TYPE (expected)), v));
6012 vec->quick_push (gimple_call_arg (call, 2));
6013 /* Skip the boolean weak parameter. */
6014 for (z = 4; z < 6; z++)
6015 vec->quick_push (gimple_call_arg (call, z));
6016 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
6017 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
6018 gcc_assert (bytes_log2 < 5);
6019 built_in_function fncode
6020 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
6021 + bytes_log2);
6022 tree fndecl = builtin_decl_explicit (fncode);
6023 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
6024 fndecl);
6025 tree exp = build_call_vec (boolean_type_node, fn, vec);
6026 tree lhs = gimple_call_lhs (call);
6027 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
6028 if (lhs)
6029 {
6030 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6031 if (GET_MODE (boolret) != mode)
6032 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6033 x = force_reg (mode, x);
6034 write_complex_part (target, boolret, true);
6035 write_complex_part (target, x, false);
6036 }
6037 }
6038
6039 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
6040
6041 void
6042 expand_ifn_atomic_compare_exchange (gcall *call)
6043 {
6044 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
6045 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
6046 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
6047 rtx expect, desired, mem, oldval, boolret;
6048 enum memmodel success, failure;
6049 tree lhs;
6050 bool is_weak;
6051 source_location loc
6052 = expansion_point_location_if_in_system_header (gimple_location (call));
6053
6054 success = get_memmodel (gimple_call_arg (call, 4));
6055 failure = get_memmodel (gimple_call_arg (call, 5));
6056
6057 if (failure > success)
6058 {
6059 warning_at (loc, OPT_Winvalid_memory_model,
6060 "failure memory model cannot be stronger than success "
6061 "memory model for %<__atomic_compare_exchange%>");
6062 success = MEMMODEL_SEQ_CST;
6063 }
6064
6065 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6066 {
6067 warning_at (loc, OPT_Winvalid_memory_model,
6068 "invalid failure memory model for "
6069 "%<__atomic_compare_exchange%>");
6070 failure = MEMMODEL_SEQ_CST;
6071 success = MEMMODEL_SEQ_CST;
6072 }
6073
6074 if (!flag_inline_atomics)
6075 {
6076 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6077 return;
6078 }
6079
6080 /* Expand the operands. */
6081 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
6082
6083 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
6084 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
6085
6086 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6087
6088 boolret = NULL;
6089 oldval = NULL;
6090
6091 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6092 is_weak, success, failure))
6093 {
6094 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6095 return;
6096 }
6097
6098 lhs = gimple_call_lhs (call);
6099 if (lhs)
6100 {
6101 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6102 if (GET_MODE (boolret) != mode)
6103 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6104 write_complex_part (target, boolret, true);
6105 write_complex_part (target, oldval, false);
6106 }
6107 }
6108
6109 /* Expand the __atomic_load intrinsic:
6110 TYPE __atomic_load (TYPE *object, enum memmodel)
6111 EXP is the CALL_EXPR.
6112 TARGET is an optional place for us to store the results. */
6113
6114 static rtx
6115 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6116 {
6117 rtx mem;
6118 enum memmodel model;
6119
6120 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6121 if (is_mm_release (model) || is_mm_acq_rel (model))
6122 {
6123 source_location loc
6124 = expansion_point_location_if_in_system_header (input_location);
6125 warning_at (loc, OPT_Winvalid_memory_model,
6126 "invalid memory model for %<__atomic_load%>");
6127 model = MEMMODEL_SEQ_CST;
6128 }
6129
6130 if (!flag_inline_atomics)
6131 return NULL_RTX;
6132
6133 /* Expand the operand. */
6134 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6135
6136 return expand_atomic_load (target, mem, model);
6137 }
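/* Typical use, for illustration:

     int v = __atomic_load_n (&x, __ATOMIC_ACQUIRE);

   RELEASE and ACQ_REL are rejected above because they are meaningless
   for a pure load.  */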
6138
6139
6140 /* Expand the __atomic_store intrinsic:
6141 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6142 EXP is the CALL_EXPR.
6143 TARGET is an optional place for us to store the results. */
6144
6145 static rtx
6146 expand_builtin_atomic_store (machine_mode mode, tree exp)
6147 {
6148 rtx mem, val;
6149 enum memmodel model;
6150
6151 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6152 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6153 || is_mm_release (model)))
6154 {
6155 source_location loc
6156 = expansion_point_location_if_in_system_header (input_location);
6157 warning_at (loc, OPT_Winvalid_memory_model,
6158 "invalid memory model for %<__atomic_store%>");
6159 model = MEMMODEL_SEQ_CST;
6160 }
6161
6162 if (!flag_inline_atomics)
6163 return NULL_RTX;
6164
6165 /* Expand the operands. */
6166 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6167 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6168
6169 return expand_atomic_store (mem, val, model, false);
6170 }
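/* Typical use, for illustration:

     __atomic_store_n (&x, 1, __ATOMIC_RELEASE);

   Only RELAXED, RELEASE and SEQ_CST make sense for a pure store, hence
   the check above.  */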
6171
6172 /* Expand the __atomic_fetch_XXX intrinsic:
6173 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6174 EXP is the CALL_EXPR.
6175 TARGET is an optional place for us to store the results.
6176 CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (meaning NAND).
6177 FETCH_AFTER is true if returning the result of the operation.
6178 FETCH_AFTER is false if returning the value before the operation.
6179 IGNORE is true if the result is not used.
6180 EXT_CALL is the correct builtin for an external call if this cannot be
6181 resolved to an instruction sequence. */
6182
6183 static rtx
6184 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6185 enum rtx_code code, bool fetch_after,
6186 bool ignore, enum built_in_function ext_call)
6187 {
6188 rtx val, mem, ret;
6189 enum memmodel model;
6190 tree fndecl;
6191 tree addr;
6192
6193 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6194
6195 /* Expand the operands. */
6196 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6197 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6198
6199 /* Only try generating instructions if inlining is turned on. */
6200 if (flag_inline_atomics)
6201 {
6202 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6203 if (ret)
6204 return ret;
6205 }
6206
6207 /* Return if a different routine isn't needed for the library call. */
6208 if (ext_call == BUILT_IN_NONE)
6209 return NULL_RTX;
6210
6211 /* Change the call to the specified function. */
6212 fndecl = get_callee_fndecl (exp);
6213 addr = CALL_EXPR_FN (exp);
6214 STRIP_NOPS (addr);
6215
6216 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6217 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6218
6219 /* If we will emit code after the call, the call cannot be a tail call.
6220 If it is emitted as a tail call, a barrier is emitted after it, and
6221 then all trailing code is removed. */
6222 if (!ignore)
6223 CALL_EXPR_TAILCALL (exp) = 0;
6224
6225 /* Expand the call here so we can emit trailing code. */
6226 ret = expand_call (exp, target, ignore);
6227
6228 /* Replace the original function just in case it matters. */
6229 TREE_OPERAND (addr, 0) = fndecl;
6230
6231 /* Then issue the arithmetic correction to return the right result. */
6232 if (!ignore)
6233 {
6234 if (code == NOT)
6235 {
6236 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6237 OPTAB_LIB_WIDEN);
6238 ret = expand_simple_unop (mode, NOT, ret, target, true);
6239 }
6240 else
6241 ret = expand_simple_binop (mode, code, ret, val, target, true,
6242 OPTAB_LIB_WIDEN);
6243 }
6244 return ret;
6245 }
6246
6247 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
6248
6249 void
6250 expand_ifn_atomic_bit_test_and (gcall *call)
6251 {
6252 tree ptr = gimple_call_arg (call, 0);
6253 tree bit = gimple_call_arg (call, 1);
6254 tree flag = gimple_call_arg (call, 2);
6255 tree lhs = gimple_call_lhs (call);
6256 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6257 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6258 enum rtx_code code;
6259 optab optab;
6260 struct expand_operand ops[5];
6261
6262 gcc_assert (flag_inline_atomics);
6263
6264 if (gimple_call_num_args (call) == 4)
6265 model = get_memmodel (gimple_call_arg (call, 3));
6266
6267 rtx mem = get_builtin_sync_mem (ptr, mode);
6268 rtx val = expand_expr_force_mode (bit, mode);
6269
6270 switch (gimple_call_internal_fn (call))
6271 {
6272 case IFN_ATOMIC_BIT_TEST_AND_SET:
6273 code = IOR;
6274 optab = atomic_bit_test_and_set_optab;
6275 break;
6276 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6277 code = XOR;
6278 optab = atomic_bit_test_and_complement_optab;
6279 break;
6280 case IFN_ATOMIC_BIT_TEST_AND_RESET:
6281 code = AND;
6282 optab = atomic_bit_test_and_reset_optab;
6283 break;
6284 default:
6285 gcc_unreachable ();
6286 }
6287
6288 if (lhs == NULL_TREE)
6289 {
6290 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6291 val, NULL_RTX, true, OPTAB_DIRECT);
6292 if (code == AND)
6293 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6294 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
6295 return;
6296 }
6297
6298 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6299 enum insn_code icode = direct_optab_handler (optab, mode);
6300 gcc_assert (icode != CODE_FOR_nothing);
6301 create_output_operand (&ops[0], target, mode);
6302 create_fixed_operand (&ops[1], mem);
6303 create_convert_operand_to (&ops[2], val, mode, true);
6304 create_integer_operand (&ops[3], model);
6305 create_integer_operand (&ops[4], integer_onep (flag));
6306 if (maybe_expand_insn (icode, 5, ops))
6307 return;
6308
6309 rtx bitval = val;
6310 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6311 val, NULL_RTX, true, OPTAB_DIRECT);
6312 rtx maskval = val;
6313 if (code == AND)
6314 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6315 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6316 code, model, false);
6317 if (integer_onep (flag))
6318 {
6319 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6320 NULL_RTX, true, OPTAB_DIRECT);
6321 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6322 true, OPTAB_DIRECT);
6323 }
6324 else
6325 result = expand_simple_binop (mode, AND, result, maskval, target, true,
6326 OPTAB_DIRECT);
6327 if (result != target)
6328 emit_move_insn (target, result);
6329 }
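/* For illustration, the kind of GIMPLE this internal function stands for
   (the match is done earlier, in tree-ssa-ccp.c):

     mask = 1 << bit;
     old = __atomic_fetch_or (ptr, mask, model);
     res = (old & mask) != 0;

   collapses to IFN_ATOMIC_BIT_TEST_AND_SET; the tail of this function is
   the fallback expansion when the optab pattern is unavailable.  */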
6330
6331 /* Expand an atomic clear operation.
6332 void _atomic_clear (BOOL *obj, enum memmodel)
6333 EXP is the call expression. */
6334
6335 static rtx
6336 expand_builtin_atomic_clear (tree exp)
6337 {
6338 machine_mode mode;
6339 rtx mem, ret;
6340 enum memmodel model;
6341
6342 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6343 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6344 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6345
6346 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6347 {
6348 source_location loc
6349 = expansion_point_location_if_in_system_header (input_location);
6350 warning_at (loc, OPT_Winvalid_memory_model,
6351 "invalid memory model for %<__atomic_store%>");
6352 model = MEMMODEL_SEQ_CST;
6353 }
6354
6355 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6356 Failing that, emit a plain store ourselves. The only way this can
6357 happen is if the bool type is larger than a word size. Unlikely, but
6358 handle it anyway for completeness. Assume a single-threaded model since
6359 there is no atomic support in this case, and no barriers are required. */
6360 ret = expand_atomic_store (mem, const0_rtx, model, true);
6361 if (!ret)
6362 emit_move_insn (mem, const0_rtx);
6363 return const0_rtx;
6364 }
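/* Typical use, for illustration:

     __atomic_clear (&flag, __ATOMIC_RELEASE);   // flag = 0 with release

   Acquire-flavoured models are rejected above since a clear is a pure
   store.  */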
6365
6366 /* Expand an atomic test_and_set operation.
6367 bool _atomic_test_and_set (BOOL *obj, enum memmodel)
6368 EXP is the call expression. */
6369
6370 static rtx
6371 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6372 {
6373 rtx mem;
6374 enum memmodel model;
6375 machine_mode mode;
6376
6377 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6378 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6379 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6380
6381 return expand_atomic_test_and_set (target, mem, model);
6382 }
6383
6384
6385 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6386 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
6387
6388 static tree
6389 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6390 {
6391 int size;
6392 machine_mode mode;
6393 unsigned int mode_align, type_align;
6394
6395 if (TREE_CODE (arg0) != INTEGER_CST)
6396 return NULL_TREE;
6397
6398 /* We need a corresponding integer mode for the access to be lock-free. */
6399 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6400 if (!int_mode_for_size (size, 0).exists (&mode))
6401 return boolean_false_node;
6402
6403 mode_align = GET_MODE_ALIGNMENT (mode);
6404
6405 if (TREE_CODE (arg1) == INTEGER_CST)
6406 {
6407 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6408
6409 /* Either this argument is null, or it's a fake pointer encoding
6410 the alignment of the object. */
6411 val = least_bit_hwi (val);
6412 val *= BITS_PER_UNIT;
6413
6414 if (val == 0 || mode_align < val)
6415 type_align = mode_align;
6416 else
6417 type_align = val;
6418 }
6419 else
6420 {
6421 tree ttype = TREE_TYPE (arg1);
6422
6423 /* This function is usually invoked and folded immediately by the front
6424 end before anything else has a chance to look at it. The pointer
6425 parameter at this point is usually cast to a void *, so check for that
6426 and look past the cast. */
6427 if (CONVERT_EXPR_P (arg1)
6428 && POINTER_TYPE_P (ttype)
6429 && VOID_TYPE_P (TREE_TYPE (ttype))
6430 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6431 arg1 = TREE_OPERAND (arg1, 0);
6432
6433 ttype = TREE_TYPE (arg1);
6434 gcc_assert (POINTER_TYPE_P (ttype));
6435
6436 /* Get the underlying type of the object. */
6437 ttype = TREE_TYPE (ttype);
6438 type_align = TYPE_ALIGN (ttype);
6439 }
6440
6441 /* If the object has smaller alignment, the lock free routines cannot
6442 be used. */
6443 if (type_align < mode_align)
6444 return boolean_false_node;
6445
6446 /* Check if a compare_and_swap pattern exists for the mode which represents
6447 the required size. The pattern is not allowed to fail, so the existence
6448 of the pattern indicates support is present. Also require that an
6449 atomic load exists for the required size. */
6450 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6451 return boolean_true_node;
6452 else
6453 return boolean_false_node;
6454 }
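/* For example, on a typical 64-bit target:

     __atomic_always_lock_free (sizeof (int), 0)   // true
     __atomic_always_lock_free (3, 0)              // false: no 24-bit mode

   A null second argument means "assume typical alignment for an object
   of that size".  */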
6455
6456 /* Return true if the parameters to call EXP represent an object which will
6457 always generate lock free instructions. The first argument represents the
6458 size of the object, and the second parameter is a pointer to the object
6459 itself. If NULL is passed for the object, then the result is based on
6460 typical alignment for an object of the specified size. Otherwise return
6461 false. */
6462
6463 static rtx
6464 expand_builtin_atomic_always_lock_free (tree exp)
6465 {
6466 tree size;
6467 tree arg0 = CALL_EXPR_ARG (exp, 0);
6468 tree arg1 = CALL_EXPR_ARG (exp, 1);
6469
6470 if (TREE_CODE (arg0) != INTEGER_CST)
6471 {
6472 error ("non-constant argument 1 to __atomic_always_lock_free");
6473 return const0_rtx;
6474 }
6475
6476 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6477 if (size == boolean_true_node)
6478 return const1_rtx;
6479 return const0_rtx;
6480 }
6481
6482 /* Return one or zero if it can be determined that object ARG1 of size
6483 ARG0 is lock free on this architecture. */
6484
6485 static tree
6486 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6487 {
6488 if (!flag_inline_atomics)
6489 return NULL_TREE;
6490
6491 /* If it isn't always lock free, don't generate a result. */
6492 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6493 return boolean_true_node;
6494
6495 return NULL_TREE;
6496 }
6497
6498 /* Return true if the parameters to call EXP represent an object which will
6499 always generate lock free instructions. The first argument represents the
6500 size of the object, and the second parameter is a pointer to the object
6501 itself. If NULL is passed for the object, then the result is based on
6502 typical alignment for an object of the specified size. Otherwise return
6503 NULL. */
6504
6505 static rtx
6506 expand_builtin_atomic_is_lock_free (tree exp)
6507 {
6508 tree size;
6509 tree arg0 = CALL_EXPR_ARG (exp, 0);
6510 tree arg1 = CALL_EXPR_ARG (exp, 1);
6511
6512 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6513 {
6514 error ("non-integer argument 1 to __atomic_is_lock_free");
6515 return NULL_RTX;
6516 }
6517
6518 if (!flag_inline_atomics)
6519 return NULL_RTX;
6520
6521 /* If the value is known at compile time, return the RTX for it. */
6522 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6523 if (size == boolean_true_node)
6524 return const1_rtx;
6525
6526 return NULL_RTX;
6527 }
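/* Contrast with the always_lock_free case above: when nothing is known
   at compile time, NULL_RTX is returned and the builtin becomes a real
   call, e.g. (assuming libatomic is in use)

       __atomic_is_lock_free (sizeof (long double), p);

   may end up calling libatomic's __atomic_is_lock_free, which can
   inspect the runtime value of P, whereas __atomic_always_lock_free
   with the same arguments simply folds to false.  */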
6528
6529 /* Expand the __atomic_thread_fence intrinsic:
6530 void __atomic_thread_fence (enum memmodel)
6531 EXP is the CALL_EXPR. */
6532
6533 static void
6534 expand_builtin_atomic_thread_fence (tree exp)
6535 {
6536 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6537 expand_mem_thread_fence (model);
6538 }
6539
6540 /* Expand the __atomic_signal_fence intrinsic:
6541 void __atomic_signal_fence (enum memmodel)
6542 EXP is the CALL_EXPR. */
6543
6544 static void
6545 expand_builtin_atomic_signal_fence (tree exp)
6546 {
6547 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6548 expand_mem_signal_fence (model);
6549 }
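/* A sketch of the difference between the two fences, assuming a
   typical hosted target:

       __atomic_thread_fence (__ATOMIC_SEQ_CST);   hardware barrier
       __atomic_signal_fence (__ATOMIC_SEQ_CST);   compiler barrier only

   expand_mem_signal_fence only needs to order the generated code
   against a signal handler running on the same thread, so it normally
   emits no machine barrier instruction.  */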
6550
6551 /* Expand the __sync_synchronize intrinsic. */
6552
6553 static void
6554 expand_builtin_sync_synchronize (void)
6555 {
6556 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
6557 }
6558
6559 static rtx
6560 expand_builtin_thread_pointer (tree exp, rtx target)
6561 {
6562 enum insn_code icode;
6563 if (!validate_arglist (exp, VOID_TYPE))
6564 return const0_rtx;
6565 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6566 if (icode != CODE_FOR_nothing)
6567 {
6568 struct expand_operand op;
6569 /* If the target is not suitable, create a new target. */
6570 if (target == NULL_RTX
6571 || !REG_P (target)
6572 || GET_MODE (target) != Pmode)
6573 target = gen_reg_rtx (Pmode);
6574 create_output_operand (&op, target, Pmode);
6575 expand_insn (icode, 1, &op);
6576 return target;
6577 }
6578 error ("__builtin_thread_pointer is not supported on this target");
6579 return const0_rtx;
6580 }
6581
6582 static void
6583 expand_builtin_set_thread_pointer (tree exp)
6584 {
6585 enum insn_code icode;
6586 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6587 return;
6588 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6589 if (icode != CODE_FOR_nothing)
6590 {
6591 struct expand_operand op;
6592 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6593 Pmode, EXPAND_NORMAL);
6594 create_input_operand (&op, val, Pmode);
6595 expand_insn (icode, 1, &op);
6596 return;
6597 }
6598 error ("__builtin_set_thread_pointer is not supported on this target");
6599 }
6600
6601
6602 /* Emit code to restore the current value of the stack. */
6603
6604 static void
6605 expand_stack_restore (tree var)
6606 {
6607 rtx_insn *prev;
6608 rtx sa = expand_normal (var);
6609
6610 sa = convert_memory_address (Pmode, sa);
6611
6612 prev = get_last_insn ();
6613 emit_stack_restore (SAVE_BLOCK, sa);
6614
6615 record_new_stack_level ();
6616
6617 fixup_args_size_notes (prev, get_last_insn (), 0);
6618 }
6619
6620 /* Emit code to save the current value of the stack. */
6621
6622 static rtx
6623 expand_stack_save (void)
6624 {
6625 rtx ret = NULL_RTX;
6626
6627 emit_stack_save (SAVE_BLOCK, &ret);
6628 return ret;
6629 }
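/* These two expanders implement the __builtin_stack_save/restore pair
   that the gimplifier wraps around variable-length-array scopes; a
   minimal sketch of the generated shape:

       void *sp = __builtin_stack_save ();
       {
         char vla[n];
         ...
       }
       __builtin_stack_restore (sp);

   so the stack space used by the VLA is reclaimed on block exit.  */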
6630
6631
6632 /* Expand an expression EXP that calls a built-in function,
6633 with result going to TARGET if that's convenient
6634 (and in mode MODE if that's convenient).
6635 SUBTARGET may be used as the target for computing one of EXP's operands.
6636 IGNORE is nonzero if the value is to be ignored. */
6637
6638 rtx
6639 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
6640 int ignore)
6641 {
6642 tree fndecl = get_callee_fndecl (exp);
6643 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6644 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6645 int flags;
6646
6647 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6648 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6649
6650 /* When ASan is enabled, we don't want to expand some memory/string
6651 builtins; instead we rely on libsanitizer's hooks. This allows us to
6652 avoid redundant checks and be sure that a possible overflow will be
6653 detected by ASan. */
6654
6655 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
6656 return expand_call (exp, target, ignore);
6657
6658 /* When not optimizing, generate calls to library functions for a certain
6659 set of builtins. */
6660 if (!optimize
6661 && !called_as_built_in (fndecl)
6662 && fcode != BUILT_IN_FORK
6663 && fcode != BUILT_IN_EXECL
6664 && fcode != BUILT_IN_EXECV
6665 && fcode != BUILT_IN_EXECLP
6666 && fcode != BUILT_IN_EXECLE
6667 && fcode != BUILT_IN_EXECVP
6668 && fcode != BUILT_IN_EXECVE
6669 && !ALLOCA_FUNCTION_CODE_P (fcode)
6670 && fcode != BUILT_IN_FREE
6671 && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
6672 && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
6673 && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
6674 && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
6675 && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6676 && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
6677 && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
6678 && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
6679 && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
6680 && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
6681 && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
6682 && fcode != BUILT_IN_CHKP_BNDRET)
6683 return expand_call (exp, target, ignore);
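/* For example, even at -O0 a call to fork must still be routed through
   expand_builtin_fork_or_exec below so that -fprofile-arcs can redirect
   it to its instrumented wrapper, and alloca has no out-of-line
   equivalent at all, so neither may be handed to the library here.  */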
6684
6685 /* The built-in function expanders test for target == const0_rtx
6686 to determine whether the function's result will be ignored. */
6687 if (ignore)
6688 target = const0_rtx;
6689
6690 /* If the result of a pure or const built-in function is ignored, and
6691 none of its arguments are volatile, we can avoid expanding the
6692 built-in call and just evaluate the arguments for side-effects. */
6693 if (target == const0_rtx
6694 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
6695 && !(flags & ECF_LOOPING_CONST_OR_PURE))
6696 {
6697 bool volatilep = false;
6698 tree arg;
6699 call_expr_arg_iterator iter;
6700
6701 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6702 if (TREE_THIS_VOLATILE (arg))
6703 {
6704 volatilep = true;
6705 break;
6706 }
6707
6708 if (! volatilep)
6709 {
6710 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6711 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6712 return const0_rtx;
6713 }
6714 }
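/* A sketch of this shortcut: with strlen declared pure,

       (void) strlen (p);

   expands to just the evaluation of P and const0_rtx, with no call
   emitted, while the same statement with a volatile-qualified argument
   falls through to the normal expansion below.  */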
6715
6716 /* expand_builtin_with_bounds is supposed to be used for
6717 instrumented builtin calls. */
6718 gcc_assert (!CALL_WITH_BOUNDS_P (exp));
6719
6720 switch (fcode)
6721 {
6722 CASE_FLT_FN (BUILT_IN_FABS):
6723 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
6724 case BUILT_IN_FABSD32:
6725 case BUILT_IN_FABSD64:
6726 case BUILT_IN_FABSD128:
6727 target = expand_builtin_fabs (exp, target, subtarget);
6728 if (target)
6729 return target;
6730 break;
6731
6732 CASE_FLT_FN (BUILT_IN_COPYSIGN):
6733 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
6734 target = expand_builtin_copysign (exp, target, subtarget);
6735 if (target)
6736 return target;
6737 break;
6738
6739 /* Just do a normal library call if we were unable to fold
6740 the values. */
6741 CASE_FLT_FN (BUILT_IN_CABS):
6742 break;
6743
6744 CASE_FLT_FN (BUILT_IN_FMA):
6745 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
6746 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6747 if (target)
6748 return target;
6749 break;
6750
6751 CASE_FLT_FN (BUILT_IN_ILOGB):
6752 if (! flag_unsafe_math_optimizations)
6753 break;
6754 gcc_fallthrough ();
6755 CASE_FLT_FN (BUILT_IN_ISINF):
6756 CASE_FLT_FN (BUILT_IN_FINITE):
6757 case BUILT_IN_ISFINITE:
6758 case BUILT_IN_ISNORMAL:
6759 target = expand_builtin_interclass_mathfn (exp, target);
6760 if (target)
6761 return target;
6762 break;
6763
6764 CASE_FLT_FN (BUILT_IN_ICEIL):
6765 CASE_FLT_FN (BUILT_IN_LCEIL):
6766 CASE_FLT_FN (BUILT_IN_LLCEIL):
6767 CASE_FLT_FN (BUILT_IN_LFLOOR):
6768 CASE_FLT_FN (BUILT_IN_IFLOOR):
6769 CASE_FLT_FN (BUILT_IN_LLFLOOR):
6770 target = expand_builtin_int_roundingfn (exp, target);
6771 if (target)
6772 return target;
6773 break;
6774
6775 CASE_FLT_FN (BUILT_IN_IRINT):
6776 CASE_FLT_FN (BUILT_IN_LRINT):
6777 CASE_FLT_FN (BUILT_IN_LLRINT):
6778 CASE_FLT_FN (BUILT_IN_IROUND):
6779 CASE_FLT_FN (BUILT_IN_LROUND):
6780 CASE_FLT_FN (BUILT_IN_LLROUND):
6781 target = expand_builtin_int_roundingfn_2 (exp, target);
6782 if (target)
6783 return target;
6784 break;
6785
6786 CASE_FLT_FN (BUILT_IN_POWI):
6787 target = expand_builtin_powi (exp, target);
6788 if (target)
6789 return target;
6790 break;
6791
6792 CASE_FLT_FN (BUILT_IN_CEXPI):
6793 target = expand_builtin_cexpi (exp, target);
6794 gcc_assert (target);
6795 return target;
6796
6797 CASE_FLT_FN (BUILT_IN_SIN):
6798 CASE_FLT_FN (BUILT_IN_COS):
6799 if (! flag_unsafe_math_optimizations)
6800 break;
6801 target = expand_builtin_mathfn_3 (exp, target, subtarget);
6802 if (target)
6803 return target;
6804 break;
6805
6806 CASE_FLT_FN (BUILT_IN_SINCOS):
6807 if (! flag_unsafe_math_optimizations)
6808 break;
6809 target = expand_builtin_sincos (exp);
6810 if (target)
6811 return target;
6812 break;
6813
6814 case BUILT_IN_APPLY_ARGS:
6815 return expand_builtin_apply_args ();
6816
6817 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6818 FUNCTION with a copy of the parameters described by
6819 ARGUMENTS, and ARGSIZE. It returns a block of memory
6820 allocated on the stack into which is stored all the registers
6821 that might possibly be used for returning the result of a
6822 function. ARGUMENTS is the value returned by
6823 __builtin_apply_args. ARGSIZE is the number of bytes of
6824 arguments that must be copied. ??? How should this value be
6825 computed? We'll also need a safe worst case value for varargs
6826 functions. */
6827 case BUILT_IN_APPLY:
6828 if (!validate_arglist (exp, POINTER_TYPE,
6829 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6830 && !validate_arglist (exp, REFERENCE_TYPE,
6831 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6832 return const0_rtx;
6833 else
6834 {
6835 rtx ops[3];
6836
6837 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
6838 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
6839 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
6840
6841 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6842 }
6843
6844 /* __builtin_return (RESULT) causes the function to return the
6845 value described by RESULT. RESULT is the address of the block of
6846 memory returned by __builtin_apply. */
6847 case BUILT_IN_RETURN:
6848 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6849 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
6850 return const0_rtx;
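/* A hypothetical forwarding function built from the three builtins
   handled above:

       void *
       forward (void)
       {
         void *args = __builtin_apply_args ();
         void *ret = __builtin_apply ((void (*) ()) target_fn, args, 64);
         __builtin_return (ret);
       }

   where TARGET_FN and the 64-byte argument size are placeholders; the
   size constant is exactly the ??? question raised above, so the user
   must supply a safe upper bound.  */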
6851
6852 case BUILT_IN_SAVEREGS:
6853 return expand_builtin_saveregs ();
6854
6855 case BUILT_IN_VA_ARG_PACK:
6856 /* All valid uses of __builtin_va_arg_pack () are removed during
6857 inlining. */
6858 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
6859 return const0_rtx;
6860
6861 case BUILT_IN_VA_ARG_PACK_LEN:
6862 /* All valid uses of __builtin_va_arg_pack_len () are removed during
6863 inlining. */
6864 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
6865 return const0_rtx;
6866
6867 /* Return the address of the first anonymous stack arg. */
6868 case BUILT_IN_NEXT_ARG:
6869 if (fold_builtin_next_arg (exp, false))
6870 return const0_rtx;
6871 return expand_builtin_next_arg ();
6872
6873 case BUILT_IN_CLEAR_CACHE:
6874 target = expand_builtin___clear_cache (exp);
6875 if (target)
6876 return target;
6877 break;
6878
6879 case BUILT_IN_CLASSIFY_TYPE:
6880 return expand_builtin_classify_type (exp);
6881
6882 case BUILT_IN_CONSTANT_P:
6883 return const0_rtx;
6884
6885 case BUILT_IN_FRAME_ADDRESS:
6886 case BUILT_IN_RETURN_ADDRESS:
6887 return expand_builtin_frame_address (fndecl, exp);
6888
6889 /* Returns the address of the area where the structure value is
6890 returned, or 0 otherwise. */
6891 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
6892 if (call_expr_nargs (exp) != 0
6893 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
6894 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
6895 return const0_rtx;
6896 else
6897 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
6898
6899 CASE_BUILT_IN_ALLOCA:
6900 target = expand_builtin_alloca (exp);
6901 if (target)
6902 return target;
6903 break;
6904
6905 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
6906 return expand_asan_emit_allocas_unpoison (exp);
6907
6908 case BUILT_IN_STACK_SAVE:
6909 return expand_stack_save ();
6910
6911 case BUILT_IN_STACK_RESTORE:
6912 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
6913 return const0_rtx;
6914
6915 case BUILT_IN_BSWAP16:
6916 case BUILT_IN_BSWAP32:
6917 case BUILT_IN_BSWAP64:
6918 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
6919 if (target)
6920 return target;
6921 break;
6922
6923 CASE_INT_FN (BUILT_IN_FFS):
6924 target = expand_builtin_unop (target_mode, exp, target,
6925 subtarget, ffs_optab);
6926 if (target)
6927 return target;
6928 break;
6929
6930 CASE_INT_FN (BUILT_IN_CLZ):
6931 target = expand_builtin_unop (target_mode, exp, target,
6932 subtarget, clz_optab);
6933 if (target)
6934 return target;
6935 break;
6936
6937 CASE_INT_FN (BUILT_IN_CTZ):
6938 target = expand_builtin_unop (target_mode, exp, target,
6939 subtarget, ctz_optab);
6940 if (target)
6941 return target;
6942 break;
6943
6944 CASE_INT_FN (BUILT_IN_CLRSB):
6945 target = expand_builtin_unop (target_mode, exp, target,
6946 subtarget, clrsb_optab);
6947 if (target)
6948 return target;
6949 break;
6950
6951 CASE_INT_FN (BUILT_IN_POPCOUNT):
6952 target = expand_builtin_unop (target_mode, exp, target,
6953 subtarget, popcount_optab);
6954 if (target)
6955 return target;
6956 break;
6957
6958 CASE_INT_FN (BUILT_IN_PARITY):
6959 target = expand_builtin_unop (target_mode, exp, target,
6960 subtarget, parity_optab);
6961 if (target)
6962 return target;
6963 break;
6964
6965 case BUILT_IN_STRLEN:
6966 target = expand_builtin_strlen (exp, target, target_mode);
6967 if (target)
6968 return target;
6969 break;
6970
6971 case BUILT_IN_STRCAT:
6972 target = expand_builtin_strcat (exp, target);
6973 if (target)
6974 return target;
6975 break;
6976
6977 case BUILT_IN_STRCPY:
6978 target = expand_builtin_strcpy (exp, target);
6979 if (target)
6980 return target;
6981 break;
6982
6983 case BUILT_IN_STRNCAT:
6984 target = expand_builtin_strncat (exp, target);
6985 if (target)
6986 return target;
6987 break;
6988
6989 case BUILT_IN_STRNCPY:
6990 target = expand_builtin_strncpy (exp, target);
6991 if (target)
6992 return target;
6993 break;
6994
6995 case BUILT_IN_STPCPY:
6996 target = expand_builtin_stpcpy (exp, target, mode);
6997 if (target)
6998 return target;
6999 break;
7000
7001 case BUILT_IN_STPNCPY:
7002 target = expand_builtin_stpncpy (exp, target);
7003 if (target)
7004 return target;
7005 break;
7006
7007 case BUILT_IN_MEMCHR:
7008 target = expand_builtin_memchr (exp, target);
7009 if (target)
7010 return target;
7011 break;
7012
7013 case BUILT_IN_MEMCPY:
7014 target = expand_builtin_memcpy (exp, target);
7015 if (target)
7016 return target;
7017 break;
7018
7019 case BUILT_IN_MEMMOVE:
7020 target = expand_builtin_memmove (exp, target);
7021 if (target)
7022 return target;
7023 break;
7024
7025 case BUILT_IN_MEMPCPY:
7026 target = expand_builtin_mempcpy (exp, target);
7027 if (target)
7028 return target;
7029 break;
7030
7031 case BUILT_IN_MEMSET:
7032 target = expand_builtin_memset (exp, target, mode);
7033 if (target)
7034 return target;
7035 break;
7036
7037 case BUILT_IN_BZERO:
7038 target = expand_builtin_bzero (exp);
7039 if (target)
7040 return target;
7041 break;
7042
7043 case BUILT_IN_STRCMP:
7044 target = expand_builtin_strcmp (exp, target);
7045 if (target)
7046 return target;
7047 break;
7048
7049 case BUILT_IN_STRNCMP:
7050 target = expand_builtin_strncmp (exp, target, mode);
7051 if (target)
7052 return target;
7053 break;
7054
7055 case BUILT_IN_BCMP:
7056 case BUILT_IN_MEMCMP:
7057 case BUILT_IN_MEMCMP_EQ:
7058 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
7059 if (target)
7060 return target;
7061 if (fcode == BUILT_IN_MEMCMP_EQ)
7062 {
7063 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
7064 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
7065 }
7066 break;
7067
7068 case BUILT_IN_SETJMP:
7069 /* This should have been lowered to the builtins below. */
7070 gcc_unreachable ();
7071
7072 case BUILT_IN_SETJMP_SETUP:
7073 /* __builtin_setjmp_setup is passed a pointer to an array of five words
7074 and the receiver label. */
7075 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7076 {
7077 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7078 VOIDmode, EXPAND_NORMAL);
7079 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
7080 rtx_insn *label_r = label_rtx (label);
7081
7082 /* This is copied from the handling of non-local gotos. */
7083 expand_builtin_setjmp_setup (buf_addr, label_r);
7084 nonlocal_goto_handler_labels
7085 = gen_rtx_INSN_LIST (VOIDmode, label_r,
7086 nonlocal_goto_handler_labels);
7087 /* ??? Do not let expand_label treat us as such since we would
7088 not want to be both on the list of non-local labels and on
7089 the list of forced labels. */
7090 FORCED_LABEL (label) = 0;
7091 return const0_rtx;
7092 }
7093 break;
7094
7095 case BUILT_IN_SETJMP_RECEIVER:
7096 /* __builtin_setjmp_receiver is passed the receiver label. */
7097 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7098 {
7099 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
7100 rtx_insn *label_r = label_rtx (label);
7101
7102 expand_builtin_setjmp_receiver (label_r);
7103 return const0_rtx;
7104 }
7105 break;
7106
7107 /* __builtin_longjmp is passed a pointer to an array of five words.
7108 It's similar to the C library longjmp function but works with
7109 __builtin_setjmp above. */
7110 case BUILT_IN_LONGJMP:
7111 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7112 {
7113 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7114 VOIDmode, EXPAND_NORMAL);
7115 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
7116
7117 if (value != const1_rtx)
7118 {
7119 error ("%<__builtin_longjmp%> second argument must be 1");
7120 return const0_rtx;
7121 }
7122
7123 expand_builtin_longjmp (buf_addr, value);
7124 return const0_rtx;
7125 }
7126 break;
7127
7128 case BUILT_IN_NONLOCAL_GOTO:
7129 target = expand_builtin_nonlocal_goto (exp);
7130 if (target)
7131 return target;
7132 break;
7133
7134 /* This updates the setjmp buffer that is its argument with the value
7135 of the current stack pointer. */
7136 case BUILT_IN_UPDATE_SETJMP_BUF:
7137 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7138 {
7139 rtx buf_addr
7140 = expand_normal (CALL_EXPR_ARG (exp, 0));
7141
7142 expand_builtin_update_setjmp_buf (buf_addr);
7143 return const0_rtx;
7144 }
7145 break;
7146
7147 case BUILT_IN_TRAP:
7148 expand_builtin_trap ();
7149 return const0_rtx;
7150
7151 case BUILT_IN_UNREACHABLE:
7152 expand_builtin_unreachable ();
7153 return const0_rtx;
7154
7155 CASE_FLT_FN (BUILT_IN_SIGNBIT):
7156 case BUILT_IN_SIGNBITD32:
7157 case BUILT_IN_SIGNBITD64:
7158 case BUILT_IN_SIGNBITD128:
7159 target = expand_builtin_signbit (exp, target);
7160 if (target)
7161 return target;
7162 break;
7163
7164 /* Various hooks for the DWARF 2 __throw routine. */
7165 case BUILT_IN_UNWIND_INIT:
7166 expand_builtin_unwind_init ();
7167 return const0_rtx;
7168 case BUILT_IN_DWARF_CFA:
7169 return virtual_cfa_rtx;
7170 #ifdef DWARF2_UNWIND_INFO
7171 case BUILT_IN_DWARF_SP_COLUMN:
7172 return expand_builtin_dwarf_sp_column ();
7173 case BUILT_IN_INIT_DWARF_REG_SIZES:
7174 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
7175 return const0_rtx;
7176 #endif
7177 case BUILT_IN_FROB_RETURN_ADDR:
7178 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
7179 case BUILT_IN_EXTRACT_RETURN_ADDR:
7180 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
7181 case BUILT_IN_EH_RETURN:
7182 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7183 CALL_EXPR_ARG (exp, 1));
7184 return const0_rtx;
7185 case BUILT_IN_EH_RETURN_DATA_REGNO:
7186 return expand_builtin_eh_return_data_regno (exp);
7187 case BUILT_IN_EXTEND_POINTER:
7188 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
7189 case BUILT_IN_EH_POINTER:
7190 return expand_builtin_eh_pointer (exp);
7191 case BUILT_IN_EH_FILTER:
7192 return expand_builtin_eh_filter (exp);
7193 case BUILT_IN_EH_COPY_VALUES:
7194 return expand_builtin_eh_copy_values (exp);
7195
7196 case BUILT_IN_VA_START:
7197 return expand_builtin_va_start (exp);
7198 case BUILT_IN_VA_END:
7199 return expand_builtin_va_end (exp);
7200 case BUILT_IN_VA_COPY:
7201 return expand_builtin_va_copy (exp);
7202 case BUILT_IN_EXPECT:
7203 return expand_builtin_expect (exp, target);
7204 case BUILT_IN_ASSUME_ALIGNED:
7205 return expand_builtin_assume_aligned (exp, target);
7206 case BUILT_IN_PREFETCH:
7207 expand_builtin_prefetch (exp);
7208 return const0_rtx;
7209
7210 case BUILT_IN_INIT_TRAMPOLINE:
7211 return expand_builtin_init_trampoline (exp, true);
7212 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7213 return expand_builtin_init_trampoline (exp, false);
7214 case BUILT_IN_ADJUST_TRAMPOLINE:
7215 return expand_builtin_adjust_trampoline (exp);
7216
7217 case BUILT_IN_INIT_DESCRIPTOR:
7218 return expand_builtin_init_descriptor (exp);
7219 case BUILT_IN_ADJUST_DESCRIPTOR:
7220 return expand_builtin_adjust_descriptor (exp);
7221
7222 case BUILT_IN_FORK:
7223 case BUILT_IN_EXECL:
7224 case BUILT_IN_EXECV:
7225 case BUILT_IN_EXECLP:
7226 case BUILT_IN_EXECLE:
7227 case BUILT_IN_EXECVP:
7228 case BUILT_IN_EXECVE:
7229 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7230 if (target)
7231 return target;
7232 break;
7233
7234 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7235 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7236 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7237 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7238 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7239 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
7240 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
7241 if (target)
7242 return target;
7243 break;
7244
7245 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7246 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7247 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7248 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7249 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7250 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
7251 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
7252 if (target)
7253 return target;
7254 break;
7255
7256 case BUILT_IN_SYNC_FETCH_AND_OR_1:
7257 case BUILT_IN_SYNC_FETCH_AND_OR_2:
7258 case BUILT_IN_SYNC_FETCH_AND_OR_4:
7259 case BUILT_IN_SYNC_FETCH_AND_OR_8:
7260 case BUILT_IN_SYNC_FETCH_AND_OR_16:
7261 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
7262 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
7263 if (target)
7264 return target;
7265 break;
7266
7267 case BUILT_IN_SYNC_FETCH_AND_AND_1:
7268 case BUILT_IN_SYNC_FETCH_AND_AND_2:
7269 case BUILT_IN_SYNC_FETCH_AND_AND_4:
7270 case BUILT_IN_SYNC_FETCH_AND_AND_8:
7271 case BUILT_IN_SYNC_FETCH_AND_AND_16:
7272 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
7273 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
7274 if (target)
7275 return target;
7276 break;
7277
7278 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7279 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7280 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7281 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7282 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7283 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
7284 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
7285 if (target)
7286 return target;
7287 break;
7288
7289 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7290 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7291 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7292 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7293 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7294 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
7295 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
7296 if (target)
7297 return target;
7298 break;
7299
7300 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7301 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7302 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7303 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7304 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7305 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
7306 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
7307 if (target)
7308 return target;
7309 break;
7310
7311 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7312 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7313 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7314 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7315 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7316 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
7317 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
7318 if (target)
7319 return target;
7320 break;
7321
7322 case BUILT_IN_SYNC_OR_AND_FETCH_1:
7323 case BUILT_IN_SYNC_OR_AND_FETCH_2:
7324 case BUILT_IN_SYNC_OR_AND_FETCH_4:
7325 case BUILT_IN_SYNC_OR_AND_FETCH_8:
7326 case BUILT_IN_SYNC_OR_AND_FETCH_16:
7327 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
7328 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
7329 if (target)
7330 return target;
7331 break;
7332
7333 case BUILT_IN_SYNC_AND_AND_FETCH_1:
7334 case BUILT_IN_SYNC_AND_AND_FETCH_2:
7335 case BUILT_IN_SYNC_AND_AND_FETCH_4:
7336 case BUILT_IN_SYNC_AND_AND_FETCH_8:
7337 case BUILT_IN_SYNC_AND_AND_FETCH_16:
7338 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
7339 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
7340 if (target)
7341 return target;
7342 break;
7343
7344 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7345 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7346 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7347 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7348 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7349 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
7350 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
7351 if (target)
7352 return target;
7353 break;
7354
7355 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7356 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7357 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7358 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7359 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7360 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
7361 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
7362 if (target)
7363 return target;
7364 break;
7365
7366 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7367 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7368 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7369 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7370 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
7371 if (mode == VOIDmode)
7372 mode = TYPE_MODE (boolean_type_node);
7373 if (!target || !register_operand (target, mode))
7374 target = gen_reg_rtx (mode);
7375
7376 mode = get_builtin_sync_mode
7377 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
7378 target = expand_builtin_compare_and_swap (mode, exp, true, target);
7379 if (target)
7380 return target;
7381 break;
7382
7383 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7384 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7385 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7386 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7387 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7388 mode = get_builtin_sync_mode
7389 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
7390 target = expand_builtin_compare_and_swap (mode, exp, false, target);
7391 if (target)
7392 return target;
7393 break;
7394
7395 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
7396 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
7397 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
7398 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
7399 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
7400 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
7401 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
7402 if (target)
7403 return target;
7404 break;
7405
7406 case BUILT_IN_SYNC_LOCK_RELEASE_1:
7407 case BUILT_IN_SYNC_LOCK_RELEASE_2:
7408 case BUILT_IN_SYNC_LOCK_RELEASE_4:
7409 case BUILT_IN_SYNC_LOCK_RELEASE_8:
7410 case BUILT_IN_SYNC_LOCK_RELEASE_16:
7411 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
7412 expand_builtin_sync_lock_release (mode, exp);
7413 return const0_rtx;
7414
7415 case BUILT_IN_SYNC_SYNCHRONIZE:
7416 expand_builtin_sync_synchronize ();
7417 return const0_rtx;
7418
7419 case BUILT_IN_ATOMIC_EXCHANGE_1:
7420 case BUILT_IN_ATOMIC_EXCHANGE_2:
7421 case BUILT_IN_ATOMIC_EXCHANGE_4:
7422 case BUILT_IN_ATOMIC_EXCHANGE_8:
7423 case BUILT_IN_ATOMIC_EXCHANGE_16:
7424 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
7425 target = expand_builtin_atomic_exchange (mode, exp, target);
7426 if (target)
7427 return target;
7428 break;
7429
7430 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
7431 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
7432 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
7433 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
7434 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
7435 {
7436 unsigned int nargs, z;
7437 vec<tree, va_gc> *vec;
7438
7439 mode =
7440 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
7441 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
7442 if (target)
7443 return target;
7444
7445 /* If this is turned into an external library call, the weak parameter
7446 must be dropped to match the expected parameter list. */
7447 nargs = call_expr_nargs (exp);
7448 vec_alloc (vec, nargs - 1);
7449 for (z = 0; z < 3; z++)
7450 vec->quick_push (CALL_EXPR_ARG (exp, z));
7451 /* Skip the boolean weak parameter. */
7452 for (z = 4; z < 6; z++)
7453 vec->quick_push (CALL_EXPR_ARG (exp, z));
7454 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
7455 break;
7456 }
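/* Concretely: the builtin form

       __atomic_compare_exchange_4 (ptr, expected, desired,
                                    weak, success, failure)

   carries six arguments, while libatomic's out-of-line
   __atomic_compare_exchange_4 takes five (no WEAK), so arguments 0-2
   and 4-5 are kept above and argument 3 is dropped before the call
   expression is rebuilt.  */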
7457
7458 case BUILT_IN_ATOMIC_LOAD_1:
7459 case BUILT_IN_ATOMIC_LOAD_2:
7460 case BUILT_IN_ATOMIC_LOAD_4:
7461 case BUILT_IN_ATOMIC_LOAD_8:
7462 case BUILT_IN_ATOMIC_LOAD_16:
7463 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
7464 target = expand_builtin_atomic_load (mode, exp, target);
7465 if (target)
7466 return target;
7467 break;
7468
7469 case BUILT_IN_ATOMIC_STORE_1:
7470 case BUILT_IN_ATOMIC_STORE_2:
7471 case BUILT_IN_ATOMIC_STORE_4:
7472 case BUILT_IN_ATOMIC_STORE_8:
7473 case BUILT_IN_ATOMIC_STORE_16:
7474 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
7475 target = expand_builtin_atomic_store (mode, exp);
7476 if (target)
7477 return const0_rtx;
7478 break;
7479
7480 case BUILT_IN_ATOMIC_ADD_FETCH_1:
7481 case BUILT_IN_ATOMIC_ADD_FETCH_2:
7482 case BUILT_IN_ATOMIC_ADD_FETCH_4:
7483 case BUILT_IN_ATOMIC_ADD_FETCH_8:
7484 case BUILT_IN_ATOMIC_ADD_FETCH_16:
7485 {
7486 enum built_in_function lib;
7487 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
7488 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
7489 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
7490 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
7491 ignore, lib);
7492 if (target)
7493 return target;
7494 break;
7495 }
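/* The LIB computation above relies on the _1/_2/_4/_8/_16 variants of
   each operation being contiguous in the enum: for
   BUILT_IN_ATOMIC_ADD_FETCH_4 it selects BUILT_IN_ATOMIC_FETCH_ADD_4,
   since add_fetch (p, v) can be recovered from the fetch-and-op
   library entry point as fetch_add (p, v) + v.  */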
7496 case BUILT_IN_ATOMIC_SUB_FETCH_1:
7497 case BUILT_IN_ATOMIC_SUB_FETCH_2:
7498 case BUILT_IN_ATOMIC_SUB_FETCH_4:
7499 case BUILT_IN_ATOMIC_SUB_FETCH_8:
7500 case BUILT_IN_ATOMIC_SUB_FETCH_16:
7501 {
7502 enum built_in_function lib;
7503 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
7504 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
7505 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
7506 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
7507 ignore, lib);
7508 if (target)
7509 return target;
7510 break;
7511 }
7512 case BUILT_IN_ATOMIC_AND_FETCH_1:
7513 case BUILT_IN_ATOMIC_AND_FETCH_2:
7514 case BUILT_IN_ATOMIC_AND_FETCH_4:
7515 case BUILT_IN_ATOMIC_AND_FETCH_8:
7516 case BUILT_IN_ATOMIC_AND_FETCH_16:
7517 {
7518 enum built_in_function lib;
7519 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
7520 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
7521 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
7522 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
7523 ignore, lib);
7524 if (target)
7525 return target;
7526 break;
7527 }
7528 case BUILT_IN_ATOMIC_NAND_FETCH_1:
7529 case BUILT_IN_ATOMIC_NAND_FETCH_2:
7530 case BUILT_IN_ATOMIC_NAND_FETCH_4:
7531 case BUILT_IN_ATOMIC_NAND_FETCH_8:
7532 case BUILT_IN_ATOMIC_NAND_FETCH_16:
7533 {
7534 enum built_in_function lib;
7535 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
7536 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
7537 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
7538 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
7539 ignore, lib);
7540 if (target)
7541 return target;
7542 break;
7543 }
7544 case BUILT_IN_ATOMIC_XOR_FETCH_1:
7545 case BUILT_IN_ATOMIC_XOR_FETCH_2:
7546 case BUILT_IN_ATOMIC_XOR_FETCH_4:
7547 case BUILT_IN_ATOMIC_XOR_FETCH_8:
7548 case BUILT_IN_ATOMIC_XOR_FETCH_16:
7549 {
7550 enum built_in_function lib;
7551 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
7552 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
7553 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
7554 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
7555 ignore, lib);
7556 if (target)
7557 return target;
7558 break;
7559 }
7560 case BUILT_IN_ATOMIC_OR_FETCH_1:
7561 case BUILT_IN_ATOMIC_OR_FETCH_2:
7562 case BUILT_IN_ATOMIC_OR_FETCH_4:
7563 case BUILT_IN_ATOMIC_OR_FETCH_8:
7564 case BUILT_IN_ATOMIC_OR_FETCH_16:
7565 {
7566 enum built_in_function lib;
7567 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
7568 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
7569 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
7570 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
7571 ignore, lib);
7572 if (target)
7573 return target;
7574 break;
7575 }
7576 case BUILT_IN_ATOMIC_FETCH_ADD_1:
7577 case BUILT_IN_ATOMIC_FETCH_ADD_2:
7578 case BUILT_IN_ATOMIC_FETCH_ADD_4:
7579 case BUILT_IN_ATOMIC_FETCH_ADD_8:
7580 case BUILT_IN_ATOMIC_FETCH_ADD_16:
7581 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
7582 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
7583 ignore, BUILT_IN_NONE);
7584 if (target)
7585 return target;
7586 break;
7587
7588 case BUILT_IN_ATOMIC_FETCH_SUB_1:
7589 case BUILT_IN_ATOMIC_FETCH_SUB_2:
7590 case BUILT_IN_ATOMIC_FETCH_SUB_4:
7591 case BUILT_IN_ATOMIC_FETCH_SUB_8:
7592 case BUILT_IN_ATOMIC_FETCH_SUB_16:
7593 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
7594 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
7595 ignore, BUILT_IN_NONE);
7596 if (target)
7597 return target;
7598 break;
7599
7600 case BUILT_IN_ATOMIC_FETCH_AND_1:
7601 case BUILT_IN_ATOMIC_FETCH_AND_2:
7602 case BUILT_IN_ATOMIC_FETCH_AND_4:
7603 case BUILT_IN_ATOMIC_FETCH_AND_8:
7604 case BUILT_IN_ATOMIC_FETCH_AND_16:
7605 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
7606 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
7607 ignore, BUILT_IN_NONE);
7608 if (target)
7609 return target;
7610 break;
7611
7612 case BUILT_IN_ATOMIC_FETCH_NAND_1:
7613 case BUILT_IN_ATOMIC_FETCH_NAND_2:
7614 case BUILT_IN_ATOMIC_FETCH_NAND_4:
7615 case BUILT_IN_ATOMIC_FETCH_NAND_8:
7616 case BUILT_IN_ATOMIC_FETCH_NAND_16:
7617 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
7618 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
7619 ignore, BUILT_IN_NONE);
7620 if (target)
7621 return target;
7622 break;
7623
7624 case BUILT_IN_ATOMIC_FETCH_XOR_1:
7625 case BUILT_IN_ATOMIC_FETCH_XOR_2:
7626 case BUILT_IN_ATOMIC_FETCH_XOR_4:
7627 case BUILT_IN_ATOMIC_FETCH_XOR_8:
7628 case BUILT_IN_ATOMIC_FETCH_XOR_16:
7629 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
7630 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
7631 ignore, BUILT_IN_NONE);
7632 if (target)
7633 return target;
7634 break;
7635
7636 case BUILT_IN_ATOMIC_FETCH_OR_1:
7637 case BUILT_IN_ATOMIC_FETCH_OR_2:
7638 case BUILT_IN_ATOMIC_FETCH_OR_4:
7639 case BUILT_IN_ATOMIC_FETCH_OR_8:
7640 case BUILT_IN_ATOMIC_FETCH_OR_16:
7641 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
7642 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
7643 ignore, BUILT_IN_NONE);
7644 if (target)
7645 return target;
7646 break;
7647
7648 case BUILT_IN_ATOMIC_TEST_AND_SET:
7649 return expand_builtin_atomic_test_and_set (exp, target);
7650
7651 case BUILT_IN_ATOMIC_CLEAR:
7652 return expand_builtin_atomic_clear (exp);
7653
7654 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
7655 return expand_builtin_atomic_always_lock_free (exp);
7656
7657 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
7658 target = expand_builtin_atomic_is_lock_free (exp);
7659 if (target)
7660 return target;
7661 break;
7662
7663 case BUILT_IN_ATOMIC_THREAD_FENCE:
7664 expand_builtin_atomic_thread_fence (exp);
7665 return const0_rtx;
7666
7667 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
7668 expand_builtin_atomic_signal_fence (exp);
7669 return const0_rtx;
7670
7671 case BUILT_IN_OBJECT_SIZE:
7672 return expand_builtin_object_size (exp);
7673
7674 case BUILT_IN_MEMCPY_CHK:
7675 case BUILT_IN_MEMPCPY_CHK:
7676 case BUILT_IN_MEMMOVE_CHK:
7677 case BUILT_IN_MEMSET_CHK:
7678 target = expand_builtin_memory_chk (exp, target, mode, fcode);
7679 if (target)
7680 return target;
7681 break;
7682
7683 case BUILT_IN_STRCPY_CHK:
7684 case BUILT_IN_STPCPY_CHK:
7685 case BUILT_IN_STRNCPY_CHK:
7686 case BUILT_IN_STPNCPY_CHK:
7687 case BUILT_IN_STRCAT_CHK:
7688 case BUILT_IN_STRNCAT_CHK:
7689 case BUILT_IN_SNPRINTF_CHK:
7690 case BUILT_IN_VSNPRINTF_CHK:
7691 maybe_emit_chk_warning (exp, fcode);
7692 break;
7693
7694 case BUILT_IN_SPRINTF_CHK:
7695 case BUILT_IN_VSPRINTF_CHK:
7696 maybe_emit_sprintf_chk_warning (exp, fcode);
7697 break;
7698
7699 case BUILT_IN_FREE:
7700 if (warn_free_nonheap_object)
7701 maybe_emit_free_warning (exp);
7702 break;
7703
7704 case BUILT_IN_THREAD_POINTER:
7705 return expand_builtin_thread_pointer (exp, target);
7706
7707 case BUILT_IN_SET_THREAD_POINTER:
7708 expand_builtin_set_thread_pointer (exp);
7709 return const0_rtx;
7710
7711 case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
7712 case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
7713 case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
7714 case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
7715 case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
7716 case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
7717 case BUILT_IN_CHKP_SET_PTR_BOUNDS:
7718 case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
7719 case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
7720 case BUILT_IN_CHKP_GET_PTR_LBOUND:
7721 case BUILT_IN_CHKP_GET_PTR_UBOUND:
7722 /* We allow user CHKP builtins if Pointer Bounds
7723 Checker is off. */
7724 if (!chkp_function_instrumented_p (current_function_decl))
7725 {
7726 if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
7727 || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
7728 || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
7729 || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
7730 || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
7731 return expand_normal (CALL_EXPR_ARG (exp, 0));
7732 else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
7733 return expand_normal (size_zero_node);
7734 else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
7735 return expand_normal (size_int (-1));
7736 else
7737 return const0_rtx;
7738 }
7739 /* FALLTHROUGH */
7740
7741 case BUILT_IN_CHKP_BNDMK:
7742 case BUILT_IN_CHKP_BNDSTX:
7743 case BUILT_IN_CHKP_BNDCL:
7744 case BUILT_IN_CHKP_BNDCU:
7745 case BUILT_IN_CHKP_BNDLDX:
7746 case BUILT_IN_CHKP_BNDRET:
7747 case BUILT_IN_CHKP_INTERSECT:
7748 case BUILT_IN_CHKP_NARROW:
7749 case BUILT_IN_CHKP_EXTRACT_LOWER:
7750 case BUILT_IN_CHKP_EXTRACT_UPPER:
7751 /* Software implementation of Pointer Bounds Checker is NYI.
7752 Target support is required. */
7753 error ("Your target platform does not support -fcheck-pointer-bounds");
7754 break;
7755
7756 case BUILT_IN_ACC_ON_DEVICE:
7757 /* Do a library call if we failed to expand the builtin when
7758 folding. */
7759 break;
7760
7761 default: /* just do a library call, if unknown builtin */
7762 break;
7763 }
7764
7765 /* The switch statement above can drop through to cause the function
7766 to be called normally. */
7767 return expand_call (exp, target, ignore);
7768 }
7769
7770 /* Similar to expand_builtin but is used for instrumented calls. */
7771
7772 rtx
7773 expand_builtin_with_bounds (tree exp, rtx target,
7774 rtx subtarget ATTRIBUTE_UNUSED,
7775 machine_mode mode, int ignore)
7776 {
7777 tree fndecl = get_callee_fndecl (exp);
7778 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7779
7780 gcc_assert (CALL_WITH_BOUNDS_P (exp));
7781
7782 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7783 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7784
7785 gcc_assert (fcode > BEGIN_CHKP_BUILTINS
7786 && fcode < END_CHKP_BUILTINS);
7787
7788 switch (fcode)
7789 {
7790 case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
7791 target = expand_builtin_memcpy_with_bounds (exp, target);
7792 if (target)
7793 return target;
7794 break;
7795
7796 case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
7797 target = expand_builtin_mempcpy_with_bounds (exp, target);
7798 if (target)
7799 return target;
7800 break;
7801
7802 case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
7803 target = expand_builtin_memset_with_bounds (exp, target, mode);
7804 if (target)
7805 return target;
7806 break;
7807
7808 case BUILT_IN_MEMCPY_CHKP:
7809 case BUILT_IN_MEMMOVE_CHKP:
7810 case BUILT_IN_MEMPCPY_CHKP:
7811 if (call_expr_nargs (exp) > 3)
7812 {
7813 /* memcpy_chkp (void *dst, size_t dstbnd,
7814 const void *src, size_t srcbnd, size_t n)
7815 and others take a pointer bound argument just after each
7816 pointer argument. */
7817 tree dest = CALL_EXPR_ARG (exp, 0);
7818 tree src = CALL_EXPR_ARG (exp, 2);
7819 tree len = CALL_EXPR_ARG (exp, 4);
7820
7821 check_memop_access (exp, dest, src, len);
7822 break;
7823 }
7824
7825 default:
7826 break;
7827 }
7828
7829 /* The switch statement above can drop through to cause the function
7830 to be called normally. */
7831 return expand_call (exp, target, ignore);
7832 }
7833
7834 /* Determine whether a tree node represents a call to a built-in
7835 function. If the tree T is a call to a built-in function with
7836 the right number of arguments of the appropriate types, return
7837 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7838 Otherwise the return value is END_BUILTINS. */
7839
7840 enum built_in_function
7841 builtin_mathfn_code (const_tree t)
7842 {
7843 const_tree fndecl, arg, parmlist;
7844 const_tree argtype, parmtype;
7845 const_call_expr_arg_iterator iter;
7846
7847 if (TREE_CODE (t) != CALL_EXPR)
7848 return END_BUILTINS;
7849
7850 fndecl = get_callee_fndecl (t);
7851 if (fndecl == NULL_TREE
7852 || TREE_CODE (fndecl) != FUNCTION_DECL
7853 || ! DECL_BUILT_IN (fndecl)
7854 || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7855 return END_BUILTINS;
7856
7857 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7858 init_const_call_expr_arg_iterator (t, &iter);
7859 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7860 {
7861 /* If a function doesn't take a variable number of arguments,
7862 the last element in the list will have type `void'. */
7863 parmtype = TREE_VALUE (parmlist);
7864 if (VOID_TYPE_P (parmtype))
7865 {
7866 if (more_const_call_expr_args_p (&iter))
7867 return END_BUILTINS;
7868 return DECL_FUNCTION_CODE (fndecl);
7869 }
7870
7871 if (! more_const_call_expr_args_p (&iter))
7872 return END_BUILTINS;
7873
7874 arg = next_const_call_expr_arg (&iter);
7875 argtype = TREE_TYPE (arg);
7876
7877 if (SCALAR_FLOAT_TYPE_P (parmtype))
7878 {
7879 if (! SCALAR_FLOAT_TYPE_P (argtype))
7880 return END_BUILTINS;
7881 }
7882 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
7883 {
7884 if (! COMPLEX_FLOAT_TYPE_P (argtype))
7885 return END_BUILTINS;
7886 }
7887 else if (POINTER_TYPE_P (parmtype))
7888 {
7889 if (! POINTER_TYPE_P (argtype))
7890 return END_BUILTINS;
7891 }
7892 else if (INTEGRAL_TYPE_P (parmtype))
7893 {
7894 if (! INTEGRAL_TYPE_P (argtype))
7895 return END_BUILTINS;
7896 }
7897 else
7898 return END_BUILTINS;
7899 }
7900
7901 /* Variable-length argument list. */
7902 return DECL_FUNCTION_CODE (fndecl);
7903 }
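/* A sketch of the matching rules: for sqrt (x) with X of type double,
   the loop above pairs the REAL_TYPE parameter with a REAL_TYPE
   argument and yields BUILT_IN_SQRT, while sqrt applied to a pointer,
   or called with the wrong number of arguments, falls out as
   END_BUILTINS.  */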
7904
7905 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
7906 evaluate to a constant. */
7907
7908 static tree
7909 fold_builtin_constant_p (tree arg)
7910 {
7911 /* We return 1 for a numeric type that's known to be a constant
7912 value at compile-time or for an aggregate type that's a
7913 literal constant. */
7914 STRIP_NOPS (arg);
7915
7916 /* If we know this is a constant, return the constant one. */
7917 if (CONSTANT_CLASS_P (arg)
7918 || (TREE_CODE (arg) == CONSTRUCTOR
7919 && TREE_CONSTANT (arg)))
7920 return integer_one_node;
7921 if (TREE_CODE (arg) == ADDR_EXPR)
7922 {
7923 tree op = TREE_OPERAND (arg, 0);
7924 if (TREE_CODE (op) == STRING_CST
7925 || (TREE_CODE (op) == ARRAY_REF
7926 && integer_zerop (TREE_OPERAND (op, 1))
7927 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
7928 return integer_one_node;
7929 }
7930
7931 /* If this expression has side effects, show we don't know it to be a
7932 constant. Likewise if it's a pointer or aggregate type since in
7933 those cases we only want literals, since those are only optimized
7934 when generating RTL, not later.
7935 And finally, if we are compiling an initializer, not code, we
7936 need to return a definite result now; there's not going to be any
7937 more optimization done. */
7938 if (TREE_SIDE_EFFECTS (arg)
7939 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
7940 || POINTER_TYPE_P (TREE_TYPE (arg))
7941 || cfun == 0
7942 || folding_initializer
7943 || force_folding_builtin_constant_p)
7944 return integer_zero_node;
7945
7946 return NULL_TREE;
7947 }
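/* Examples of the possible outcomes:

       __builtin_constant_p (3)       folds to 1;
       __builtin_constant_p ("abc")   folds to 1 (ADDR_EXPR of a
                                      STRING_CST);
       __builtin_constant_p (x++)     folds to 0 (side effects);
       __builtin_constant_p (x)       yields NULL_TREE, deferring the
                                      answer to later optimization.  */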
7948
7949 /* Create builtin_expect with PRED and EXPECTED as its arguments and
7950 return it as a truthvalue. */
7951
7952 static tree
7953 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
7954 tree predictor)
7955 {
7956 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
7957
7958 fn = builtin_decl_explicit (BUILT_IN_EXPECT);
7959 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
7960 ret_type = TREE_TYPE (TREE_TYPE (fn));
7961 pred_type = TREE_VALUE (arg_types);
7962 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
7963
7964 pred = fold_convert_loc (loc, pred_type, pred);
7965 expected = fold_convert_loc (loc, expected_type, expected);
7966 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
7967 predictor);
7968
7969 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
7970 build_int_cst (ret_type, 0));
7971 }
7972
7973 /* Fold a call to builtin_expect with arguments ARG0, ARG1, and ARG2. Return
7974 NULL_TREE if no simplification is possible. */
7975
7976 tree
7977 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
7978 {
7979 tree inner, fndecl, inner_arg0;
7980 enum tree_code code;
7981
7982 /* Distribute the expected value over short-circuiting operators.
7983 See through the cast from truthvalue_type_node to long. */
7984 inner_arg0 = arg0;
7985 while (CONVERT_EXPR_P (inner_arg0)
7986 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
7987 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
7988 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
7989
7990 /* If this is a builtin_expect within a builtin_expect keep the
7991 inner one. See through a comparison against a constant. It
7992 might have been added to create a truthvalue. */
7993 inner = inner_arg0;
7994
7995 if (COMPARISON_CLASS_P (inner)
7996 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
7997 inner = TREE_OPERAND (inner, 0);
7998
7999 if (TREE_CODE (inner) == CALL_EXPR
8000 && (fndecl = get_callee_fndecl (inner))
8001 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
8002 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
8003 return arg0;
8004
8005 inner = inner_arg0;
8006 code = TREE_CODE (inner);
8007 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
8008 {
8009 tree op0 = TREE_OPERAND (inner, 0);
8010 tree op1 = TREE_OPERAND (inner, 1);
8011 arg1 = save_expr (arg1);
8012
8013 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
8014 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
8015 inner = build2 (code, TREE_TYPE (inner), op0, op1);
8016
8017 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
8018 }
8019
8020 /* If the argument isn't invariant then there's nothing else we can do. */
8021 if (!TREE_CONSTANT (inner_arg0))
8022 return NULL_TREE;
8023
8024 /* If we expect that a comparison against the argument will fold to
8025 a constant return the constant. In practice, this means a true
8026 constant or the address of a non-weak symbol. */
8027 inner = inner_arg0;
8028 STRIP_NOPS (inner);
8029 if (TREE_CODE (inner) == ADDR_EXPR)
8030 {
8031 do
8032 {
8033 inner = TREE_OPERAND (inner, 0);
8034 }
8035 while (TREE_CODE (inner) == COMPONENT_REF
8036 || TREE_CODE (inner) == ARRAY_REF);
8037 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
8038 return NULL_TREE;
8039 }
8040
8041 /* Otherwise, ARG0 already has the proper type for the return value. */
8042 return arg0;
8043 }
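/* The distribution step above rewrites, schematically,

       __builtin_expect (a && b, 1)

   into

       (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   with the != 0 comparisons supplied by build_builtin_expect_predicate,
   so each short-circuit arm carries its own prediction.  */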
8044
8045 /* Fold a call to __builtin_classify_type with argument ARG. */
8046
8047 static tree
8048 fold_builtin_classify_type (tree arg)
8049 {
8050 if (arg == 0)
8051 return build_int_cst (integer_type_node, no_type_class);
8052
8053 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
8054 }
8055
8056 /* Fold a call to __builtin_strlen with argument ARG. */
8057
8058 static tree
8059 fold_builtin_strlen (location_t loc, tree type, tree arg)
8060 {
8061 if (!validate_arg (arg, POINTER_TYPE))
8062 return NULL_TREE;
8063 else
8064 {
8065 tree len = c_strlen (arg, 0);
8066
8067 if (len)
8068 return fold_convert_loc (loc, type, len);
8069
8070 return NULL_TREE;
8071 }
8072 }
8073
8074 /* Fold a call to __builtin_inf or __builtin_huge_val. */
8075
8076 static tree
8077 fold_builtin_inf (location_t loc, tree type, int warn)
8078 {
8079 REAL_VALUE_TYPE real;
8080
8081 /* __builtin_inff is intended to be usable to define INFINITY on all
8082 targets. If an infinity is not available, INFINITY expands "to a
8083 positive constant of type float that overflows at translation
8084 time", footnote "In this case, using INFINITY will violate the
8085 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
8086 Thus we pedwarn to ensure this constraint violation is
8087 diagnosed. */
8088 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
8089 pedwarn (loc, 0, "target format does not support infinity");
8090
8091 real_inf (&real);
8092 return build_real (type, real);
8093 }
8094
8095 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
8096 NULL_TREE if no simplification can be made. */
8097
8098 static tree
8099 fold_builtin_sincos (location_t loc,
8100 tree arg0, tree arg1, tree arg2)
8101 {
8102 tree type;
8103 tree fndecl, call = NULL_TREE;
8104
8105 if (!validate_arg (arg0, REAL_TYPE)
8106 || !validate_arg (arg1, POINTER_TYPE)
8107 || !validate_arg (arg2, POINTER_TYPE))
8108 return NULL_TREE;
8109
8110 type = TREE_TYPE (arg0);
8111
8112 /* Calculate the result when the argument is a constant. */
8113 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
8114 if (fn == END_BUILTINS)
8115 return NULL_TREE;
8116
8117 /* Canonicalize sincos to cexpi. */
8118 if (TREE_CODE (arg0) == REAL_CST)
8119 {
8120 tree complex_type = build_complex_type (type);
8121 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
8122 }
8123 if (!call)
8124 {
8125 if (!targetm.libc_has_function (function_c99_math_complex)
8126 || !builtin_decl_implicit_p (fn))
8127 return NULL_TREE;
8128 fndecl = builtin_decl_explicit (fn);
8129 call = build_call_expr_loc (loc, fndecl, 1, arg0);
8130 call = builtin_save_expr (call);
8131 }
8132
8133 return build2 (COMPOUND_EXPR, void_type_node,
8134 build2 (MODIFY_EXPR, void_type_node,
8135 build_fold_indirect_ref_loc (loc, arg1),
8136 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
8137 build2 (MODIFY_EXPR, void_type_node,
8138 build_fold_indirect_ref_loc (loc, arg2),
8139 fold_build1_loc (loc, REALPART_EXPR, type, call)));
8140 }
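/* The canonicalization above turns, schematically,

       sincos (x, &s, &c);

   into the equivalent of

       _Complex double t = cexpi (x);
       s = __imag__ t;
       c = __real__ t;

   (given a C99 complex-capable libc), which lets later passes share
   one cexpi computation among multiple sin/cos uses of X.  */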
8141
8142 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8143 Return NULL_TREE if no simplification can be made. */
8144
8145 static tree
8146 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8147 {
8148 if (!validate_arg (arg1, POINTER_TYPE)
8149 || !validate_arg (arg2, POINTER_TYPE)
8150 || !validate_arg (len, INTEGER_TYPE))
8151 return NULL_TREE;
8152
8153 /* If the LEN parameter is zero, return zero. */
8154 if (integer_zerop (len))
8155 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8156 arg1, arg2);
8157
8158 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
8159 if (operand_equal_p (arg1, arg2, 0))
8160 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8161
8162 /* If the LEN parameter is one, return an expression corresponding to
8163 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
8164 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8165 {
8166 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8167 tree cst_uchar_ptr_node
8168 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8169
8170 tree ind1
8171 = fold_convert_loc (loc, integer_type_node,
8172 build1 (INDIRECT_REF, cst_uchar_node,
8173 fold_convert_loc (loc,
8174 cst_uchar_ptr_node,
8175 arg1)));
8176 tree ind2
8177 = fold_convert_loc (loc, integer_type_node,
8178 build1 (INDIRECT_REF, cst_uchar_node,
8179 fold_convert_loc (loc,
8180 cst_uchar_ptr_node,
8181 arg2)));
8182 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8183 }
8184
8185 return NULL_TREE;
8186 }
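/* The single-byte case produces, schematically,

       memcmp (p, q, 1)
         -> (int) *(const unsigned char *) p
            - (int) *(const unsigned char *) q

   which matches the library semantics, since memcmp compares the
   buffers as unsigned characters.  */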
8187
8188 /* Fold a call to builtin isascii with argument ARG. */
8189
8190 static tree
8191 fold_builtin_isascii (location_t loc, tree arg)
8192 {
8193 if (!validate_arg (arg, INTEGER_TYPE))
8194 return NULL_TREE;
8195 else
8196 {
8197 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
8198 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8199 build_int_cst (integer_type_node,
8200 ~ (unsigned HOST_WIDE_INT) 0x7f));
8201 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8202 arg, integer_zero_node);
8203 }
8204 }
8205
8206 /* Fold a call to builtin toascii with argument ARG. */
8207
8208 static tree
8209 fold_builtin_toascii (location_t loc, tree arg)
8210 {
8211 if (!validate_arg (arg, INTEGER_TYPE))
8212 return NULL_TREE;
8213
8214 /* Transform toascii(c) -> (c & 0x7f). */
8215 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8216 build_int_cst (integer_type_node, 0x7f));
8217 }
8218
8219 /* Fold a call to builtin isdigit with argument ARG. */
8220
8221 static tree
8222 fold_builtin_isdigit (location_t loc, tree arg)
8223 {
8224 if (!validate_arg (arg, INTEGER_TYPE))
8225 return NULL_TREE;
8226 else
8227 {
8228 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
8229 /* According to the C standard, isdigit is unaffected by locale.
8230 However, it definitely is affected by the target character set. */
8231 unsigned HOST_WIDE_INT target_digit0
8232 = lang_hooks.to_target_charset ('0');
8233
8234 if (target_digit0 == 0)
8235 return NULL_TREE;
8236
8237 arg = fold_convert_loc (loc, unsigned_type_node, arg);
8238 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8239 build_int_cst (unsigned_type_node, target_digit0));
8240 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8241 build_int_cst (unsigned_type_node, 9));
8242 }
8243 }
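/* A sketch of the isdigit fold (illustrative; assumes an ASCII-style
   target where '0' is 0x30):  C guarantees the digit characters are
   contiguous, so after converting to unsigned a single comparison
   covers both bounds, because values below '0' wrap around to huge
   numbers:

     int f (int c)
     {
       return __builtin_isdigit (c);   // -> (unsigned) c - 48 <= 9
     }  */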
8244
8245 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
8246
8247 static tree
8248 fold_builtin_fabs (location_t loc, tree arg, tree type)
8249 {
8250 if (!validate_arg (arg, REAL_TYPE))
8251 return NULL_TREE;
8252
8253 arg = fold_convert_loc (loc, type, arg);
8254 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8255 }
8256
8257 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
8258
8259 static tree
8260 fold_builtin_abs (location_t loc, tree arg, tree type)
8261 {
8262 if (!validate_arg (arg, INTEGER_TYPE))
8263 return NULL_TREE;
8264
8265 arg = fold_convert_loc (loc, type, arg);
8266 return fold_build1_loc (loc, ABS_EXPR, type, arg);
8267 }
8268
8269 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012]. */
8270
8271 static tree
8272 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
8273 {
8274 /* ??? Only expand to FMA_EXPR if it's directly supported. */
8275 if (validate_arg (arg0, REAL_TYPE)
8276 && validate_arg (arg1, REAL_TYPE)
8277 && validate_arg (arg2, REAL_TYPE)
8278 && optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
8279 return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
8280
8281 return NULL_TREE;
8282 }
8283
8284 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
8285
8286 static tree
8287 fold_builtin_carg (location_t loc, tree arg, tree type)
8288 {
8289 if (validate_arg (arg, COMPLEX_TYPE)
8290 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8291 {
8292 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8293
8294 if (atan2_fn)
8295 {
8296 tree new_arg = builtin_save_expr (arg);
8297 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8298 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8299 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8300 }
8301 }
8302
8303 return NULL_TREE;
8304 }
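/* A sketch of the carg fold (illustrative): the phase angle of
   a + bi is atan2 (b, a), so e.g.

     double f (_Complex double z)
     {
       return __builtin_carg (z);  // -> atan2 (__imag__ z, __real__ z)
     }

   builtin_save_expr above ensures the argument is evaluated only
   once even though both of its parts are used.  */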
8305
8306 /* Fold a call to builtin frexp; we can assume the base is 2.  */
8307
8308 static tree
8309 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8310 {
8311 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8312 return NULL_TREE;
8313
8314 STRIP_NOPS (arg0);
8315
8316 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8317 return NULL_TREE;
8318
8319 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8320
8321 /* Proceed if a valid pointer type was passed in. */
8322 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8323 {
8324 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8325 tree frac, exp;
8326
8327 switch (value->cl)
8328 {
8329 case rvc_zero:
8330 /* For +-0, return (*exp = 0, +-0). */
8331 exp = integer_zero_node;
8332 frac = arg0;
8333 break;
8334 case rvc_nan:
8335 case rvc_inf:
8336 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
8337 return omit_one_operand_loc (loc, rettype, arg0, arg1);
8338 case rvc_normal:
8339 {
8340 /* Since the frexp function always expects base 2, and in
8341 GCC normalized significands are already in the range
8342 [0.5, 1.0), we have exactly what frexp wants. */
8343 REAL_VALUE_TYPE frac_rvt = *value;
8344 SET_REAL_EXP (&frac_rvt, 0);
8345 frac = build_real (rettype, frac_rvt);
8346 exp = build_int_cst (integer_type_node, REAL_EXP (value));
8347 }
8348 break;
8349 default:
8350 gcc_unreachable ();
8351 }
8352
8353       /* Create the COMPOUND_EXPR (*arg1 = exp, frac).  */
8354 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8355 TREE_SIDE_EFFECTS (arg1) = 1;
8356 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8357 }
8358
8359 return NULL_TREE;
8360 }
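/* A constant-folding sketch (illustrative): GCC's normalized
   significands lie in [0.5, 1.0), matching frexp's convention, so
   with 8.0 = 0.5 * 2**4 a call such as

     int e;
     double frac = __builtin_frexp (8.0, &e);  // -> (e = 4, 0.5)

   folds to the COMPOUND_EXPR built above, storing 4 and yielding
   0.5.  */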
8361
8362 /* Fold a call to builtin modf. */
8363
8364 static tree
8365 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8366 {
8367 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8368 return NULL_TREE;
8369
8370 STRIP_NOPS (arg0);
8371
8372 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8373 return NULL_TREE;
8374
8375 arg1 = build_fold_indirect_ref_loc (loc, arg1);
8376
8377 /* Proceed if a valid pointer type was passed in. */
8378 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8379 {
8380 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8381 REAL_VALUE_TYPE trunc, frac;
8382
8383 switch (value->cl)
8384 {
8385 case rvc_nan:
8386 case rvc_zero:
8387 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
8388 trunc = frac = *value;
8389 break;
8390 case rvc_inf:
8391 /* For +-Inf, return (*arg1 = arg0, +-0). */
8392 frac = dconst0;
8393 frac.sign = value->sign;
8394 trunc = *value;
8395 break;
8396 case rvc_normal:
8397 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
8398 real_trunc (&trunc, VOIDmode, value);
8399 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8400 /* If the original number was negative and already
8401 integral, then the fractional part is -0.0. */
8402 if (value->sign && frac.cl == rvc_zero)
8403 frac.sign = value->sign;
8404 break;
8405 }
8406
8407 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8408 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8409 build_real (rettype, trunc));
8410 TREE_SIDE_EFFECTS (arg1) = 1;
8411 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8412 build_real (rettype, frac));
8413 }
8414
8415 return NULL_TREE;
8416 }
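/* A constant-folding sketch (illustrative):

     double i;
     double f1 = __builtin_modf (-3.5, &i);  // -> (i = -3.0, -0.5)
     double f2 = __builtin_modf (-2.0, &i);  // -> (i = -2.0, -0.0)

   The second case exercises the sign fixup above: a negative value
   that is already integral yields a fractional part of -0.0.  */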
8417
8418 /* Given a location LOC, an interclass builtin function decl FNDECL
8419    and its single argument ARG, return a folded expression computing
8420    the same, or NULL_TREE if we either couldn't or didn't want to fold
8421    (the latter happens if there's an RTL instruction available).  */
8422
8423 static tree
8424 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8425 {
8426 machine_mode mode;
8427
8428 if (!validate_arg (arg, REAL_TYPE))
8429 return NULL_TREE;
8430
8431 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8432 return NULL_TREE;
8433
8434 mode = TYPE_MODE (TREE_TYPE (arg));
8435
8436 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
8437
8438 /* If there is no optab, try generic code. */
8439 switch (DECL_FUNCTION_CODE (fndecl))
8440 {
8441 tree result;
8442
8443 CASE_FLT_FN (BUILT_IN_ISINF):
8444 {
8445 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
8446 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8447 tree type = TREE_TYPE (arg);
8448 REAL_VALUE_TYPE r;
8449 char buf[128];
8450
8451 if (is_ibm_extended)
8452 {
8453 /* NaN and Inf are encoded in the high-order double value
8454 only. The low-order value is not significant. */
8455 type = double_type_node;
8456 mode = DFmode;
8457 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8458 }
8459 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8460 real_from_string (&r, buf);
8461 result = build_call_expr (isgr_fn, 2,
8462 fold_build1_loc (loc, ABS_EXPR, type, arg),
8463 build_real (type, r));
8464 return result;
8465 }
8466 CASE_FLT_FN (BUILT_IN_FINITE):
8467 case BUILT_IN_ISFINITE:
8468 {
8469 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
8470 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8471 tree type = TREE_TYPE (arg);
8472 REAL_VALUE_TYPE r;
8473 char buf[128];
8474
8475 if (is_ibm_extended)
8476 {
8477 /* NaN and Inf are encoded in the high-order double value
8478 only. The low-order value is not significant. */
8479 type = double_type_node;
8480 mode = DFmode;
8481 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8482 }
8483 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8484 real_from_string (&r, buf);
8485 result = build_call_expr (isle_fn, 2,
8486 fold_build1_loc (loc, ABS_EXPR, type, arg),
8487 build_real (type, r));
8488 /*result = fold_build2_loc (loc, UNGT_EXPR,
8489 TREE_TYPE (TREE_TYPE (fndecl)),
8490 fold_build1_loc (loc, ABS_EXPR, type, arg),
8491 build_real (type, r));
8492 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8493 TREE_TYPE (TREE_TYPE (fndecl)),
8494 result);*/
8495 return result;
8496 }
8497 case BUILT_IN_ISNORMAL:
8498 {
8499 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
8500 islessequal(fabs(x),DBL_MAX). */
8501 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8502 tree type = TREE_TYPE (arg);
8503 tree orig_arg, max_exp, min_exp;
8504 machine_mode orig_mode = mode;
8505 REAL_VALUE_TYPE rmax, rmin;
8506 char buf[128];
8507
8508 orig_arg = arg = builtin_save_expr (arg);
8509 if (is_ibm_extended)
8510 {
8511 /* Use double to test the normal range of IBM extended
8512 precision. Emin for IBM extended precision is
8513 		 different from emin for IEEE double, being 53 higher
8514 since the low double exponent is at least 53 lower
8515 than the high double exponent. */
8516 type = double_type_node;
8517 mode = DFmode;
8518 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8519 }
8520 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
8521
8522 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8523 real_from_string (&rmax, buf);
8524 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
8525 real_from_string (&rmin, buf);
8526 max_exp = build_real (type, rmax);
8527 min_exp = build_real (type, rmin);
8528
8529 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
8530 if (is_ibm_extended)
8531 {
8532 /* Testing the high end of the range is done just using
8533 the high double, using the same test as isfinite().
8534 For the subnormal end of the range we first test the
8535 high double, then if its magnitude is equal to the
8536 limit of 0x1p-969, we test whether the low double is
8537 non-zero and opposite sign to the high double. */
8538 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
8539 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8540 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
8541 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
8542 arg, min_exp);
8543 tree as_complex = build1 (VIEW_CONVERT_EXPR,
8544 complex_double_type_node, orig_arg);
8545 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
8546 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
8547 tree zero = build_real (type, dconst0);
8548 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
8549 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
8550 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
8551 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
8552 fold_build3 (COND_EXPR,
8553 integer_type_node,
8554 hilt, logt, lolt));
8555 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
8556 eq_min, ok_lo);
8557 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
8558 gt_min, eq_min);
8559 }
8560 else
8561 {
8562 tree const isge_fn
8563 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
8564 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
8565 }
8566 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
8567 max_exp, min_exp);
8568 return result;
8569 }
8570 default:
8571 break;
8572 }
8573
8574 return NULL_TREE;
8575 }
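/* A summary sketch of the generic expansions above, for a double
   argument on an IEEE target (DBL_MAX and DBL_MIN stand for the
   mode's largest finite and smallest normal values):

     isinf (x)     -> isgreater (fabs (x), DBL_MAX)
     isfinite (x)  -> islessequal (fabs (x), DBL_MAX)
     isnormal (x)  -> isgreaterequal (fabs (x), DBL_MIN)
                      & islessequal (fabs (x), DBL_MAX)

   The is* comparison builtins are quiet, so a NaN argument makes all
   three yield 0 without raising an invalid-operand exception.  */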
8576
8577 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
8578 ARG is the argument for the call. */
8579
8580 static tree
8581 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
8582 {
8583 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8584
8585 if (!validate_arg (arg, REAL_TYPE))
8586 return NULL_TREE;
8587
8588 switch (builtin_index)
8589 {
8590 case BUILT_IN_ISINF:
8591 if (!HONOR_INFINITIES (arg))
8592 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8593
8594 return NULL_TREE;
8595
8596 case BUILT_IN_ISINF_SIGN:
8597 {
8598 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
8599 /* In a boolean context, GCC will fold the inner COND_EXPR to
8600 1. So e.g. "if (isinf_sign(x))" would be folded to just
8601 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
8602 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
8603 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
8604 tree tmp = NULL_TREE;
8605
8606 arg = builtin_save_expr (arg);
8607
8608 if (signbit_fn && isinf_fn)
8609 {
8610 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
8611 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
8612
8613 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8614 signbit_call, integer_zero_node);
8615 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8616 isinf_call, integer_zero_node);
8617
8618 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
8619 integer_minus_one_node, integer_one_node);
8620 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8621 isinf_call, tmp,
8622 integer_zero_node);
8623 }
8624
8625 return tmp;
8626 }
8627
8628 case BUILT_IN_ISFINITE:
8629 if (!HONOR_NANS (arg)
8630 && !HONOR_INFINITIES (arg))
8631 return omit_one_operand_loc (loc, type, integer_one_node, arg);
8632
8633 return NULL_TREE;
8634
8635 case BUILT_IN_ISNAN:
8636 if (!HONOR_NANS (arg))
8637 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8638
8639 {
8640 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
8641 if (is_ibm_extended)
8642 {
8643 /* NaN and Inf are encoded in the high-order double value
8644 only. The low-order value is not significant. */
8645 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
8646 }
8647 }
8648 arg = builtin_save_expr (arg);
8649 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
8650
8651 default:
8652 gcc_unreachable ();
8653 }
8654 }
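/* A sketch of the isnan fold (illustrative): when NaNs are honored,
   the call becomes a self-comparison using UNORDERED_EXPR, which is
   true exactly when the (single, saved) operand is a NaN:

     int f (double x)
     {
       return __builtin_isnan (x);   // -> x UNORDERED x
     }

   With -ffinite-math-only the earlier HONOR_NANS test instead folds
   the call to the constant 0.  */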
8655
8656 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
8657 This builtin will generate code to return the appropriate floating
8658 point classification depending on the value of the floating point
8659 number passed in. The possible return values must be supplied as
8660 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
8661    FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipsis is for exactly
8662 one floating point argument which is "type generic". */
8663
8664 static tree
8665 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
8666 {
8667 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
8668 arg, type, res, tmp;
8669 machine_mode mode;
8670 REAL_VALUE_TYPE r;
8671 char buf[128];
8672
8673 /* Verify the required arguments in the original call. */
8674 if (nargs != 6
8675 || !validate_arg (args[0], INTEGER_TYPE)
8676 || !validate_arg (args[1], INTEGER_TYPE)
8677 || !validate_arg (args[2], INTEGER_TYPE)
8678 || !validate_arg (args[3], INTEGER_TYPE)
8679 || !validate_arg (args[4], INTEGER_TYPE)
8680 || !validate_arg (args[5], REAL_TYPE))
8681 return NULL_TREE;
8682
8683 fp_nan = args[0];
8684 fp_infinite = args[1];
8685 fp_normal = args[2];
8686 fp_subnormal = args[3];
8687 fp_zero = args[4];
8688 arg = args[5];
8689 type = TREE_TYPE (arg);
8690 mode = TYPE_MODE (type);
8691 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
8692
8693 /* fpclassify(x) ->
8694 isnan(x) ? FP_NAN :
8695 (fabs(x) == Inf ? FP_INFINITE :
8696 (fabs(x) >= DBL_MIN ? FP_NORMAL :
8697 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
8698
8699 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8700 build_real (type, dconst0));
8701 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8702 tmp, fp_zero, fp_subnormal);
8703
8704 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
8705 real_from_string (&r, buf);
8706 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
8707 arg, build_real (type, r));
8708 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
8709
8710 if (HONOR_INFINITIES (mode))
8711 {
8712 real_inf (&r);
8713 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8714 build_real (type, r));
8715 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
8716 fp_infinite, res);
8717 }
8718
8719 if (HONOR_NANS (mode))
8720 {
8721 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
8722 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
8723 }
8724
8725 return res;
8726 }
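/* A freestanding reference version of the expansion built above
   (illustrative; my_fpclassify and the use of the <math.h> macros
   are hypothetical -- the real code emits the equivalent trees
   directly from the five caller-supplied integer arguments):

     #include <math.h>

     int my_fpclassify (double x)
     {
       double a = __builtin_fabs (x);
       return __builtin_isnan (x) ? FP_NAN
              : a == __builtin_inf () ? FP_INFINITE
              : a >= 0x1p-1022 ? FP_NORMAL   // DBL_MIN, i.e. 0x1p(emin-1)
              : a == 0.0 ? FP_ZERO : FP_SUBNORMAL;
     }  */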
8727
8728 /* Fold a call to an unordered comparison function such as
8729 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
8730 being called and ARG0 and ARG1 are the arguments for the call.
8731 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
8732 the opposite of the desired result. UNORDERED_CODE is used
8733 for modes that can hold NaNs and ORDERED_CODE is used for
8734 the rest. */
8735
8736 static tree
8737 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
8738 enum tree_code unordered_code,
8739 enum tree_code ordered_code)
8740 {
8741 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8742 enum tree_code code;
8743 tree type0, type1;
8744 enum tree_code code0, code1;
8745 tree cmp_type = NULL_TREE;
8746
8747 type0 = TREE_TYPE (arg0);
8748 type1 = TREE_TYPE (arg1);
8749
8750 code0 = TREE_CODE (type0);
8751 code1 = TREE_CODE (type1);
8752
8753 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
8754 /* Choose the wider of two real types. */
8755 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
8756 ? type0 : type1;
8757 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
8758 cmp_type = type0;
8759 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
8760 cmp_type = type1;
8761
8762 arg0 = fold_convert_loc (loc, cmp_type, arg0);
8763 arg1 = fold_convert_loc (loc, cmp_type, arg1);
8764
8765 if (unordered_code == UNORDERED_EXPR)
8766 {
8767 if (!HONOR_NANS (arg0))
8768 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
8769 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
8770 }
8771
8772 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
8773 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
8774 fold_build2_loc (loc, code, type, arg0, arg1));
8775 }
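/* A sketch of the fold (illustrative): each macro becomes the
   negation of the inverse comparison code passed in, e.g.

     isgreater (x, y)     -> !(x UNLE y)   // !(unordered || x <= y)
     islessgreater (x, y) -> !(x UNEQ y)   // !(unordered || x == y)

   so a NaN operand makes the UN* comparison true and the macro yield
   0, matching C99 7.12.14, without raising the invalid-operand
   exception that a raw '<' or '>' would.  */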
8776
8777 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
8778    arithmetic if it can never overflow, or into internal functions that
8779    return both the result of the arithmetic and an overflow flag in
8780    a complex integer result, or some other check for overflow.
8781 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
8782 checking part of that. */
8783
8784 static tree
8785 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
8786 tree arg0, tree arg1, tree arg2)
8787 {
8788 enum internal_fn ifn = IFN_LAST;
8789 /* The code of the expression corresponding to the type-generic
8790 built-in, or ERROR_MARK for the type-specific ones. */
8791 enum tree_code opcode = ERROR_MARK;
8792 bool ovf_only = false;
8793
8794 switch (fcode)
8795 {
8796 case BUILT_IN_ADD_OVERFLOW_P:
8797 ovf_only = true;
8798 /* FALLTHRU */
8799 case BUILT_IN_ADD_OVERFLOW:
8800 opcode = PLUS_EXPR;
8801 /* FALLTHRU */
8802 case BUILT_IN_SADD_OVERFLOW:
8803 case BUILT_IN_SADDL_OVERFLOW:
8804 case BUILT_IN_SADDLL_OVERFLOW:
8805 case BUILT_IN_UADD_OVERFLOW:
8806 case BUILT_IN_UADDL_OVERFLOW:
8807 case BUILT_IN_UADDLL_OVERFLOW:
8808 ifn = IFN_ADD_OVERFLOW;
8809 break;
8810 case BUILT_IN_SUB_OVERFLOW_P:
8811 ovf_only = true;
8812 /* FALLTHRU */
8813 case BUILT_IN_SUB_OVERFLOW:
8814 opcode = MINUS_EXPR;
8815 /* FALLTHRU */
8816 case BUILT_IN_SSUB_OVERFLOW:
8817 case BUILT_IN_SSUBL_OVERFLOW:
8818 case BUILT_IN_SSUBLL_OVERFLOW:
8819 case BUILT_IN_USUB_OVERFLOW:
8820 case BUILT_IN_USUBL_OVERFLOW:
8821 case BUILT_IN_USUBLL_OVERFLOW:
8822 ifn = IFN_SUB_OVERFLOW;
8823 break;
8824 case BUILT_IN_MUL_OVERFLOW_P:
8825 ovf_only = true;
8826 /* FALLTHRU */
8827 case BUILT_IN_MUL_OVERFLOW:
8828 opcode = MULT_EXPR;
8829 /* FALLTHRU */
8830 case BUILT_IN_SMUL_OVERFLOW:
8831 case BUILT_IN_SMULL_OVERFLOW:
8832 case BUILT_IN_SMULLL_OVERFLOW:
8833 case BUILT_IN_UMUL_OVERFLOW:
8834 case BUILT_IN_UMULL_OVERFLOW:
8835 case BUILT_IN_UMULLL_OVERFLOW:
8836 ifn = IFN_MUL_OVERFLOW;
8837 break;
8838 default:
8839 gcc_unreachable ();
8840 }
8841
8842 /* For the "generic" overloads, the first two arguments can have different
8843 types and the last argument determines the target type to use to check
8844 for overflow. The arguments of the other overloads all have the same
8845 type. */
8846 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
8847
8848 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
8849 arguments are constant, attempt to fold the built-in call into a constant
8850 expression indicating whether or not it detected an overflow. */
8851 if (ovf_only
8852 && TREE_CODE (arg0) == INTEGER_CST
8853 && TREE_CODE (arg1) == INTEGER_CST)
8854 /* Perform the computation in the target type and check for overflow. */
8855 return omit_one_operand_loc (loc, boolean_type_node,
8856 arith_overflowed_p (opcode, type, arg0, arg1)
8857 ? boolean_true_node : boolean_false_node,
8858 arg2);
8859
8860 tree ctype = build_complex_type (type);
8861 tree call = build_call_expr_internal_loc (loc, ifn, ctype,
8862 2, arg0, arg1);
8863 tree tgt = save_expr (call);
8864 tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
8865 tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
8866 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
8867
8868 if (ovf_only)
8869 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
8870
8871 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
8872 tree store
8873 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
8874 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
8875 }
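/* A usage sketch (illustrative): for

     _Bool f (int a, int b, int *res)
     {
       return __builtin_add_overflow (a, b, res);
     }

   the call folds to roughly

     tmp = .ADD_OVERFLOW (a, b);      // internal fn, complex int result
     *res = REALPART_EXPR <tmp>;
     return (_Bool) IMAGPART_EXPR <tmp>;

   while __builtin_add_overflow_p (1, 2, (int) 0), having constant
   operands, folds straight to the constant 0.  */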
8876
8877 /* Fold a call to __builtin_FILE to a constant string. */
8878
8879 static inline tree
8880 fold_builtin_FILE (location_t loc)
8881 {
8882 if (const char *fname = LOCATION_FILE (loc))
8883 {
8884 /* The documentation says this builtin is equivalent to the preprocessor
8885 __FILE__ macro so it appears appropriate to use the same file prefix
8886 mappings. */
8887 fname = remap_macro_filename (fname);
8888 return build_string_literal (strlen (fname) + 1, fname);
8889 }
8890
8891 return build_string_literal (1, "");
8892 }
8893
8894 /* Fold a call to __builtin_FUNCTION to a constant string. */
8895
8896 static inline tree
8897 fold_builtin_FUNCTION ()
8898 {
8899 const char *name = "";
8900
8901 if (current_function_decl)
8902 name = lang_hooks.decl_printable_name (current_function_decl, 0);
8903
8904 return build_string_literal (strlen (name) + 1, name);
8905 }
8906
8907 /* Fold a call to __builtin_LINE to an integer constant. */
8908
8909 static inline tree
8910 fold_builtin_LINE (location_t loc, tree type)
8911 {
8912 return build_int_cst (type, LOCATION_LINE (loc));
8913 }
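/* A usage sketch (illustrative): unlike the preprocessor macros
   __FILE__ and __LINE__, these builtins expand at the location of
   the call that finally appears after inlining and default-argument
   substitution, which is what makes C++ declarations such as

     void log_here (const char *file = __builtin_FILE (),
                    int line = __builtin_LINE ());

   report the caller's position rather than the header's.  */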
8914
8915 /* Fold a call to built-in function FNDECL with 0 arguments.
8916 This function returns NULL_TREE if no simplification was possible. */
8917
8918 static tree
8919 fold_builtin_0 (location_t loc, tree fndecl)
8920 {
8921 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8922 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8923 switch (fcode)
8924 {
8925 case BUILT_IN_FILE:
8926 return fold_builtin_FILE (loc);
8927
8928 case BUILT_IN_FUNCTION:
8929 return fold_builtin_FUNCTION ();
8930
8931 case BUILT_IN_LINE:
8932 return fold_builtin_LINE (loc, type);
8933
8934 CASE_FLT_FN (BUILT_IN_INF):
8935 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
8936 case BUILT_IN_INFD32:
8937 case BUILT_IN_INFD64:
8938 case BUILT_IN_INFD128:
8939 return fold_builtin_inf (loc, type, true);
8940
8941 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
8942 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
8943 return fold_builtin_inf (loc, type, false);
8944
8945 case BUILT_IN_CLASSIFY_TYPE:
8946 return fold_builtin_classify_type (NULL_TREE);
8947
8948 default:
8949 break;
8950 }
8951 return NULL_TREE;
8952 }
8953
8954 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
8955 This function returns NULL_TREE if no simplification was possible. */
8956
8957 static tree
8958 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
8959 {
8960 tree type = TREE_TYPE (TREE_TYPE (fndecl));
8961 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8962
8963 if (TREE_CODE (arg0) == ERROR_MARK)
8964 return NULL_TREE;
8965
8966 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
8967 return ret;
8968
8969 switch (fcode)
8970 {
8971 case BUILT_IN_CONSTANT_P:
8972 {
8973 tree val = fold_builtin_constant_p (arg0);
8974
8975 /* Gimplification will pull the CALL_EXPR for the builtin out of
8976 an if condition. When not optimizing, we'll not CSE it back.
8977 	   To avoid link-error type regressions, return false now.  */
8978 if (!val && !optimize)
8979 val = integer_zero_node;
8980
8981 return val;
8982 }
8983
8984 case BUILT_IN_CLASSIFY_TYPE:
8985 return fold_builtin_classify_type (arg0);
8986
8987 case BUILT_IN_STRLEN:
8988 return fold_builtin_strlen (loc, type, arg0);
8989
8990 CASE_FLT_FN (BUILT_IN_FABS):
8991 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
8992 case BUILT_IN_FABSD32:
8993 case BUILT_IN_FABSD64:
8994 case BUILT_IN_FABSD128:
8995 return fold_builtin_fabs (loc, arg0, type);
8996
8997 case BUILT_IN_ABS:
8998 case BUILT_IN_LABS:
8999 case BUILT_IN_LLABS:
9000 case BUILT_IN_IMAXABS:
9001 return fold_builtin_abs (loc, arg0, type);
9002
9003 CASE_FLT_FN (BUILT_IN_CONJ):
9004 if (validate_arg (arg0, COMPLEX_TYPE)
9005 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9006 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9007 break;
9008
9009 CASE_FLT_FN (BUILT_IN_CREAL):
9010 if (validate_arg (arg0, COMPLEX_TYPE)
9011 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9012 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9013 break;
9014
9015 CASE_FLT_FN (BUILT_IN_CIMAG):
9016 if (validate_arg (arg0, COMPLEX_TYPE)
9017 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9018 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9019 break;
9020
9021 CASE_FLT_FN (BUILT_IN_CARG):
9022 return fold_builtin_carg (loc, arg0, type);
9023
9024 case BUILT_IN_ISASCII:
9025 return fold_builtin_isascii (loc, arg0);
9026
9027 case BUILT_IN_TOASCII:
9028 return fold_builtin_toascii (loc, arg0);
9029
9030 case BUILT_IN_ISDIGIT:
9031 return fold_builtin_isdigit (loc, arg0);
9032
9033 CASE_FLT_FN (BUILT_IN_FINITE):
9034 case BUILT_IN_FINITED32:
9035 case BUILT_IN_FINITED64:
9036 case BUILT_IN_FINITED128:
9037 case BUILT_IN_ISFINITE:
9038 {
9039 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9040 if (ret)
9041 return ret;
9042 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9043 }
9044
9045 CASE_FLT_FN (BUILT_IN_ISINF):
9046 case BUILT_IN_ISINFD32:
9047 case BUILT_IN_ISINFD64:
9048 case BUILT_IN_ISINFD128:
9049 {
9050 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9051 if (ret)
9052 return ret;
9053 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9054 }
9055
9056 case BUILT_IN_ISNORMAL:
9057 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9058
9059 case BUILT_IN_ISINF_SIGN:
9060 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9061
9062 CASE_FLT_FN (BUILT_IN_ISNAN):
9063 case BUILT_IN_ISNAND32:
9064 case BUILT_IN_ISNAND64:
9065 case BUILT_IN_ISNAND128:
9066 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
9067
9068 case BUILT_IN_FREE:
9069 if (integer_zerop (arg0))
9070 return build_empty_stmt (loc);
9071 break;
9072
9073 default:
9074 break;
9075 }
9076
9077 return NULL_TREE;
9078
9079 }
9080
9081 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9082 This function returns NULL_TREE if no simplification was possible. */
9083
9084 static tree
9085 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
9086 {
9087 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9088 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9089
9090 if (TREE_CODE (arg0) == ERROR_MARK
9091 || TREE_CODE (arg1) == ERROR_MARK)
9092 return NULL_TREE;
9093
9094 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
9095 return ret;
9096
9097 switch (fcode)
9098 {
9099 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9100 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9101 if (validate_arg (arg0, REAL_TYPE)
9102 && validate_arg (arg1, POINTER_TYPE))
9103 return do_mpfr_lgamma_r (arg0, arg1, type);
9104 break;
9105
9106 CASE_FLT_FN (BUILT_IN_FREXP):
9107 return fold_builtin_frexp (loc, arg0, arg1, type);
9108
9109 CASE_FLT_FN (BUILT_IN_MODF):
9110 return fold_builtin_modf (loc, arg0, arg1, type);
9111
9112 case BUILT_IN_STRSPN:
9113 return fold_builtin_strspn (loc, arg0, arg1);
9114
9115 case BUILT_IN_STRCSPN:
9116 return fold_builtin_strcspn (loc, arg0, arg1);
9117
9118 case BUILT_IN_STRPBRK:
9119 return fold_builtin_strpbrk (loc, arg0, arg1, type);
9120
9121 case BUILT_IN_EXPECT:
9122 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
9123
9124 case BUILT_IN_ISGREATER:
9125 return fold_builtin_unordered_cmp (loc, fndecl,
9126 arg0, arg1, UNLE_EXPR, LE_EXPR);
9127 case BUILT_IN_ISGREATEREQUAL:
9128 return fold_builtin_unordered_cmp (loc, fndecl,
9129 arg0, arg1, UNLT_EXPR, LT_EXPR);
9130 case BUILT_IN_ISLESS:
9131 return fold_builtin_unordered_cmp (loc, fndecl,
9132 arg0, arg1, UNGE_EXPR, GE_EXPR);
9133 case BUILT_IN_ISLESSEQUAL:
9134 return fold_builtin_unordered_cmp (loc, fndecl,
9135 arg0, arg1, UNGT_EXPR, GT_EXPR);
9136 case BUILT_IN_ISLESSGREATER:
9137 return fold_builtin_unordered_cmp (loc, fndecl,
9138 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9139 case BUILT_IN_ISUNORDERED:
9140 return fold_builtin_unordered_cmp (loc, fndecl,
9141 arg0, arg1, UNORDERED_EXPR,
9142 NOP_EXPR);
9143
9144 /* We do the folding for va_start in the expander. */
9145 case BUILT_IN_VA_START:
9146 break;
9147
9148 case BUILT_IN_OBJECT_SIZE:
9149 return fold_builtin_object_size (arg0, arg1);
9150
9151 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9152 return fold_builtin_atomic_always_lock_free (arg0, arg1);
9153
9154 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9155 return fold_builtin_atomic_is_lock_free (arg0, arg1);
9156
9157 default:
9158 break;
9159 }
9160 return NULL_TREE;
9161 }
9162
9163 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9164 and ARG2.
9165 This function returns NULL_TREE if no simplification was possible. */
9166
9167 static tree
9168 fold_builtin_3 (location_t loc, tree fndecl,
9169 tree arg0, tree arg1, tree arg2)
9170 {
9171 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9172 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9173
9174 if (TREE_CODE (arg0) == ERROR_MARK
9175 || TREE_CODE (arg1) == ERROR_MARK
9176 || TREE_CODE (arg2) == ERROR_MARK)
9177 return NULL_TREE;
9178
9179 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
9180 arg0, arg1, arg2))
9181 return ret;
9182
9183 switch (fcode)
9184 {
9185
9186 CASE_FLT_FN (BUILT_IN_SINCOS):
9187 return fold_builtin_sincos (loc, arg0, arg1, arg2);
9188
9189 CASE_FLT_FN (BUILT_IN_FMA):
9190 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
9191 return fold_builtin_fma (loc, arg0, arg1, arg2, type);
9192
9193 CASE_FLT_FN (BUILT_IN_REMQUO):
9194 if (validate_arg (arg0, REAL_TYPE)
9195 && validate_arg (arg1, REAL_TYPE)
9196 && validate_arg (arg2, POINTER_TYPE))
9197 return do_mpfr_remquo (arg0, arg1, arg2);
9198 break;
9199
9200 case BUILT_IN_MEMCMP:
9201 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
9202
9203 case BUILT_IN_EXPECT:
9204 return fold_builtin_expect (loc, arg0, arg1, arg2);
9205
9206 case BUILT_IN_ADD_OVERFLOW:
9207 case BUILT_IN_SUB_OVERFLOW:
9208 case BUILT_IN_MUL_OVERFLOW:
9209 case BUILT_IN_ADD_OVERFLOW_P:
9210 case BUILT_IN_SUB_OVERFLOW_P:
9211 case BUILT_IN_MUL_OVERFLOW_P:
9212 case BUILT_IN_SADD_OVERFLOW:
9213 case BUILT_IN_SADDL_OVERFLOW:
9214 case BUILT_IN_SADDLL_OVERFLOW:
9215 case BUILT_IN_SSUB_OVERFLOW:
9216 case BUILT_IN_SSUBL_OVERFLOW:
9217 case BUILT_IN_SSUBLL_OVERFLOW:
9218 case BUILT_IN_SMUL_OVERFLOW:
9219 case BUILT_IN_SMULL_OVERFLOW:
9220 case BUILT_IN_SMULLL_OVERFLOW:
9221 case BUILT_IN_UADD_OVERFLOW:
9222 case BUILT_IN_UADDL_OVERFLOW:
9223 case BUILT_IN_UADDLL_OVERFLOW:
9224 case BUILT_IN_USUB_OVERFLOW:
9225 case BUILT_IN_USUBL_OVERFLOW:
9226 case BUILT_IN_USUBLL_OVERFLOW:
9227 case BUILT_IN_UMUL_OVERFLOW:
9228 case BUILT_IN_UMULL_OVERFLOW:
9229 case BUILT_IN_UMULLL_OVERFLOW:
9230 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9231
9232 default:
9233 break;
9234 }
9235 return NULL_TREE;
9236 }
9237
9238 /* Fold a call to built-in function FNDECL. ARGS is an array of NARGS
9239 arguments. IGNORE is true if the result of the
9240 function call is ignored. This function returns NULL_TREE if no
9241 simplification was possible. */
9242
9243 tree
9244 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
9245 {
9246 tree ret = NULL_TREE;
9247
9248 switch (nargs)
9249 {
9250 case 0:
9251 ret = fold_builtin_0 (loc, fndecl);
9252 break;
9253 case 1:
9254 ret = fold_builtin_1 (loc, fndecl, args[0]);
9255 break;
9256 case 2:
9257 ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
9258 break;
9259 case 3:
9260 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
9261 break;
9262 default:
9263 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
9264 break;
9265 }
9266 if (ret)
9267 {
9268 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9269 SET_EXPR_LOCATION (ret, loc);
9270 TREE_NO_WARNING (ret) = 1;
9271 return ret;
9272 }
9273 return NULL_TREE;
9274 }
9275
9276 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9277 list ARGS along with N new arguments in NEWARGS. SKIP is the number
9278 of arguments in ARGS to be omitted. OLDNARGS is the number of
9279 elements in ARGS. */
9280
9281 static tree
9282 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9283 int skip, tree fndecl, int n, va_list newargs)
9284 {
9285 int nargs = oldnargs - skip + n;
9286 tree *buffer;
9287
9288 if (n > 0)
9289 {
9290 int i, j;
9291
9292 buffer = XALLOCAVEC (tree, nargs);
9293 for (i = 0; i < n; i++)
9294 buffer[i] = va_arg (newargs, tree);
9295 for (j = skip; j < oldnargs; j++, i++)
9296 buffer[i] = args[j];
9297 }
9298 else
9299 buffer = args + skip;
9300
9301 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9302 }
9303
9304 /* Return true if FNDECL shouldn't be folded right now.
9305 If a built-in function has an inline attribute always_inline
9306 wrapper, defer folding it after always_inline functions have
9307 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
9308 might not be performed. */
9309
9310 bool
9311 avoid_folding_inline_builtin (tree fndecl)
9312 {
9313 return (DECL_DECLARED_INLINE_P (fndecl)
9314 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9315 && cfun
9316 && !cfun->always_inline_functions_inlined
9317 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
9318 }
9319
9320 /* A wrapper function for builtin folding that prevents warnings for
9321 "statement without effect" and the like, caused by removing the
9322 call node earlier than the warning is generated. */
9323
9324 tree
9325 fold_call_expr (location_t loc, tree exp, bool ignore)
9326 {
9327 tree ret = NULL_TREE;
9328 tree fndecl = get_callee_fndecl (exp);
9329 if (fndecl
9330 && TREE_CODE (fndecl) == FUNCTION_DECL
9331 && DECL_BUILT_IN (fndecl)
9332 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9333 yet. Defer folding until we see all the arguments
9334 (after inlining). */
9335 && !CALL_EXPR_VA_ARG_PACK (exp))
9336 {
9337 int nargs = call_expr_nargs (exp);
9338
9339 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9340 instead last argument is __builtin_va_arg_pack (). Defer folding
9341 even in that case, until arguments are finalized. */
9342 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9343 {
9344 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9345 if (fndecl2
9346 && TREE_CODE (fndecl2) == FUNCTION_DECL
9347 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9348 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9349 return NULL_TREE;
9350 }
9351
9352 if (avoid_folding_inline_builtin (fndecl))
9353 return NULL_TREE;
9354
9355 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9356 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9357 CALL_EXPR_ARGP (exp), ignore);
9358 else
9359 {
9360 tree *args = CALL_EXPR_ARGP (exp);
9361 ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9362 if (ret)
9363 return ret;
9364 }
9365 }
9366 return NULL_TREE;
9367 }
9368
9369 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9370 N arguments are passed in the array ARGARRAY. Return a folded
9371 expression or NULL_TREE if no simplification was possible. */
9372
9373 tree
9374 fold_builtin_call_array (location_t loc, tree,
9375 tree fn,
9376 int n,
9377 tree *argarray)
9378 {
9379 if (TREE_CODE (fn) != ADDR_EXPR)
9380 return NULL_TREE;
9381
9382 tree fndecl = TREE_OPERAND (fn, 0);
9383 if (TREE_CODE (fndecl) == FUNCTION_DECL
9384 && DECL_BUILT_IN (fndecl))
9385 {
9386 /* If last argument is __builtin_va_arg_pack (), arguments to this
9387 function are not finalized yet. Defer folding until they are. */
9388 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9389 {
9390 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9391 if (fndecl2
9392 && TREE_CODE (fndecl2) == FUNCTION_DECL
9393 && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
9394 && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
9395 return NULL_TREE;
9396 }
9397 if (avoid_folding_inline_builtin (fndecl))
9398 return NULL_TREE;
9399 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9400 return targetm.fold_builtin (fndecl, n, argarray, false);
9401 else
9402 return fold_builtin_n (loc, fndecl, argarray, n, false);
9403 }
9404
9405 return NULL_TREE;
9406 }
9407
9408 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9409 along with N new arguments specified as the "..." parameters. SKIP
9410 is the number of arguments in EXP to be omitted. This function is used
9411 to do varargs-to-varargs transformations. */
9412
9413 static tree
9414 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9415 {
9416 va_list ap;
9417 tree t;
9418
9419 va_start (ap, n);
9420 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9421 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9422 va_end (ap);
9423
9424 return t;
9425 }
9426
9427 /* Validate a single argument ARG against a tree code CODE representing
9428 a type. Return true when argument is valid. */
9429
9430 static bool
9431 validate_arg (const_tree arg, enum tree_code code)
9432 {
9433 if (!arg)
9434 return false;
9435 else if (code == POINTER_TYPE)
9436 return POINTER_TYPE_P (TREE_TYPE (arg));
9437 else if (code == INTEGER_TYPE)
9438 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
9439 return code == TREE_CODE (TREE_TYPE (arg));
9440 }
9441
9442 /* This function validates the types of a function call argument list
9443 against a specified list of tree_codes. If the last specifier is a 0,
9444    that represents an ellipsis; otherwise the last specifier must be a
9445 VOID_TYPE.
9446
9447 This is the GIMPLE version of validate_arglist. Eventually we want to
9448 completely convert builtins.c to work from GIMPLEs and the tree based
9449 validate_arglist will then be removed. */
9450
9451 bool
9452 validate_gimple_arglist (const gcall *call, ...)
9453 {
9454 enum tree_code code;
9455   bool res = false;
9456 va_list ap;
9457 const_tree arg;
9458 size_t i;
9459
9460 va_start (ap, call);
9461 i = 0;
9462
9463 do
9464 {
9465 code = (enum tree_code) va_arg (ap, int);
9466 switch (code)
9467 {
9468 case 0:
9469 	  /* This signifies an ellipsis; any further arguments are all ok.  */
9470 res = true;
9471 goto end;
9472 case VOID_TYPE:
9473 /* This signifies an endlink, if no arguments remain, return
9474 true, otherwise return false. */
9475 res = (i == gimple_call_num_args (call));
9476 goto end;
9477 default:
9478 /* If no parameters remain or the parameter's code does not
9479 match the specified code, return false. Otherwise continue
9480 checking any remaining arguments. */
9481 arg = gimple_call_arg (call, i++);
9482 if (!validate_arg (arg, code))
9483 goto end;
9484 break;
9485 }
9486 }
9487 while (1);
9488
9489   /* We need gotos here since we can have only one va_end call in the
9490      function.  */
9491 end: ;
9492 va_end (ap);
9493
9494 return res;
9495 }
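/* A call sketch (illustrative): the variadic part is a list of
   tree_code values terminated by VOID_TYPE, or by 0 to accept any
   trailing arguments, e.g.

     if (!validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE,
                                   VOID_TYPE))
       return false;   // requires exactly (pointer, integer)  */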
9496
9497 /* Default target-specific builtin expander that does nothing. */
9498
9499 rtx
9500 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
9501 rtx target ATTRIBUTE_UNUSED,
9502 rtx subtarget ATTRIBUTE_UNUSED,
9503 machine_mode mode ATTRIBUTE_UNUSED,
9504 int ignore ATTRIBUTE_UNUSED)
9505 {
9506 return NULL_RTX;
9507 }
9508
9509 /* Returns true if EXP represents data that would potentially reside
9510 in a readonly section. */
9511
9512 bool
9513 readonly_data_expr (tree exp)
9514 {
9515 STRIP_NOPS (exp);
9516
9517 if (TREE_CODE (exp) != ADDR_EXPR)
9518 return false;
9519
9520 exp = get_base_address (TREE_OPERAND (exp, 0));
9521 if (!exp)
9522 return false;
9523
9524 /* Make sure we call decl_readonly_section only for trees it
9525 can handle (since it returns true for everything it doesn't
9526 understand). */
9527 if (TREE_CODE (exp) == STRING_CST
9528 || TREE_CODE (exp) == CONSTRUCTOR
9529 || (VAR_P (exp) && TREE_STATIC (exp)))
9530 return decl_readonly_section (exp, 0);
9531 else
9532 return false;
9533 }
9534
9535 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
9536 to the call, and TYPE is its return type.
9537
9538 Return NULL_TREE if no simplification was possible, otherwise return the
9539 simplified form of the call as a tree.
9540
9541 The simplified form may be a constant or other expression which
9542 computes the same value, but in a more efficient manner (including
9543 calls to other builtin functions).
9544
9545 The call may contain arguments which need to be evaluated, but
9546 which are not useful to determine the result of the call. In
9547 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9548 COMPOUND_EXPR will be an argument which must be evaluated.
9549 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9550 COMPOUND_EXPR in the chain will contain the tree for the simplified
9551 form of the builtin function call. */
9552
9553 static tree
9554 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
9555 {
9556 if (!validate_arg (s1, POINTER_TYPE)
9557 || !validate_arg (s2, POINTER_TYPE))
9558 return NULL_TREE;
9559 else
9560 {
9561 tree fn;
9562 const char *p1, *p2;
9563
9564 p2 = c_getstr (s2);
9565 if (p2 == NULL)
9566 return NULL_TREE;
9567
9568 p1 = c_getstr (s1);
9569 if (p1 != NULL)
9570 {
9571 const char *r = strpbrk (p1, p2);
9572 tree tem;
9573
9574 if (r == NULL)
9575 return build_int_cst (TREE_TYPE (s1), 0);
9576
9577 /* Return an offset into the constant string argument. */
9578 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9579 return fold_convert_loc (loc, type, tem);
9580 }
9581
9582 if (p2[0] == '\0')
9583 /* strpbrk(x, "") == NULL.
9584 Evaluate and ignore s1 in case it had side-effects. */
9585 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
9586
9587 if (p2[1] != '\0')
9588 return NULL_TREE; /* Really call strpbrk. */
9589
9590 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9591 if (!fn)
9592 return NULL_TREE;
9593
9594 /* New argument list transforming strpbrk(s1, s2) to
9595 strchr(s1, s2[0]). */
9596 return build_call_expr_loc (loc, fn, 2, s1,
9597 build_int_cst (integer_type_node, p2[0]));
9598 }
9599 }
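/* A summary sketch of the folds above (illustrative):

     strpbrk (s, "")     -> NULL             // s kept for side effects
     strpbrk (s, "c")    -> strchr (s, 'c')
     strpbrk ("ab", "b") -> "ab" + 1         // both strings constant  */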
9600
9601 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
9602 to the call.
9603
9604 Return NULL_TREE if no simplification was possible, otherwise return the
9605 simplified form of the call as a tree.
9606
9607 The simplified form may be a constant or other expression which
9608 computes the same value, but in a more efficient manner (including
9609 calls to other builtin functions).
9610
9611 The call may contain arguments which need to be evaluated, but
9612 which are not useful to determine the result of the call. In
9613 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9614 COMPOUND_EXPR will be an argument which must be evaluated.
9615 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9616 COMPOUND_EXPR in the chain will contain the tree for the simplified
9617 form of the builtin function call. */
9618
9619 static tree
9620 fold_builtin_strspn (location_t loc, tree s1, tree s2)
9621 {
9622 if (!validate_arg (s1, POINTER_TYPE)
9623 || !validate_arg (s2, POINTER_TYPE))
9624 return NULL_TREE;
9625 else
9626 {
9627 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9628
9629 /* If either argument is "", return NULL_TREE. */
9630 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9631 /* Evaluate and ignore both arguments in case either one has
9632 side-effects. */
9633 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9634 s1, s2);
9635 return NULL_TREE;
9636 }
9637 }
9638
9639 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
9640 to the call.
9641
9642 Return NULL_TREE if no simplification was possible, otherwise return the
9643 simplified form of the call as a tree.
9644
9645 The simplified form may be a constant or other expression which
9646 computes the same value, but in a more efficient manner (including
9647 calls to other builtin functions).
9648
9649 The call may contain arguments which need to be evaluated, but
9650 which are not useful to determine the result of the call. In
9651 this case we return a chain of COMPOUND_EXPRs. The LHS of each
9652 COMPOUND_EXPR will be an argument which must be evaluated.
9653 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
9654 COMPOUND_EXPR in the chain will contain the tree for the simplified
9655 form of the builtin function call. */
9656
9657 static tree
9658 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
9659 {
9660 if (!validate_arg (s1, POINTER_TYPE)
9661 || !validate_arg (s2, POINTER_TYPE))
9662 return NULL_TREE;
9663 else
9664 {
9665 /* If the first argument is "", return NULL_TREE. */
9666 const char *p1 = c_getstr (s1);
9667 if (p1 && *p1 == '\0')
9668 {
9669 /* Evaluate and ignore argument s2 in case it has
9670 side-effects. */
9671 return omit_one_operand_loc (loc, size_type_node,
9672 size_zero_node, s2);
9673 }
9674
9675 /* If the second argument is "", return __builtin_strlen(s1). */
9676 const char *p2 = c_getstr (s2);
9677 if (p2 && *p2 == '\0')
9678 {
9679 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
9680
9681 /* If the replacement _DECL isn't initialized, don't do the
9682 transformation. */
9683 if (!fn)
9684 return NULL_TREE;
9685
9686 return build_call_expr_loc (loc, fn, 1, s1);
9687 }
9688 return NULL_TREE;
9689 }
9690 }
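/* A summary sketch of the strspn and strcspn folds above
   (illustrative; discarded operands are still evaluated for side
   effects):

     strspn (s, "")   -> 0               // empty accept set
     strspn ("", s)   -> 0               // empty string
     strcspn ("", s)  -> 0
     strcspn (s, "")  -> strlen (s)      // nothing to reject  */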
9691
9692 /* Fold the next_arg or va_start call EXP.  Returns true if an error
9693    was produced, false otherwise.  This is done so that we don't output
9694    the error or warning two or three times.  */
9695
9696 bool
9697 fold_builtin_next_arg (tree exp, bool va_start_p)
9698 {
9699 tree fntype = TREE_TYPE (current_function_decl);
9700 int nargs = call_expr_nargs (exp);
9701 tree arg;
9702   /* There is a good chance the current input_location points inside the
9703 definition of the va_start macro (perhaps on the token for
9704 builtin) in a system header, so warnings will not be emitted.
9705 Use the location in real source code. */
9706 source_location current_location =
9707 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
9708 NULL);
9709
9710 if (!stdarg_p (fntype))
9711 {
9712 error ("%<va_start%> used in function with fixed args");
9713 return true;
9714 }
9715
9716 if (va_start_p)
9717 {
9718       if (nargs != 2)
9719 {
9720 error ("wrong number of arguments to function %<va_start%>");
9721 return true;
9722 }
9723 arg = CALL_EXPR_ARG (exp, 1);
9724 }
9725   /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
9726      once we have checked the arguments and, if needed, issued a warning.  */
9727 else
9728 {
9729 if (nargs == 0)
9730 {
9731 /* Evidently an out of date version of <stdarg.h>; can't validate
9732 va_start's second argument, but can still work as intended. */
9733 warning_at (current_location,
9734 OPT_Wvarargs,
9735 "%<__builtin_next_arg%> called without an argument");
9736 return true;
9737 }
9738 else if (nargs > 1)
9739 {
9740 error ("wrong number of arguments to function %<__builtin_next_arg%>");
9741 return true;
9742 }
9743 arg = CALL_EXPR_ARG (exp, 0);
9744 }
9745
9746 if (TREE_CODE (arg) == SSA_NAME)
9747 arg = SSA_NAME_VAR (arg);
9748
9749 /* We destructively modify the call to be __builtin_va_start (ap, 0)
9750 or __builtin_next_arg (0) the first time we see it, after checking
9751 the arguments and if needed issuing a warning. */
9752 if (!integer_zerop (arg))
9753 {
9754 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
9755
9756 /* Strip off all nops for the sake of the comparison. This
9757 is not quite the same as STRIP_NOPS. It does more.
9758 We must also strip off INDIRECT_EXPR for C++ reference
9759 parameters. */
9760 while (CONVERT_EXPR_P (arg)
9761 || TREE_CODE (arg) == INDIRECT_REF)
9762 arg = TREE_OPERAND (arg, 0);
9763 if (arg != last_parm)
9764 {
9765 	  /* FIXME: Sometimes the tree optimizers can give us something
9766 	     other than the last argument even though the user used the
9767 	     last argument.  We just warn and set the arg to be the last
9768 	     argument so that we will not get wrong code because of
9769 	     it.  */
9770 warning_at (current_location,
9771 OPT_Wvarargs,
9772 "second parameter of %<va_start%> not last named argument");
9773 }
9774
9775 /* Undefined by C99 7.15.1.4p4 (va_start):
9776 "If the parameter parmN is declared with the register storage
9777 class, with a function or array type, or with a type that is
9778 not compatible with the type that results after application of
9779 the default argument promotions, the behavior is undefined."
9780 */
9781 else if (DECL_REGISTER (arg))
9782 {
9783 warning_at (current_location,
9784 OPT_Wvarargs,
9785 "undefined behavior when second parameter of "
9786 "%<va_start%> is declared with %<register%> storage");
9787 }
9788
9789 /* We want to verify the second parameter just once before the tree
9790 optimizers are run and then avoid keeping it in the tree,
9791 as otherwise we could warn even for correct code like:
9792 void foo (int i, ...)
9793 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
9794 if (va_start_p)
9795 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
9796 else
9797 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
9798 }
9799 return false;
9800 }
9801
9802
9803 /* Expand a call EXP to __builtin_object_size. */
9804
9805 static rtx
9806 expand_builtin_object_size (tree exp)
9807 {
9808 tree ost;
9809 int object_size_type;
9810 tree fndecl = get_callee_fndecl (exp);
9811
9812 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9813 {
9814 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
9815 exp, fndecl);
9816 expand_builtin_trap ();
9817 return const0_rtx;
9818 }
9819
9820 ost = CALL_EXPR_ARG (exp, 1);
9821 STRIP_NOPS (ost);
9822
9823 if (TREE_CODE (ost) != INTEGER_CST
9824 || tree_int_cst_sgn (ost) < 0
9825 || compare_tree_int (ost, 3) > 0)
9826 {
9827 error ("%Klast argument of %qD is not integer constant between 0 and 3",
9828 exp, fndecl);
9829 expand_builtin_trap ();
9830 return const0_rtx;
9831 }
9832
9833 object_size_type = tree_to_shwi (ost);
9834
9835 return object_size_type < 2 ? constm1_rtx : const0_rtx;
9836 }
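/* A usage sketch (illustrative): by the time expansion runs here
   with no size information recovered, the conservative answers are
   returned.  Types 0 and 1 request an upper bound on the object
   size, 2 and 3 a lower bound, hence the object_size_type < 2 test
   above:

     __builtin_object_size (p, 0)   // -> (size_t) -1, "unknown maximum"
     __builtin_object_size (p, 2)   // -> (size_t)  0, "unknown minimum"  */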
9837
9838 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
9839 FCODE is the BUILT_IN_* to use.
9840 Return NULL_RTX if we failed; the caller should emit a normal call,
9841 otherwise try to get the result in TARGET, if convenient (and in
9842 mode MODE if that's convenient). */
9843
9844 static rtx
9845 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
9846 enum built_in_function fcode)
9847 {
9848 if (!validate_arglist (exp,
9849 POINTER_TYPE,
9850 fcode == BUILT_IN_MEMSET_CHK
9851 ? INTEGER_TYPE : POINTER_TYPE,
9852 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
9853 return NULL_RTX;
9854
9855 tree dest = CALL_EXPR_ARG (exp, 0);
9856 tree src = CALL_EXPR_ARG (exp, 1);
9857 tree len = CALL_EXPR_ARG (exp, 2);
9858 tree size = CALL_EXPR_ARG (exp, 3);
9859
9860 bool sizes_ok = check_access (exp, dest, src, len, /*maxread=*/NULL_TREE,
9861 /*str=*/NULL_TREE, size);
9862
9863 if (!tree_fits_uhwi_p (size))
9864 return NULL_RTX;
9865
9866 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
9867 {
9868 /* Avoid transforming the checking call to an ordinary one when
9869 an overflow has been detected or when the call couldn't be
9870 validated because the size is not constant. */
9871 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
9872 return NULL_RTX;
9873
9874 tree fn = NULL_TREE;
9875 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
9876 mem{cpy,pcpy,move,set} is available. */
9877 switch (fcode)
9878 {
9879 case BUILT_IN_MEMCPY_CHK:
9880 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
9881 break;
9882 case BUILT_IN_MEMPCPY_CHK:
9883 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
9884 break;
9885 case BUILT_IN_MEMMOVE_CHK:
9886 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
9887 break;
9888 case BUILT_IN_MEMSET_CHK:
9889 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
9890 break;
9891 default:
9892 break;
9893 }
9894
9895 if (! fn)
9896 return NULL_RTX;
9897
9898 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
9899 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9900 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9901 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9902 }
9903 else if (fcode == BUILT_IN_MEMSET_CHK)
9904 return NULL_RTX;
9905 else
9906 {
9907 unsigned int dest_align = get_pointer_alignment (dest);
9908
9909 /* If DEST is not a pointer type, call the normal function. */
9910 if (dest_align == 0)
9911 return NULL_RTX;
9912
9913 /* If SRC and DEST are the same (and not volatile), do nothing. */
9914 if (operand_equal_p (src, dest, 0))
9915 {
9916 tree expr;
9917
9918 if (fcode != BUILT_IN_MEMPCPY_CHK)
9919 {
9920 /* Evaluate and ignore LEN in case it has side-effects. */
9921 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
9922 return expand_expr (dest, target, mode, EXPAND_NORMAL);
9923 }
9924
9925 expr = fold_build_pointer_plus (dest, len);
9926 return expand_expr (expr, target, mode, EXPAND_NORMAL);
9927 }
9928
9929 /* __memmove_chk special case. */
9930 if (fcode == BUILT_IN_MEMMOVE_CHK)
9931 {
9932 unsigned int src_align = get_pointer_alignment (src);
9933
9934 if (src_align == 0)
9935 return NULL_RTX;
9936
9937 /* If src is categorized for a readonly section we can use
9938 normal __memcpy_chk. */
9939 if (readonly_data_expr (src))
9940 {
9941 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
9942 if (!fn)
9943 return NULL_RTX;
9944 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
9945 dest, src, len, size);
9946 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9947 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9948 return expand_expr (fn, target, mode, EXPAND_NORMAL);
9949 }
9950 }
9951 return NULL_RTX;
9952 }
9953 }
9954
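/* Illustrative sketch (hypothetical user code, not part of GCC): a
   fortified wrapper such as glibc's memcpy under _FORTIFY_SOURCE boils
   down to a call like

     char buf[8];
     __builtin___memcpy_chk (buf, src, n,
			     __builtin_object_size (buf, 0));

   When N is a compile-time constant that check_access accepts against
   the known object size, the expansion above drops the check and emits
   a plain memcpy.  When an overflow is detected or the size is not
   constant, NULL_RTX is returned and the library __memcpy_chk runs the
   check at run time instead.  */
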
/* Emit warning if a buffer overflow is detected at compile time.  */

static void
maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
{
  /* The source string.  */
  tree srcstr = NULL_TREE;
  /* The size of the destination object.  */
  tree objsize = NULL_TREE;
  /* The string that is being concatenated with (as in __strcat_chk)
     or null if it isn't.  */
  tree catstr = NULL_TREE;
  /* The maximum length of the source sequence in a bounded operation
     (such as __strncat_chk) or null if the operation isn't bounded
     (such as __strcat_chk).  */
  tree maxread = NULL_TREE;
  /* The exact size of the access (such as in __strncpy_chk).  */
  tree size = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      srcstr = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 2);
      break;

    case BUILT_IN_STRCAT_CHK:
      /* For __strcat_chk the warning will be emitted only if overflowing
	 by at least strlen (dest) + 1 bytes.  */
      catstr = CALL_EXPR_ARG (exp, 0);
      srcstr = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 2);
      break;

    case BUILT_IN_STRNCAT_CHK:
      catstr = CALL_EXPR_ARG (exp, 0);
      srcstr = CALL_EXPR_ARG (exp, 1);
      maxread = CALL_EXPR_ARG (exp, 2);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;

    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      srcstr = CALL_EXPR_ARG (exp, 1);
      size = CALL_EXPR_ARG (exp, 2);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      maxread = CALL_EXPR_ARG (exp, 1);
      objsize = CALL_EXPR_ARG (exp, 3);
      break;

    default:
      gcc_unreachable ();
    }

  if (catstr && maxread)
    {
      /* Check __strncat_chk.  There is no way to determine the length
	 of the string to which the source string is being appended so
	 just warn when the length of the source string is not known.  */
      check_strncat_sizes (exp, objsize);
      return;
    }

  /* The destination argument is the first one for all built-ins above.  */
  tree dst = CALL_EXPR_ARG (exp, 0);

  check_access (exp, dst, srcstr, size, maxread, srcstr, objsize);
}

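/* For example (hypothetical user code), the call below is diagnosed at
   compile time because the 6-byte source, including the terminating
   nul, cannot fit in the 4-byte destination:

     char d[4];
     __builtin___strcpy_chk (d, "hello",
			     __builtin_object_size (d, 1));
*/
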
/* Emit warning if a buffer overflow is detected at compile time
   in __sprintf_chk/__vsprintf_chk calls.  */

static void
maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
{
  tree size, len, fmt;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return;
  size = CALL_EXPR_ARG (exp, 2);
  fmt = CALL_EXPR_ARG (exp, 3);

  if (!tree_fits_uhwi_p (size) || integer_all_onesp (size))
    return;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return;

  if (!init_target_chars ())
    return;

  /* If the format doesn't contain % args or %%, we know its size.  */
  if (strchr (fmt_str, target_percent) == 0)
    len = build_int_cstu (size_type_node, strlen (fmt_str));
  /* If the format is "%s" and the first variadic argument is a string
     literal, we know the size too.  */
  else if (fcode == BUILT_IN_SPRINTF_CHK
	   && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree arg;

      if (nargs < 5)
	return;
      arg = CALL_EXPR_ARG (exp, 4);
      if (!POINTER_TYPE_P (TREE_TYPE (arg)))
	return;

      len = c_strlen (arg, 1);
      if (!len || !tree_fits_uhwi_p (len))
	return;
    }
  else
    return;

  /* Add one for the terminating nul.  */
  len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);

  check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, /*size=*/NULL_TREE,
		/*maxread=*/NULL_TREE, len, size);
}

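/* For example (hypothetical user code), the known result length of the
   format below is strlen ("hello") + 1 == 6, which exceeds the 4-byte
   destination and triggers the warning:

     char d[4];
     __builtin___sprintf_chk (d, 0, __builtin_object_size (d, 1),
			      "%s", "hello");
*/
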
/* Emit warning if free is called with the address of a variable.  */

static void
maybe_emit_free_warning (tree exp)
{
  tree arg = CALL_EXPR_ARG (exp, 0);

  STRIP_NOPS (arg);
  if (TREE_CODE (arg) != ADDR_EXPR)
    return;

  arg = get_base_address (TREE_OPERAND (arg, 0));
  if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
    return;

  if (SSA_VAR_P (arg))
    warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
		"%Kattempt to free a non-heap object %qD", exp, arg);
  else
    warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
		"%Kattempt to free a non-heap object", exp);
}

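/* For example (hypothetical user code), the call below gets the
   -Wfree-nonheap-object warning because the argument is the address of
   a local variable rather than a heap allocation:

     int x;
     free (&x);   // warning: attempt to free a non-heap object 'x'
*/
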
/* Fold a call to __builtin_object_size with arguments PTR and OST,
   if possible.  */

static tree
fold_builtin_object_size (tree ptr, tree ost)
{
  unsigned HOST_WIDE_INT bytes;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_to_shwi (ost);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      compute_builtin_object_size (ptr, object_size_type, &bytes);
      if (wi::fits_to_tree_p (bytes, size_type_node))
	return build_int_cstu (size_type_node, bytes);
    }
  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      /* If the object size is not known yet, delay folding until
	 later.  Maybe subsequent passes will help determine it.  */
      if (compute_builtin_object_size (ptr, object_size_type, &bytes)
	  && wi::fits_to_tree_p (bytes, size_type_node))
	return build_int_cstu (size_type_node, bytes);
    }

  return NULL_TREE;
}

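/* Illustrative sketch (hypothetical user code) of the folding above.
   With a constant address the size of the remaining tail folds
   directly:

     char buf[32];
     size_t n = __builtin_object_size (&buf[8], 0);  // folds to 24

   With a side-effecting argument (F, P and Q are hypothetical) the
   call folds without evaluating the argument, per the documented
   semantics:

     size_t m = __builtin_object_size (f () ? p : q, 0); // (size_t) -1
*/
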
/* Builtins with folding operations that operate on "..." arguments
   need special handling; we need to store the arguments in a convenient
   data structure before attempting any folding.  Fortunately there are
   only a few builtins that fall into this category.  FNDECL is the
   function; ARGS and NARGS give the arguments of the call.  */

static tree
fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  tree ret = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_FPCLASSIFY:
      ret = fold_builtin_fpclassify (loc, args, nargs);
      break;

    default:
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}

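/* For example (hypothetical user code, FP_* macros from <math.h>), a
   constant argument lets __builtin_fpclassify fold to one of the
   caller-supplied class values:

     int k = __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
				   FP_SUBNORMAL, FP_ZERO, 1.0);
     // folds to FP_NORMAL, since 1.0 is a normal number
*/
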
/* Initialize format string characters in the target charset.  */

bool
init_target_chars (void)
{
  static bool init;
  if (!init)
    {
      target_newline = lang_hooks.to_target_charset ('\n');
      target_percent = lang_hooks.to_target_charset ('%');
      target_c = lang_hooks.to_target_charset ('c');
      target_s = lang_hooks.to_target_charset ('s');
      if (target_newline == 0 || target_percent == 0 || target_c == 0
	  || target_s == 0)
	return false;

      target_percent_c[0] = target_percent;
      target_percent_c[1] = target_c;
      target_percent_c[2] = '\0';

      target_percent_s[0] = target_percent;
      target_percent_s[1] = target_s;
      target_percent_s[2] = '\0';

      target_percent_s_newline[0] = target_percent;
      target_percent_s_newline[1] = target_s;
      target_percent_s_newline[2] = target_newline;
      target_percent_s_newline[3] = '\0';

      init = true;
    }
  return true;
}

/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      real_from_mpfr (&rr, m, type, GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value;
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (real_isfinite (&rr)
	  && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
	{
	  REAL_VALUE_TYPE rmode;

	  real_convert (&rmode, TYPE_MODE (type), &rr);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (real_identical (&rmode, &rr))
	    return build_real (type, rmode);
	}
    }
  return NULL_TREE;
}

/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail;
   if FORCE_CONVERT is true, bypass the checks.  */

static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
	  && !mpfr_overflow_p () && !mpfr_underflow_p ()
	  && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values;
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (force_convert
	  || (real_isfinite (&re) && real_isfinite (&im)
	      && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
	      && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
	{
	  REAL_VALUE_TYPE re_mode, im_mode;

	  real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
	  real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (force_convert
	      || (real_identical (&re_mode, &re)
		  && real_identical (&im_mode, &im)))
	    return build_complex (type, build_real (TREE_TYPE (type), re_mode),
				  build_real (TREE_TYPE (type), im_mode));
	}
    }
  return NULL_TREE;
}

/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, GMP_RNDN);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_clear_flags ();
	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long so it may
		 return more bits in quo than the target int can hold
		 if sizeof(host long) > sizeof(target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, reduce the quo value modulo the largest
		 number that the target int can hold, leaving one bit
		 for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
		{
		  /* Set the value.  */
		  tree result_quo
		    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
				   build_int_cst (TREE_TYPE (arg_quo),
						  integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }
  return result;
}

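/* For example (hypothetical user code), with constant arguments the
   call below folds at compile time: 5.0/3.0 rounds to the nearest
   integer 2, so the remainder is 5.0 - 2*3.0 == -1.0 and the stored
   quotient bits are 2:

     int quo;
     double rem = __builtin_remquo (5.0, 3.0, &quo);
     // rem == -1.0, quo == 2
*/
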
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer.  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (TREE_TYPE (arg_sg), sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }

  return result;
}

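/* For example (hypothetical user code), gamma (-0.5) == -2*sqrt(pi),
   so with a constant argument the call folds to the logarithm of the
   absolute value together with a negative sign flag:

     int sg;
     double l = __builtin_lgamma_r (-0.5, &sg);
     // l == log (2*sqrt(pi)), approximately 1.2655; sg == -1
*/
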
/* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
   mpc function FUNC on them and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref (m0), re0, rnd);
	  mpfr_from_real (mpc_imagref (m0), im0, rnd);
	  mpfr_from_real (mpc_realref (m1), re1, rnd);
	  mpfr_from_real (mpc_imagref (m1), im1, rnd);
	  mpfr_clear_flags ();
	  inexact = func (m0, m0, m1, crnd);
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }

  return result;
}

/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
		    ? gimple_call_arg_ptr (stmt, 0)
		    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	return targetm.fold_builtin (fndecl, nargs, args, ignore);
      else
	{
	  ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	  if (ret)
	    {
	      /* Propagate location information from the original call to
		 the expansion of the builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
		  tree realret = ret;
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}

/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
	      && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
	      && asmspec != 0);

  tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);

  if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
      && INT_TYPE_SIZE < BITS_PER_WORD)
    {
      scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
      set_user_assembler_libfunc ("ffs", asmspec);
      set_optab_libfunc (ffs_optab, mode, "ffs");
    }
}

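/* For example (hypothetical user code; MY_FFS is an arbitrary name),
   a user declaration that renames a builtin with an asm label ends up
   here, and both explicit calls and libcalls then use the new name:

     extern int ffs (int) __asm__ ("my_ffs");

   After this declaration GCC emits references to my_ffs for ffs,
   including the ffs_optab libcall when INT_TYPE_SIZE < BITS_PER_WORD.  */
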
/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
	/* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
	/* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
	/* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
	return true;

      default:
	return false;
      }

  return false;
}

/* Return true if DECL is a builtin that is not expensive, i.e., one that
   is most probably expanded inline into reasonably simple code.  This is
   a superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
	return true;

      default:
	return is_simple_builtin (decl);
      }

  return false;
}

/* Return true if T is a constant and the value cast to a target char
   can be represented by a host char.
   Store the cast char constant in *P if so.  */

bool
target_char_cst_p (tree t, char *p)
{
  if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
    return false;

  *p = (char) tree_to_uhwi (t);
  return true;
}

/* Return the maximum object size.  */

tree
max_object_size (void)
{
  /* To do: Make this a configurable parameter.  */
  return TYPE_MAX_VALUE (ptrdiff_type_node);
}