1 /* Expand builtin functions.
2 Copyright (C) 1988-2020 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* Legacy warning! Please add no further builtin simplifications here
21 (apart from pure constant folding) - builtin simplifications should go
22 to match.pd or gimple-fold.c instead. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "memmodel.h"
32 #include "gimple.h"
33 #include "predict.h"
34 #include "tm_p.h"
35 #include "stringpool.h"
36 #include "tree-vrp.h"
37 #include "tree-ssanames.h"
38 #include "expmed.h"
39 #include "optabs.h"
40 #include "emit-rtl.h"
41 #include "recog.h"
42 #include "diagnostic-core.h"
43 #include "alias.h"
44 #include "fold-const.h"
45 #include "fold-const-call.h"
46 #include "gimple-ssa-warn-restrict.h"
47 #include "stor-layout.h"
48 #include "calls.h"
49 #include "varasm.h"
50 #include "tree-object-size.h"
51 #include "tree-ssa-strlen.h"
52 #include "realmpfr.h"
53 #include "cfgrtl.h"
54 #include "except.h"
55 #include "dojump.h"
56 #include "explow.h"
57 #include "stmt.h"
58 #include "expr.h"
59 #include "libfuncs.h"
60 #include "output.h"
61 #include "typeclass.h"
62 #include "langhooks.h"
63 #include "value-prof.h"
64 #include "builtins.h"
65 #include "stringpool.h"
66 #include "attribs.h"
67 #include "asan.h"
68 #include "internal-fn.h"
69 #include "case-cfn-macros.h"
70 #include "gimple-fold.h"
71 #include "intl.h"
72 #include "file-prefix-map.h" /* remap_macro_filename() */
73 #include "gomp-constants.h"
74 #include "omp-general.h"
75 #include "tree-dfa.h"
76
77 struct target_builtins default_target_builtins;
78 #if SWITCHABLE_TARGET
79 struct target_builtins *this_target_builtins = &default_target_builtins;
80 #endif
81
82 /* Define the names of the builtin function types and codes. */
83 const char *const built_in_class_names[BUILT_IN_LAST]
84 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
85
86 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
87 const char * built_in_names[(int) END_BUILTINS] =
88 {
89 #include "builtins.def"
90 };
91
92 /* Set up an array of builtin_info_type, making sure each element decl is
93 initialized to NULL_TREE. */
94 builtin_info_type builtin_info[(int)END_BUILTINS];
95
96 /* Non-zero if __builtin_constant_p should be folded right away. */
97 bool force_folding_builtin_constant_p;
98
99 static int target_char_cast (tree, char *);
100 static rtx get_memory_rtx (tree, tree);
101 static int apply_args_size (void);
102 static int apply_result_size (void);
103 static rtx result_vector (int, rtx);
104 static void expand_builtin_prefetch (tree);
105 static rtx expand_builtin_apply_args (void);
106 static rtx expand_builtin_apply_args_1 (void);
107 static rtx expand_builtin_apply (rtx, rtx, rtx);
108 static void expand_builtin_return (rtx);
109 static enum type_class type_to_class (tree);
110 static rtx expand_builtin_classify_type (tree);
111 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
112 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
113 static rtx expand_builtin_interclass_mathfn (tree, rtx);
114 static rtx expand_builtin_sincos (tree);
115 static rtx expand_builtin_cexpi (tree, rtx);
116 static rtx expand_builtin_int_roundingfn (tree, rtx);
117 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
118 static rtx expand_builtin_next_arg (void);
119 static rtx expand_builtin_va_start (tree);
120 static rtx expand_builtin_va_end (tree);
121 static rtx expand_builtin_va_copy (tree);
122 static rtx inline_expand_builtin_bytecmp (tree, rtx);
123 static rtx expand_builtin_strcmp (tree, rtx);
124 static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
125 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
126 static rtx expand_builtin_memchr (tree, rtx);
127 static rtx expand_builtin_memcpy (tree, rtx);
128 static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
129 rtx target, tree exp,
130 memop_ret retmode,
131 bool might_overlap);
132 static rtx expand_builtin_memmove (tree, rtx);
133 static rtx expand_builtin_mempcpy (tree, rtx);
134 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
135 static rtx expand_builtin_strcat (tree);
136 static rtx expand_builtin_strcpy (tree, rtx);
137 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
138 static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
139 static rtx expand_builtin_stpncpy (tree, rtx);
140 static rtx expand_builtin_strncat (tree, rtx);
141 static rtx expand_builtin_strncpy (tree, rtx);
142 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
143 static rtx expand_builtin_memset (tree, rtx, machine_mode);
144 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
145 static rtx expand_builtin_bzero (tree);
146 static rtx expand_builtin_strlen (tree, rtx, machine_mode);
147 static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
148 static rtx expand_builtin_alloca (tree);
149 static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
150 static rtx expand_builtin_frame_address (tree, tree);
151 static tree stabilize_va_list_loc (location_t, tree, int);
152 static rtx expand_builtin_expect (tree, rtx);
153 static rtx expand_builtin_expect_with_probability (tree, rtx);
154 static tree fold_builtin_constant_p (tree);
155 static tree fold_builtin_classify_type (tree);
156 static tree fold_builtin_strlen (location_t, tree, tree);
157 static tree fold_builtin_inf (location_t, tree, int);
158 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
159 static bool validate_arg (const_tree, enum tree_code code);
160 static rtx expand_builtin_fabs (tree, rtx, rtx);
161 static rtx expand_builtin_signbit (tree, rtx);
162 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
163 static tree fold_builtin_isascii (location_t, tree);
164 static tree fold_builtin_toascii (location_t, tree);
165 static tree fold_builtin_isdigit (location_t, tree);
166 static tree fold_builtin_fabs (location_t, tree, tree);
167 static tree fold_builtin_abs (location_t, tree, tree);
168 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
169 enum tree_code);
170 static tree fold_builtin_varargs (location_t, tree, tree*, int);
171
172 static tree fold_builtin_strpbrk (location_t, tree, tree, tree, tree);
173 static tree fold_builtin_strspn (location_t, tree, tree, tree);
174 static tree fold_builtin_strcspn (location_t, tree, tree, tree);
175
176 static rtx expand_builtin_object_size (tree);
177 static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
178 enum built_in_function);
179 static void maybe_emit_chk_warning (tree, enum built_in_function);
180 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
181 static void maybe_emit_free_warning (tree);
182 static tree fold_builtin_object_size (tree, tree);
183
184 unsigned HOST_WIDE_INT target_newline;
185 unsigned HOST_WIDE_INT target_percent;
186 static unsigned HOST_WIDE_INT target_c;
187 static unsigned HOST_WIDE_INT target_s;
188 char target_percent_c[3];
189 char target_percent_s[3];
190 char target_percent_s_newline[4];
191 static tree do_mpfr_remquo (tree, tree, tree);
192 static tree do_mpfr_lgamma_r (tree, tree, tree);
193 static void expand_builtin_sync_synchronize (void);
194
195 /* Return true if NAME starts with __builtin_, __sync_ or __atomic_. */
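/* For example, "__builtin_memcpy", "__sync_fetch_and_add" and
   "__atomic_load_n" all match, whereas a plain "memcpy" does not.  */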
196
197 static bool
198 is_builtin_name (const char *name)
199 {
200 if (strncmp (name, "__builtin_", 10) == 0)
201 return true;
202 if (strncmp (name, "__sync_", 7) == 0)
203 return true;
204 if (strncmp (name, "__atomic_", 9) == 0)
205 return true;
206 return false;
207 }
208
209 /* Return true if NODE should be considered for inline expansion regardless
210 of the optimization level. This means whenever a function is invoked with
211 its "internal" name, which normally contains the prefix "__builtin". */
212
213 bool
214 called_as_built_in (tree node)
215 {
216 /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
217 we want the name used to call the function, not the name it
218 will have. */
219 const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
220 return is_builtin_name (name);
221 }
222
223 /* Compute values M and N such that M divides (address of EXP - N) and such
224 that N < M. If these numbers can be determined, store M in *ALIGNP and N in
225 *BITPOSP and return true. Otherwise return false and store BITS_PER_UNIT to
226 *ALIGNP and any bit-offset to *BITPOSP.
227
228 Note that the address (and thus the alignment) computed here is based
229 on the address to which a symbol resolves, whereas DECL_ALIGN is based
230 on the address at which an object is actually located. These two
231 addresses are not always the same. For example, on ARM targets,
232 the address &foo of a Thumb function foo() has the lowest bit set,
233 whereas foo() itself starts on an even address.
234
235 If ADDR_P is true we are taking the address of the memory reference EXP
236 and thus cannot rely on the access taking place. */
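/* As an illustration, for an access known to sit 4 bytes past a 16-byte
   aligned address, M would be 128 (bits) and N would be 32 (bits), i.e.
   the address satisfies (addr % 16) == 4.  */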
237
238 static bool
239 get_object_alignment_2 (tree exp, unsigned int *alignp,
240 unsigned HOST_WIDE_INT *bitposp, bool addr_p)
241 {
242 poly_int64 bitsize, bitpos;
243 tree offset;
244 machine_mode mode;
245 int unsignedp, reversep, volatilep;
246 unsigned int align = BITS_PER_UNIT;
247 bool known_alignment = false;
248
249 /* Get the innermost object and the constant (bitpos) and possibly
250 variable (offset) offset of the access. */
251 exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
252 &unsignedp, &reversep, &volatilep);
253
254 /* Extract alignment information from the innermost object and
255 possibly adjust bitpos and offset. */
256 if (TREE_CODE (exp) == FUNCTION_DECL)
257 {
258 /* Function addresses can encode extra information besides their
259 alignment. However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
260 allows the low bit to be used as a virtual bit, we know
261 that the address itself must be at least 2-byte aligned. */
262 if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
263 align = 2 * BITS_PER_UNIT;
264 }
265 else if (TREE_CODE (exp) == LABEL_DECL)
266 ;
267 else if (TREE_CODE (exp) == CONST_DECL)
268 {
269 /* The alignment of a CONST_DECL is determined by its initializer. */
270 exp = DECL_INITIAL (exp);
271 align = TYPE_ALIGN (TREE_TYPE (exp));
272 if (CONSTANT_CLASS_P (exp))
273 align = targetm.constant_alignment (exp, align);
274
275 known_alignment = true;
276 }
277 else if (DECL_P (exp))
278 {
279 align = DECL_ALIGN (exp);
280 known_alignment = true;
281 }
282 else if (TREE_CODE (exp) == INDIRECT_REF
283 || TREE_CODE (exp) == MEM_REF
284 || TREE_CODE (exp) == TARGET_MEM_REF)
285 {
286 tree addr = TREE_OPERAND (exp, 0);
287 unsigned ptr_align;
288 unsigned HOST_WIDE_INT ptr_bitpos;
289 unsigned HOST_WIDE_INT ptr_bitmask = ~0;
290
291 /* If the address is explicitly aligned, handle that. */
292 if (TREE_CODE (addr) == BIT_AND_EXPR
293 && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
294 {
295 ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
296 ptr_bitmask *= BITS_PER_UNIT;
297 align = least_bit_hwi (ptr_bitmask);
298 addr = TREE_OPERAND (addr, 0);
299 }
300
301 known_alignment
302 = get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
303 align = MAX (ptr_align, align);
304
305 /* Re-apply explicit alignment to the bitpos. */
306 ptr_bitpos &= ptr_bitmask;
307
308 /* The alignment of the pointer operand in a TARGET_MEM_REF
309 has to take the variable offset parts into account. */
310 if (TREE_CODE (exp) == TARGET_MEM_REF)
311 {
312 if (TMR_INDEX (exp))
313 {
314 unsigned HOST_WIDE_INT step = 1;
315 if (TMR_STEP (exp))
316 step = TREE_INT_CST_LOW (TMR_STEP (exp));
317 align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
318 }
319 if (TMR_INDEX2 (exp))
320 align = BITS_PER_UNIT;
321 known_alignment = false;
322 }
323
324 /* When EXP is an actual memory reference then we can use
325 TYPE_ALIGN of a pointer indirection to derive alignment.
326 Do so only if get_pointer_alignment_1 did not reveal absolute
327 alignment knowledge and if using that alignment would
328 improve the situation. */
329 unsigned int talign;
330 if (!addr_p && !known_alignment
331 && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
332 && talign > align)
333 align = talign;
334 else
335 {
336 /* Else adjust bitpos accordingly. */
337 bitpos += ptr_bitpos;
338 if (TREE_CODE (exp) == MEM_REF
339 || TREE_CODE (exp) == TARGET_MEM_REF)
340 bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
341 }
342 }
343 else if (TREE_CODE (exp) == STRING_CST)
344 {
345 /* STRING_CSTs are the only constant objects we allow not to be
346 wrapped inside a CONST_DECL. */
347 align = TYPE_ALIGN (TREE_TYPE (exp));
348 if (CONSTANT_CLASS_P (exp))
349 align = targetm.constant_alignment (exp, align);
350
351 known_alignment = true;
352 }
353
354 /* If there is a non-constant offset part extract the maximum
355 alignment that can prevail. */
356 if (offset)
357 {
358 unsigned int trailing_zeros = tree_ctz (offset);
359 if (trailing_zeros < HOST_BITS_PER_INT)
360 {
361 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
362 if (inner)
363 align = MIN (align, inner);
364 }
365 }
366
367 /* Account for the alignment of runtime coefficients, so that the constant
368 bitpos is guaranteed to be accurate. */
369 unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
370 if (alt_align != 0 && alt_align < align)
371 {
372 align = alt_align;
373 known_alignment = false;
374 }
375
376 *alignp = align;
377 *bitposp = bitpos.coeffs[0] & (align - 1);
378 return known_alignment;
379 }
380
381 /* For a memory reference expression EXP compute values M and N such that M
382 divides (&EXP - N) and such that N < M. If these numbers can be determined,
383 store M in *ALIGNP and N in *BITPOSP and return true. Otherwise return false
384 and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP. */
385
386 bool
387 get_object_alignment_1 (tree exp, unsigned int *alignp,
388 unsigned HOST_WIDE_INT *bitposp)
389 {
390 return get_object_alignment_2 (exp, alignp, bitposp, false);
391 }
392
393 /* Return the alignment in bits of EXP, an object. */
394
395 unsigned int
396 get_object_alignment (tree exp)
397 {
398 unsigned HOST_WIDE_INT bitpos = 0;
399 unsigned int align;
400
401 get_object_alignment_1 (exp, &align, &bitpos);
402
403 /* align and bitpos now specify known low bits of the pointer.
404 ptr & (align - 1) == bitpos. */
405
406 if (bitpos != 0)
407 align = least_bit_hwi (bitpos);
408 return align;
409 }
410
411 /* For a pointer valued expression EXP compute values M and N such that M
412 divides (EXP - N) and such that N < M. If these numbers can be determined,
413 store M in *ALIGNP and N in *BITPOSP and return true. Return false if
414 the results are just a conservative approximation.
415
416 If EXP is not a pointer, false is returned too. */
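/* For instance, for an SSA_NAME pointer whose ptr_info records an alignment
   of 8 bytes and a misalignment of 4 bytes, this stores 64 in *ALIGNP and
   32 in *BITPOSP, yet still returns false, since we cannot tell whether
   that information is only a conservative approximation.  */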
417
418 bool
419 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
420 unsigned HOST_WIDE_INT *bitposp)
421 {
422 STRIP_NOPS (exp);
423
424 if (TREE_CODE (exp) == ADDR_EXPR)
425 return get_object_alignment_2 (TREE_OPERAND (exp, 0),
426 alignp, bitposp, true);
427 else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
428 {
429 unsigned int align;
430 unsigned HOST_WIDE_INT bitpos;
431 bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
432 &align, &bitpos);
433 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
434 bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
435 else
436 {
437 unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
438 if (trailing_zeros < HOST_BITS_PER_INT)
439 {
440 unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
441 if (inner)
442 align = MIN (align, inner);
443 }
444 }
445 *alignp = align;
446 *bitposp = bitpos & (align - 1);
447 return res;
448 }
449 else if (TREE_CODE (exp) == SSA_NAME
450 && POINTER_TYPE_P (TREE_TYPE (exp)))
451 {
452 unsigned int ptr_align, ptr_misalign;
453 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
454
455 if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
456 {
457 *bitposp = ptr_misalign * BITS_PER_UNIT;
458 *alignp = ptr_align * BITS_PER_UNIT;
459 /* Make sure to return a sensible alignment when the multiplication
460 by BITS_PER_UNIT overflowed. */
461 if (*alignp == 0)
462 *alignp = 1u << (HOST_BITS_PER_INT - 1);
463 /* We cannot really tell whether this result is an approximation. */
464 return false;
465 }
466 else
467 {
468 *bitposp = 0;
469 *alignp = BITS_PER_UNIT;
470 return false;
471 }
472 }
473 else if (TREE_CODE (exp) == INTEGER_CST)
474 {
475 *alignp = BIGGEST_ALIGNMENT;
476 *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
477 & (BIGGEST_ALIGNMENT - 1));
478 return true;
479 }
480
481 *bitposp = 0;
482 *alignp = BITS_PER_UNIT;
483 return false;
484 }
485
486 /* Return the alignment in bits of EXP, a pointer valued expression.
487 The alignment returned is, by default, the alignment of the thing that
488 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
489
490 Otherwise, look at the expression to see if we can do better, i.e., if the
491 expression is actually pointing at an object whose alignment is tighter. */
492
493 unsigned int
494 get_pointer_alignment (tree exp)
495 {
496 unsigned HOST_WIDE_INT bitpos = 0;
497 unsigned int align;
498
499 get_pointer_alignment_1 (exp, &align, &bitpos);
500
501 /* align and bitpos now specify known low bits of the pointer.
502 ptr & (align - 1) == bitpos. */
503
504 if (bitpos != 0)
505 align = least_bit_hwi (bitpos);
506
507 return align;
508 }
509
510 /* Return the number of leading non-zero elements in the sequence
511 [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
512 ELTSIZE must be a power of 2 less than 8. Used by c_strlen. */
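/* For example, string_length ("ab\0cd", 1, 5) returns 2, since the third
   element is the first zero byte; if none of the first MAXELTS elements is
   zero, MAXELTS itself is returned.  */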
513
514 unsigned
515 string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
516 {
517 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
518
519 unsigned n;
520
521 if (eltsize == 1)
522 {
523 /* Optimize the common case of plain char. */
524 for (n = 0; n < maxelts; n++)
525 {
526 const char *elt = (const char*) ptr + n;
527 if (!*elt)
528 break;
529 }
530 }
531 else
532 {
533 for (n = 0; n < maxelts; n++)
534 {
535 const char *elt = (const char*) ptr + n * eltsize;
536 if (!memcmp (elt, "\0\0\0\0", eltsize))
537 break;
538 }
539 }
540 return n;
541 }
542
543 /* For a call at LOC to a function FN that expects a string in the argument
544 ARG, issue a diagnostic due to it being called with an argument
545 declared at NONSTR that is a character array with no terminating NUL. */
546
547 void
548 warn_string_no_nul (location_t loc, const char *fn, tree arg, tree decl)
549 {
550 if (TREE_NO_WARNING (arg))
551 return;
552
553 loc = expansion_point_location_if_in_system_header (loc);
554
555 if (warning_at (loc, OPT_Wstringop_overflow_,
556 "%qs argument missing terminating nul", fn))
557 {
558 inform (DECL_SOURCE_LOCATION (decl),
559 "referenced argument declared here");
560 TREE_NO_WARNING (arg) = 1;
561 }
562 }
563
564 /* For a call EXPR (which may be null) that expects a string argument
565 and SRC as the argument, returns false if SRC is a character array
566 with no terminating NUL. When nonnull, BOUND is the number of
567 characters in which to expect the terminating NUL.
568 When EXPR is nonnull also issues a warning. */
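/* For example, for a source declared as char a[4] = "abcd" (no terminating
   nul), a call like strlen (a) yields false here, whereas a BOUND of at
   most 4 keeps the access within the array and yields true.  */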
569
570 bool
571 check_nul_terminated_array (tree expr, tree src, tree bound /* = NULL_TREE */)
572 {
573 tree size;
574 bool exact;
575 tree nonstr = unterminated_array (src, &size, &exact);
576 if (!nonstr)
577 return true;
578
579 /* NONSTR refers to the non-nul terminated constant array and SIZE
580 is the constant size of the array in bytes. EXACT is true when
581 SIZE is exact. */
582
583 if (bound)
584 {
585 wide_int min, max;
586 if (TREE_CODE (bound) == INTEGER_CST)
587 min = max = wi::to_wide (bound);
588 else
589 {
590 value_range_kind rng = get_range_info (bound, &min, &max);
591 if (rng != VR_RANGE)
592 return true;
593 }
594
595 if (wi::leu_p (min, wi::to_wide (size)))
596 return true;
597 }
598
599 if (expr && !TREE_NO_WARNING (expr))
600 {
601 tree fndecl = get_callee_fndecl (expr);
602 const char *fname = IDENTIFIER_POINTER (DECL_NAME (fndecl));
603 warn_string_no_nul (EXPR_LOCATION (expr), fname, src, nonstr);
604 }
605
606 return false;
607 }
608
609 /* If EXP refers to an unterminated constant character array return
610 the declaration of the object of which the array is a member or
611 element and if SIZE is not null, set *SIZE to the size of
612 the unterminated array and set *EXACT if the size is exact or
613 clear it otherwise. Otherwise return null. */
614
615 tree
616 unterminated_array (tree exp, tree *size /* = NULL */, bool *exact /* = NULL */)
617 {
618 /* C_STRLEN will return NULL and set DECL in the info
619 structure if EXP references an unterminated array. */
620 c_strlen_data lendata = { };
621 tree len = c_strlen (exp, 1, &lendata);
622 if (len == NULL_TREE && lendata.minlen && lendata.decl)
623 {
624 if (size)
625 {
626 len = lendata.minlen;
627 if (lendata.off)
628 {
629 /* Constant offsets are already accounted for in LENDATA.MINLEN,
630 but not in an SSA_NAME + CST expression. */
631 if (TREE_CODE (lendata.off) == INTEGER_CST)
632 *exact = true;
633 else if (TREE_CODE (lendata.off) == PLUS_EXPR
634 && TREE_CODE (TREE_OPERAND (lendata.off, 1)) == INTEGER_CST)
635 {
636 /* Subtract the offset from the size of the array. */
637 *exact = false;
638 tree temp = TREE_OPERAND (lendata.off, 1);
639 temp = fold_convert (ssizetype, temp);
640 len = fold_build2 (MINUS_EXPR, ssizetype, len, temp);
641 }
642 else
643 *exact = false;
644 }
645 else
646 *exact = true;
647
648 *size = len;
649 }
650 return lendata.decl;
651 }
652
653 return NULL_TREE;
654 }
655
656 /* Compute the length of a null-terminated character string or wide
657 character string handling character sizes of 1, 2, and 4 bytes.
658 TREE_STRING_LENGTH is not the right way because it evaluates to
659 the size of the character array in bytes (as opposed to characters)
660 and because it can contain a zero byte in the middle.
661
662 ONLY_VALUE should be nonzero if the result is not going to be emitted
663 into the instruction stream and zero if it is going to be expanded.
664 E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
665 is returned, otherwise NULL, since
666 len = c_strlen (ARG, 1); if (len) expand_expr (len, ...); would not
667 evaluate the side-effects.
668
669 If ONLY_VALUE is two then we do not emit warnings about out-of-bound
670 accesses. Note that this implies the result is not going to be emitted
671 into the instruction stream.
672
673 Additional information about the string accessed may be recorded
674 in DATA. For example, if ARG references an unterminated string,
675 then the declaration will be stored in the DECL field. If the
676 length of the unterminated string can be determined, it'll be
677 stored in the LEN field. Note this length could well be different
678 than what a C strlen call would return.
679
680 ELTSIZE is 1 for normal single byte character strings, and 2 or
681 4 for wide character strings. ELTSIZE is by default 1.
682
683 The value returned is of type `ssizetype'. */
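/* For example, for the string constant "foobar" and a nonconstant byte
   offset I (with ELTSIZE == 1), the result is a COND_EXPR computing
   I <= 6 ? 6 - I : 0, whereas for a known in-bounds offset a plain
   ssizetype constant is returned.  */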
684
685 tree
686 c_strlen (tree arg, int only_value, c_strlen_data *data, unsigned eltsize)
687 {
688 /* If we were not passed a DATA pointer, then get one to a local
689 structure. That avoids having to check DATA for NULL before
690 each time we want to use it. */
691 c_strlen_data local_strlen_data = { };
692 if (!data)
693 data = &local_strlen_data;
694
695 gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
696
697 tree src = STRIP_NOPS (arg);
698 if (TREE_CODE (src) == COND_EXPR
699 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
700 {
701 tree len1, len2;
702
703 len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
704 len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
705 if (tree_int_cst_equal (len1, len2))
706 return len1;
707 }
708
709 if (TREE_CODE (src) == COMPOUND_EXPR
710 && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
711 return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
712
713 location_t loc = EXPR_LOC_OR_LOC (src, input_location);
714
715 /* Offset from the beginning of the string in bytes. */
716 tree byteoff;
717 tree memsize;
718 tree decl;
719 src = string_constant (src, &byteoff, &memsize, &decl);
720 if (src == 0)
721 return NULL_TREE;
722
723 /* Determine the size of the string element. */
724 if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
725 return NULL_TREE;
726
727 /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
728 length of SRC. Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
729 in case the latter is less than the size of the array, such as when
730 SRC refers to a short string literal used to initialize a large array.
731 In that case, the elements of the array after the terminating NUL are
732 all NUL. */
733 HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
734 strelts = strelts / eltsize;
735
736 if (!tree_fits_uhwi_p (memsize))
737 return NULL_TREE;
738
739 HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;
740
741 /* PTR can point to the byte representation of any string type, including
742 char* and wchar_t*. */
743 const char *ptr = TREE_STRING_POINTER (src);
744
745 if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
746 {
747 /* The code below works only for single byte character types. */
748 if (eltsize != 1)
749 return NULL_TREE;
750
751 /* If the string has an internal NUL character followed by any
752 non-NUL characters (e.g., "foo\0bar"), we can't compute
753 the offset to the following NUL if we don't know where to
754 start searching for it. */
755 unsigned len = string_length (ptr, eltsize, strelts);
756
757 /* Return when an embedded null character is found or none at all.
758 In the latter case, set the DECL/LEN field in the DATA structure
759 so that callers may examine them. */
760 if (len + 1 < strelts)
761 return NULL_TREE;
762 else if (len >= maxelts)
763 {
764 data->decl = decl;
765 data->off = byteoff;
766 data->minlen = ssize_int (len);
767 return NULL_TREE;
768 }
769
770 /* For empty strings the result should be zero. */
771 if (len == 0)
772 return ssize_int (0);
773
774 /* We don't know the starting offset, but we do know that the string
775 has no internal zero bytes. If the offset falls within the bounds
776 of the string subtract the offset from the length of the string,
777 and return that. Otherwise the length is zero. Take care to
778 use SAVE_EXPR in case the OFFSET has side-effects. */
779 tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
780 : byteoff;
781 offsave = fold_convert_loc (loc, sizetype, offsave);
782 tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
783 size_int (len));
784 tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
785 offsave);
786 lenexp = fold_convert_loc (loc, ssizetype, lenexp);
787 return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
788 build_zero_cst (ssizetype));
789 }
790
791 /* Offset from the beginning of the string in elements. */
792 HOST_WIDE_INT eltoff;
793
794 /* We have a known offset into the string. Start searching there for
795 a null character if we can represent it as a single HOST_WIDE_INT. */
796 if (byteoff == 0)
797 eltoff = 0;
798 else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
799 eltoff = -1;
800 else
801 eltoff = tree_to_uhwi (byteoff) / eltsize;
802
803 /* If the offset is known to be out of bounds, warn, and call strlen at
804 runtime. */
805 if (eltoff < 0 || eltoff >= maxelts)
806 {
807 /* Suppress multiple warnings for propagated constant strings. */
808 if (only_value != 2
809 && !TREE_NO_WARNING (arg)
810 && warning_at (loc, OPT_Warray_bounds,
811 "offset %qwi outside bounds of constant string",
812 eltoff))
813 {
814 if (decl)
815 inform (DECL_SOURCE_LOCATION (decl), "%qE declared here", decl);
816 TREE_NO_WARNING (arg) = 1;
817 }
818 return NULL_TREE;
819 }
820
821 /* If eltoff is larger than strelts but less than maxelts the
822 string length is zero, since the excess memory will be zero. */
823 if (eltoff > strelts)
824 return ssize_int (0);
825
826 /* Use strlen to search for the first zero byte. Since any strings
827 constructed with build_string will have nulls appended, we win even
828 if we get handed something like (char[4])"abcd".
829
830 Since ELTOFF is our starting index into the string, no further
831 calculation is needed. */
832 unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
833 strelts - eltoff);
834
835 /* Don't know what to return if there was no zero termination.
836 Ideally this would turn into a gcc_checking_assert over time.
837 Set DECL/LEN so callers can examine them. */
838 if (len >= maxelts - eltoff)
839 {
840 data->decl = decl;
841 data->off = byteoff;
842 data->minlen = ssize_int (len);
843 return NULL_TREE;
844 }
845
846 return ssize_int (len);
847 }
848
849 /* Return a constant integer corresponding to target reading
850 GET_MODE_BITSIZE (MODE) bits from string constant STR. If
851 NULL_TERMINATED_P, reading stops after the '\0' character and all further
852 ones are assumed to be zero; otherwise it reads as many characters
853 as needed. */
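/* For example, on a typical little-endian target, c_readstr ("ab", SImode)
   yields the constant 0x00006261: 'a' and 'b' occupy the two low-order byte
   positions and the remaining bytes are zero because reading stops at the
   terminating nul.  */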
854
855 rtx
856 c_readstr (const char *str, scalar_int_mode mode,
857 bool null_terminated_p/*=true*/)
858 {
859 HOST_WIDE_INT ch;
860 unsigned int i, j;
861 HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
862
863 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
864 unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
865 / HOST_BITS_PER_WIDE_INT;
866
867 gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
868 for (i = 0; i < len; i++)
869 tmp[i] = 0;
870
871 ch = 1;
872 for (i = 0; i < GET_MODE_SIZE (mode); i++)
873 {
874 j = i;
875 if (WORDS_BIG_ENDIAN)
876 j = GET_MODE_SIZE (mode) - i - 1;
877 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
878 && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
879 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
880 j *= BITS_PER_UNIT;
881
882 if (ch || !null_terminated_p)
883 ch = (unsigned char) str[i];
884 tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
885 }
886
887 wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
888 return immed_wide_int_const (c, mode);
889 }
890
891 /* Cast a target constant CST to target CHAR and if that value fits into
892 host char type, return zero and put that value into the variable pointed
893 to by P. */
894
895 static int
896 target_char_cast (tree cst, char *p)
897 {
898 unsigned HOST_WIDE_INT val, hostval;
899
900 if (TREE_CODE (cst) != INTEGER_CST
901 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
902 return 1;
903
904 /* Do not care if it fits or not right here. */
905 val = TREE_INT_CST_LOW (cst);
906
907 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
908 val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
909
910 hostval = val;
911 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
912 hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
913
914 if (val != hostval)
915 return 1;
916
917 *p = hostval;
918 return 0;
919 }
920
921 /* Similar to save_expr, but assumes that arbitrary code is not executed
922 in between the multiple evaluations. In particular, we assume that a
923 non-addressable local variable will not be modified. */
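/* For example, an SSA_NAME or a non-addressable PARM_DECL is returned
   unchanged, while anything else (say, a global variable) is wrapped in a
   SAVE_EXPR so that it is evaluated only once.  */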
924
925 static tree
926 builtin_save_expr (tree exp)
927 {
928 if (TREE_CODE (exp) == SSA_NAME
929 || (TREE_ADDRESSABLE (exp) == 0
930 && (TREE_CODE (exp) == PARM_DECL
931 || (VAR_P (exp) && !TREE_STATIC (exp)))))
932 return exp;
933
934 return save_expr (exp);
935 }
936
937 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
938 times to get the address of either a higher stack frame, or a return
939 address located within it (depending on FNDECL_CODE). */
940
941 static rtx
942 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
943 {
944 int i;
945 rtx tem = INITIAL_FRAME_ADDRESS_RTX;
946 if (tem == NULL_RTX)
947 {
948 /* For a zero count with __builtin_return_address, we don't care what
949 frame address we return, because target-specific definitions will
950 override us. Therefore frame pointer elimination is OK, and using
951 the soft frame pointer is OK.
952
953 For a nonzero count, or a zero count with __builtin_frame_address,
954 we require a stable offset from the current frame pointer to the
955 previous one, so we must use the hard frame pointer, and
956 we must disable frame pointer elimination. */
957 if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
958 tem = frame_pointer_rtx;
959 else
960 {
961 tem = hard_frame_pointer_rtx;
962
963 /* Tell reload not to eliminate the frame pointer. */
964 crtl->accesses_prior_frames = 1;
965 }
966 }
967
968 if (count > 0)
969 SETUP_FRAME_ADDRESSES ();
970
971 /* On the SPARC, the return address is not in the frame, it is in a
972 register. There is no way to access it off of the current frame
973 pointer, but it can be accessed off the previous frame pointer by
974 reading the value from the register window save area. */
975 if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
976 count--;
977
978 /* Scan back COUNT frames to the specified frame. */
979 for (i = 0; i < count; i++)
980 {
981 /* Assume the dynamic chain pointer is in the word that the
982 frame address points to, unless otherwise specified. */
983 tem = DYNAMIC_CHAIN_ADDRESS (tem);
984 tem = memory_address (Pmode, tem);
985 tem = gen_frame_mem (Pmode, tem);
986 tem = copy_to_reg (tem);
987 }
988
989 /* For __builtin_frame_address, return what we've got. But, on
990 the SPARC for example, we may have to add a bias. */
991 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
992 return FRAME_ADDR_RTX (tem);
993
994 /* For __builtin_return_address, get the return address from that frame. */
995 #ifdef RETURN_ADDR_RTX
996 tem = RETURN_ADDR_RTX (count, tem);
997 #else
998 tem = memory_address (Pmode,
999 plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
1000 tem = gen_frame_mem (Pmode, tem);
1001 #endif
1002 return tem;
1003 }
1004
1005 /* Alias set used for setjmp buffer. */
1006 static alias_set_type setjmp_alias_set = -1;
1007
1008 /* Construct the leading half of a __builtin_setjmp call. Control will
1009 return to RECEIVER_LABEL. This is also called directly by the SJLJ
1010 exception handling code. */
1011
1012 void
1013 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
1014 {
1015 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1016 rtx stack_save;
1017 rtx mem;
1018
1019 if (setjmp_alias_set == -1)
1020 setjmp_alias_set = new_alias_set ();
1021
1022 buf_addr = convert_memory_address (Pmode, buf_addr);
1023
1024 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
1025
1026 /* We store the frame pointer and the address of receiver_label in
1027 the buffer and use the rest of it for the stack save area, which
1028 is machine-dependent. */
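/* Roughly, the buffer is laid out as:
     word 0:          frame pointer
     word 1:          address of RECEIVER_LABEL
     words 2 and up:  stack save area (in STACK_SAVEAREA_MODE)  */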
1029
1030 mem = gen_rtx_MEM (Pmode, buf_addr);
1031 set_mem_alias_set (mem, setjmp_alias_set);
1032 emit_move_insn (mem, hard_frame_pointer_rtx);
1033
1034 mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1035 GET_MODE_SIZE (Pmode))),
1036 set_mem_alias_set (mem, setjmp_alias_set);
1037
1038 emit_move_insn (validize_mem (mem),
1039 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
1040
1041 stack_save = gen_rtx_MEM (sa_mode,
1042 plus_constant (Pmode, buf_addr,
1043 2 * GET_MODE_SIZE (Pmode)));
1044 set_mem_alias_set (stack_save, setjmp_alias_set);
1045 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1046
1047 /* If there is further processing to do, do it. */
1048 if (targetm.have_builtin_setjmp_setup ())
1049 emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
1050
1051 /* We have a nonlocal label. */
1052 cfun->has_nonlocal_label = 1;
1053 }
1054
1055 /* Construct the trailing part of a __builtin_setjmp call. This is
1056 also called directly by the SJLJ exception handling code.
1057 If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler. */
1058
1059 void
1060 expand_builtin_setjmp_receiver (rtx receiver_label)
1061 {
1062 rtx chain;
1063
1064 /* Mark the FP as used when we get here, so we have to make sure it's
1065 marked as used by this function. */
1066 emit_use (hard_frame_pointer_rtx);
1067
1068 /* Mark the static chain as clobbered here so life information
1069 doesn't get messed up for it. */
1070 chain = rtx_for_static_chain (current_function_decl, true);
1071 if (chain && REG_P (chain))
1072 emit_clobber (chain);
1073
1074 if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
1075 {
1076 /* If the argument pointer can be eliminated in favor of the
1077 frame pointer, we don't need to restore it. We assume here
1078 that if such an elimination is present, it can always be used.
1079 This is the case on all known machines; if we don't make this
1080 assumption, we do unnecessary saving on many machines. */
1081 size_t i;
1082 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
1083
1084 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
1085 if (elim_regs[i].from == ARG_POINTER_REGNUM
1086 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
1087 break;
1088
1089 if (i == ARRAY_SIZE (elim_regs))
1090 {
1091 /* Now restore our arg pointer from the address at which it
1092 was saved in our stack frame. */
1093 emit_move_insn (crtl->args.internal_arg_pointer,
1094 copy_to_reg (get_arg_pointer_save_area ()));
1095 }
1096 }
1097
1098 if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
1099 emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
1100 else if (targetm.have_nonlocal_goto_receiver ())
1101 emit_insn (targetm.gen_nonlocal_goto_receiver ());
1102 else
1103 { /* Nothing */ }
1104
1105 /* We must not allow the code we just generated to be reordered by
1106 scheduling. Specifically, the update of the frame pointer must
1107 happen immediately, not later. */
1108 emit_insn (gen_blockage ());
1109 }
1110
1111 /* __builtin_longjmp is passed a pointer to an array of five words (not
1112 all will be used on all machines). It operates similarly to the C
1113 library function of the same name, but is more efficient. Much of
1114 the code below is copied from the handling of non-local gotos. */
1115
1116 static void
1117 expand_builtin_longjmp (rtx buf_addr, rtx value)
1118 {
1119 rtx fp, lab, stack;
1120 rtx_insn *insn, *last;
1121 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1122
1123 /* DRAP is needed for stack realignment if longjmp is expanded in the
1124 current function. */
1125 if (SUPPORTS_STACK_ALIGNMENT)
1126 crtl->need_drap = true;
1127
1128 if (setjmp_alias_set == -1)
1129 setjmp_alias_set = new_alias_set ();
1130
1131 buf_addr = convert_memory_address (Pmode, buf_addr);
1132
1133 buf_addr = force_reg (Pmode, buf_addr);
1134
1135 /* We require that the user pass a second argument of 1, because
1136 that is what builtin_setjmp will return. */
1137 gcc_assert (value == const1_rtx);
1138
1139 last = get_last_insn ();
1140 if (targetm.have_builtin_longjmp ())
1141 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
1142 else
1143 {
1144 fp = gen_rtx_MEM (Pmode, buf_addr);
1145 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1146 GET_MODE_SIZE (Pmode)));
1147
1148 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
1149 2 * GET_MODE_SIZE (Pmode)));
1150 set_mem_alias_set (fp, setjmp_alias_set);
1151 set_mem_alias_set (lab, setjmp_alias_set);
1152 set_mem_alias_set (stack, setjmp_alias_set);
1153
1154 /* Pick up FP, label, and SP from the block and jump. This code is
1155 from expand_goto in stmt.c; see there for detailed comments. */
1156 if (targetm.have_nonlocal_goto ())
1157 /* We have to pass a value to the nonlocal_goto pattern that will
1158 get copied into the static_chain pointer, but it does not matter
1159 what that value is, because builtin_setjmp does not use it. */
1160 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
1161 else
1162 {
1163 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1164 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1165
1166 lab = copy_to_reg (lab);
1167
1168 /* Restore the frame pointer and stack pointer. We must use a
1169 temporary since the setjmp buffer may be a local. */
1170 fp = copy_to_reg (fp);
1171 emit_stack_restore (SAVE_NONLOCAL, stack);
1172
1173 /* Ensure the frame pointer move is not optimized. */
1174 emit_insn (gen_blockage ());
1175 emit_clobber (hard_frame_pointer_rtx);
1176 emit_clobber (frame_pointer_rtx);
1177 emit_move_insn (hard_frame_pointer_rtx, fp);
1178
1179 emit_use (hard_frame_pointer_rtx);
1180 emit_use (stack_pointer_rtx);
1181 emit_indirect_jump (lab);
1182 }
1183 }
1184
1185 /* Search backwards and mark the jump insn as a non-local goto.
1186 Note that this precludes the use of __builtin_longjmp to a
1187 __builtin_setjmp target in the same function. However, we've
1188 already cautioned the user that these functions are for
1189 internal exception handling use only. */
1190 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1191 {
1192 gcc_assert (insn != last);
1193
1194 if (JUMP_P (insn))
1195 {
1196 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1197 break;
1198 }
1199 else if (CALL_P (insn))
1200 break;
1201 }
1202 }
1203
1204 static inline bool
1205 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1206 {
1207 return (iter->i < iter->n);
1208 }
1209
1210 /* This function validates the types of a function call argument list
1211 against a specified list of tree_codes. If the last specifier is a 0,
1212 that represents an ellipsis, otherwise the last specifier must be a
1213 VOID_TYPE. */
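/* For example, expand_builtin_nonlocal_goto below checks its two pointer
   arguments with
     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
   and bails out if the call does not match that shape.  */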
1214
1215 static bool
1216 validate_arglist (const_tree callexpr, ...)
1217 {
1218 enum tree_code code;
1219 bool res = 0;
1220 va_list ap;
1221 const_call_expr_arg_iterator iter;
1222 const_tree arg;
1223
1224 va_start (ap, callexpr);
1225 init_const_call_expr_arg_iterator (callexpr, &iter);
1226
1227 /* Get a bitmap of pointer argument numbers declared attribute nonnull. */
1228 tree fn = CALL_EXPR_FN (callexpr);
1229 bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));
1230
1231 for (unsigned argno = 1; ; ++argno)
1232 {
1233 code = (enum tree_code) va_arg (ap, int);
1234
1235 switch (code)
1236 {
1237 case 0:
1238 /* This signifies an ellipsis; any further arguments are all ok. */
1239 res = true;
1240 goto end;
1241 case VOID_TYPE:
1242 /* This signifies an endlink, if no arguments remain, return
1243 true, otherwise return false. */
1244 res = !more_const_call_expr_args_p (&iter);
1245 goto end;
1246 case POINTER_TYPE:
1247 /* The actual argument must be nonnull when either the whole
1248 called function has been declared nonnull, or when the formal
1249 argument corresponding to the actual argument has been. */
1250 if (argmap
1251 && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
1252 {
1253 arg = next_const_call_expr_arg (&iter);
1254 if (!validate_arg (arg, code) || integer_zerop (arg))
1255 goto end;
1256 break;
1257 }
1258 /* FALLTHRU */
1259 default:
1260 /* If no parameters remain or the parameter's code does not
1261 match the specified code, return false. Otherwise continue
1262 checking any remaining arguments. */
1263 arg = next_const_call_expr_arg (&iter);
1264 if (!validate_arg (arg, code))
1265 goto end;
1266 break;
1267 }
1268 }
1269
1270 /* We need gotos here since we can only have one VA_CLOSE in a
1271 function. */
1272 end: ;
1273 va_end (ap);
1274
1275 BITMAP_FREE (argmap);
1276
1277 return res;
1278 }
1279
1280 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1281 and the address of the save area. */
1282
1283 static rtx
1284 expand_builtin_nonlocal_goto (tree exp)
1285 {
1286 tree t_label, t_save_area;
1287 rtx r_label, r_save_area, r_fp, r_sp;
1288 rtx_insn *insn;
1289
1290 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1291 return NULL_RTX;
1292
1293 t_label = CALL_EXPR_ARG (exp, 0);
1294 t_save_area = CALL_EXPR_ARG (exp, 1);
1295
1296 r_label = expand_normal (t_label);
1297 r_label = convert_memory_address (Pmode, r_label);
1298 r_save_area = expand_normal (t_save_area);
1299 r_save_area = convert_memory_address (Pmode, r_save_area);
1300 /* Copy the address of the save location to a register just in case it was
1301 based on the frame pointer. */
1302 r_save_area = copy_to_reg (r_save_area);
1303 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1304 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1305 plus_constant (Pmode, r_save_area,
1306 GET_MODE_SIZE (Pmode)));
1307
1308 crtl->has_nonlocal_goto = 1;
1309
1310 /* ??? We no longer need to pass the static chain value, afaik. */
1311 if (targetm.have_nonlocal_goto ())
1312 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1313 else
1314 {
1315 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1316 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1317
1318 r_label = copy_to_reg (r_label);
1319
1320 /* Restore the frame pointer and stack pointer. We must use a
1321 temporary since the setjmp buffer may be a local. */
1322 r_fp = copy_to_reg (r_fp);
1323 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1324
1325 /* Ensure the frame pointer move is not optimized. */
1326 emit_insn (gen_blockage ());
1327 emit_clobber (hard_frame_pointer_rtx);
1328 emit_clobber (frame_pointer_rtx);
1329 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1330
1331 /* USE of hard_frame_pointer_rtx added for consistency;
1332 not clear if really needed. */
1333 emit_use (hard_frame_pointer_rtx);
1334 emit_use (stack_pointer_rtx);
1335
1336 /* If the architecture is using a GP register, we must
1337 conservatively assume that the target function makes use of it.
1338 The prologue of functions with nonlocal gotos must therefore
1339 initialize the GP register to the appropriate value, and we
1340 must then make sure that this value is live at the point
1341 of the jump. (Note that this doesn't necessarily apply
1342 to targets with a nonlocal_goto pattern; they are free
1343 to implement it in their own way. Note also that this is
1344 a no-op if the GP register is a global invariant.) */
1345 unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
1346 if (regnum != INVALID_REGNUM && fixed_regs[regnum])
1347 emit_use (pic_offset_table_rtx);
1348
1349 emit_indirect_jump (r_label);
1350 }
1351
1352 /* Search backwards to the jump insn and mark it as a
1353 non-local goto. */
1354 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1355 {
1356 if (JUMP_P (insn))
1357 {
1358 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1359 break;
1360 }
1361 else if (CALL_P (insn))
1362 break;
1363 }
1364
1365 return const0_rtx;
1366 }
1367
1368 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1369 (not all will be used on all machines) that was passed to __builtin_setjmp.
1370 It updates the stack pointer in that block to the current value. This is
1371 also called directly by the SJLJ exception handling code. */
1372
1373 void
1374 expand_builtin_update_setjmp_buf (rtx buf_addr)
1375 {
1376 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1377 buf_addr = convert_memory_address (Pmode, buf_addr);
1378 rtx stack_save
1379 = gen_rtx_MEM (sa_mode,
1380 memory_address
1381 (sa_mode,
1382 plus_constant (Pmode, buf_addr,
1383 2 * GET_MODE_SIZE (Pmode))));
1384
1385 emit_stack_save (SAVE_NONLOCAL, &stack_save);
1386 }
1387
1388 /* Expand a call to __builtin_prefetch. For a target that does not support
1389 data prefetch, evaluate the memory address argument in case it has side
1390 effects. */
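/* For example, __builtin_prefetch (p) is handled like
   __builtin_prefetch (p, 0, 3): the read/write argument defaults to 0
   (read) and the locality argument defaults to 3.  */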
1391
1392 static void
1393 expand_builtin_prefetch (tree exp)
1394 {
1395 tree arg0, arg1, arg2;
1396 int nargs;
1397 rtx op0, op1, op2;
1398
1399 if (!validate_arglist (exp, POINTER_TYPE, 0))
1400 return;
1401
1402 arg0 = CALL_EXPR_ARG (exp, 0);
1403
1404 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1405 zero (read) and argument 2 (locality) defaults to 3 (high degree of
1406 locality). */
1407 nargs = call_expr_nargs (exp);
1408 if (nargs > 1)
1409 arg1 = CALL_EXPR_ARG (exp, 1);
1410 else
1411 arg1 = integer_zero_node;
1412 if (nargs > 2)
1413 arg2 = CALL_EXPR_ARG (exp, 2);
1414 else
1415 arg2 = integer_three_node;
1416
1417 /* Argument 0 is an address. */
1418 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1419
1420 /* Argument 1 (read/write flag) must be a compile-time constant int. */
1421 if (TREE_CODE (arg1) != INTEGER_CST)
1422 {
1423 error ("second argument to %<__builtin_prefetch%> must be a constant");
1424 arg1 = integer_zero_node;
1425 }
1426 op1 = expand_normal (arg1);
1427 /* Argument 1 must be either zero or one. */
1428 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1429 {
1430 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1431 " using zero");
1432 op1 = const0_rtx;
1433 }
1434
1435 /* Argument 2 (locality) must be a compile-time constant int. */
1436 if (TREE_CODE (arg2) != INTEGER_CST)
1437 {
1438 error ("third argument to %<__builtin_prefetch%> must be a constant");
1439 arg2 = integer_zero_node;
1440 }
1441 op2 = expand_normal (arg2);
1442 /* Argument 2 must be 0, 1, 2, or 3. */
1443 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1444 {
1445 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1446 op2 = const0_rtx;
1447 }
1448
1449 if (targetm.have_prefetch ())
1450 {
1451 class expand_operand ops[3];
1452
1453 create_address_operand (&ops[0], op0);
1454 create_integer_operand (&ops[1], INTVAL (op1));
1455 create_integer_operand (&ops[2], INTVAL (op2));
1456 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
1457 return;
1458 }
1459
1460 /* Don't do anything with direct references to volatile memory, but
1461 generate code to handle other side effects. */
1462 if (!MEM_P (op0) && side_effects_p (op0))
1463 emit_insn (op0);
1464 }
1465
1466 /* Get a MEM rtx for expression EXP which is the address of an operand
1467 to be used in a string instruction (cmpstrsi, cpymemsi, ..). LEN is
1468 the maximum length of the block of memory that might be accessed or
1469 NULL if unknown. */
1470
1471 static rtx
1472 get_memory_rtx (tree exp, tree len)
1473 {
1474 tree orig_exp = exp;
1475 rtx addr, mem;
1476
1477 /* When EXP is an unresolved SAVE_EXPR, MEM_ATTRS can still be derived
1478 from its expression; for expr->a.b only <variable>.a.b is recorded. */
1479 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1480 exp = TREE_OPERAND (exp, 0);
1481
1482 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1483 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1484
1485 /* Get an expression we can use to find the attributes to assign to MEM.
1486 First remove any nops. */
1487 while (CONVERT_EXPR_P (exp)
1488 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1489 exp = TREE_OPERAND (exp, 0);
1490
1491 /* Build a MEM_REF representing the whole accessed area as a byte blob
1492 (as builtin stringops may alias with anything). */
1493 exp = fold_build2 (MEM_REF,
1494 build_array_type (char_type_node,
1495 build_range_type (sizetype,
1496 size_one_node, len)),
1497 exp, build_int_cst (ptr_type_node, 0));
1498
1499 /* If the MEM_REF has no acceptable address, try to get the base object
1500 from the original address we got, and build an all-aliasing
1501 unknown-sized access to that one. */
1502 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1503 set_mem_attributes (mem, exp, 0);
1504 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1505 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1506 0))))
1507 {
1508 exp = build_fold_addr_expr (exp);
1509 exp = fold_build2 (MEM_REF,
1510 build_array_type (char_type_node,
1511 build_range_type (sizetype,
1512 size_zero_node,
1513 NULL)),
1514 exp, build_int_cst (ptr_type_node, 0));
1515 set_mem_attributes (mem, exp, 0);
1516 }
1517 set_mem_alias_set (mem, 0);
1518 return mem;
1519 }
1520
1521 /* Built-in functions to perform an untyped call and return. */
1522
1523 #define apply_args_mode \
1524 (this_target_builtins->x_apply_args_mode)
1525 #define apply_result_mode \
1526 (this_target_builtins->x_apply_result_mode)
1527
1528 /* Return the size required for the block returned by __builtin_apply_args,
1529 and initialize apply_args_mode. */
1530
1531 static int
1532 apply_args_size (void)
1533 {
1534 static int size = -1;
1535 int align;
1536 unsigned int regno;
1537
1538 /* The values computed by this function never change. */
1539 if (size < 0)
1540 {
1541 /* The first value is the incoming arg-pointer. */
1542 size = GET_MODE_SIZE (Pmode);
1543
1544 /* The second value is the structure value address unless this is
1545 passed as an "invisible" first argument. */
1546 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1547 size += GET_MODE_SIZE (Pmode);
1548
1549 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1550 if (FUNCTION_ARG_REGNO_P (regno))
1551 {
1552 fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);
1553
1554 gcc_assert (mode != VOIDmode);
1555
1556 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1557 if (size % align != 0)
1558 size = CEIL (size, align) * align;
1559 size += GET_MODE_SIZE (mode);
1560 apply_args_mode[regno] = mode;
1561 }
1562 else
1563 {
1564 apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1565 }
1566 }
1567 return size;
1568 }
1569
1570 /* Return the size required for the block returned by __builtin_apply,
1571 and initialize apply_result_mode. */
1572
1573 static int
1574 apply_result_size (void)
1575 {
1576 static int size = -1;
1577 int align, regno;
1578
1579 /* The values computed by this function never change. */
1580 if (size < 0)
1581 {
1582 size = 0;
1583
1584 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1585 if (targetm.calls.function_value_regno_p (regno))
1586 {
1587 fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);
1588
1589 gcc_assert (mode != VOIDmode);
1590
1591 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1592 if (size % align != 0)
1593 size = CEIL (size, align) * align;
1594 size += GET_MODE_SIZE (mode);
1595 apply_result_mode[regno] = mode;
1596 }
1597 else
1598 apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1599
1600 /* Allow targets that use untyped_call and untyped_return to override
1601 the size so that machine-specific information can be stored here. */
1602 #ifdef APPLY_RESULT_SIZE
1603 size = APPLY_RESULT_SIZE;
1604 #endif
1605 }
1606 return size;
1607 }
1608
1609 /* Create a vector describing the result block RESULT. If SAVEP is true,
1610 the result block is used to save the values; otherwise it is used to
1611 restore the values. */
1612
1613 static rtx
1614 result_vector (int savep, rtx result)
1615 {
1616 int regno, size, align, nelts;
1617 fixed_size_mode mode;
1618 rtx reg, mem;
1619 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1620
1621 size = nelts = 0;
1622 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1623 if ((mode = apply_result_mode[regno]) != VOIDmode)
1624 {
1625 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1626 if (size % align != 0)
1627 size = CEIL (size, align) * align;
1628 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1629 mem = adjust_address (result, mode, size);
1630 savevec[nelts++] = (savep
1631 ? gen_rtx_SET (mem, reg)
1632 : gen_rtx_SET (reg, mem));
1633 size += GET_MODE_SIZE (mode);
1634 }
1635 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1636 }
1637
1638 /* Save the state required to perform an untyped call with the same
1639 arguments as were passed to the current function. */
1640
1641 static rtx
1642 expand_builtin_apply_args_1 (void)
1643 {
1644 rtx registers, tem;
1645 int size, align, regno;
1646 fixed_size_mode mode;
1647 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1648
1649 /* Create a block where the arg-pointer, structure value address,
1650 and argument registers can be saved. */
1651 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1652
1653 /* Walk past the arg-pointer and structure value address. */
1654 size = GET_MODE_SIZE (Pmode);
1655 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1656 size += GET_MODE_SIZE (Pmode);
1657
1658 /* Save each register used in calling a function to the block. */
1659 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1660 if ((mode = apply_args_mode[regno]) != VOIDmode)
1661 {
1662 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1663 if (size % align != 0)
1664 size = CEIL (size, align) * align;
1665
1666 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1667
1668 emit_move_insn (adjust_address (registers, mode, size), tem);
1669 size += GET_MODE_SIZE (mode);
1670 }
1671
1672 /* Save the arg pointer to the block. */
1673 tem = copy_to_reg (crtl->args.internal_arg_pointer);
1674 /* We need the pointer as the caller actually passed the arguments to us,
1675 as we might have pretended they were passed. Make sure it's a valid
1676 operand, as emit_move_insn isn't expected to handle a PLUS. */
1677 if (STACK_GROWS_DOWNWARD)
1678 tem
1679 = force_operand (plus_constant (Pmode, tem,
1680 crtl->args.pretend_args_size),
1681 NULL_RTX);
1682 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1683
1684 size = GET_MODE_SIZE (Pmode);
1685
1686 /* Save the structure value address unless this is passed as an
1687 "invisible" first argument. */
1688 if (struct_incoming_value)
1689 emit_move_insn (adjust_address (registers, Pmode, size),
1690 copy_to_reg (struct_incoming_value));
1691
1692 /* Return the address of the block. */
1693 return copy_addr_to_reg (XEXP (registers, 0));
1694 }
1695
1696 /* __builtin_apply_args returns a block of memory allocated on
1697 the stack into which is stored the arg pointer, structure
1698 value address, static chain, and all the registers that might
1699 possibly be used in performing a function call. The code is
1700 moved to the start of the function so the incoming values are
1701 saved. */
1702
1703 static rtx
1704 expand_builtin_apply_args (void)
1705 {
1706 /* Don't do __builtin_apply_args more than once in a function.
1707 Save the result of the first call and reuse it. */
1708 if (apply_args_value != 0)
1709 return apply_args_value;
1710 {
1711 /* When this function is called, it means that registers must be
1712 saved on entry to this function. So we migrate the
1713 call to the first insn of this function. */
1714 rtx temp;
1715
1716 start_sequence ();
1717 temp = expand_builtin_apply_args_1 ();
1718 rtx_insn *seq = get_insns ();
1719 end_sequence ();
1720
1721 apply_args_value = temp;
1722
1723 /* Put the insns after the NOTE that starts the function.
1724 If this is inside a start_sequence, make the outer-level insn
1725 chain current, so the code is placed at the start of the
1726 function. If internal_arg_pointer is a non-virtual pseudo,
1727 it needs to be placed after the function that initializes
1728 that pseudo. */
1729 push_topmost_sequence ();
1730 if (REG_P (crtl->args.internal_arg_pointer)
1731 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1732 emit_insn_before (seq, parm_birth_insn);
1733 else
1734 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1735 pop_topmost_sequence ();
1736 return temp;
1737 }
1738 }
1739
1740 /* Perform an untyped call and save the state required to perform an
1741 untyped return of whatever value was returned by the given function. */
1742
1743 static rtx
1744 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1745 {
1746 int size, align, regno;
1747 fixed_size_mode mode;
1748 rtx incoming_args, result, reg, dest, src;
1749 rtx_call_insn *call_insn;
1750 rtx old_stack_level = 0;
1751 rtx call_fusage = 0;
1752 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1753
1754 arguments = convert_memory_address (Pmode, arguments);
1755
1756 /* Create a block where the return registers can be saved. */
1757 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1758
1759 /* Fetch the arg pointer from the ARGUMENTS block. */
1760 incoming_args = gen_reg_rtx (Pmode);
1761 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1762 if (!STACK_GROWS_DOWNWARD)
1763 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1764 incoming_args, 0, OPTAB_LIB_WIDEN);
1765
1766 /* Push a new argument block and copy the arguments. Do not allow
1767 the (potential) memcpy call below to interfere with our stack
1768 manipulations. */
1769 do_pending_stack_adjust ();
1770 NO_DEFER_POP;
1771
1772 /* Save the stack with nonlocal if available. */
1773 if (targetm.have_save_stack_nonlocal ())
1774 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1775 else
1776 emit_stack_save (SAVE_BLOCK, &old_stack_level);
1777
1778 /* Allocate a block of memory onto the stack and copy the memory
1779 arguments to the outgoing arguments address. We can pass TRUE
1780 as the 4th argument because we just saved the stack pointer
1781 and will restore it right after the call. */
1782 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
1783
1784 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1785 may have already set current_function_calls_alloca to true.
1786 current_function_calls_alloca won't be set if argsize is zero,
1787 so we have to guarantee need_drap is true here. */
1788 if (SUPPORTS_STACK_ALIGNMENT)
1789 crtl->need_drap = true;
1790
1791 dest = virtual_outgoing_args_rtx;
1792 if (!STACK_GROWS_DOWNWARD)
1793 {
1794 if (CONST_INT_P (argsize))
1795 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1796 else
1797 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1798 }
1799 dest = gen_rtx_MEM (BLKmode, dest);
1800 set_mem_align (dest, PARM_BOUNDARY);
1801 src = gen_rtx_MEM (BLKmode, incoming_args);
1802 set_mem_align (src, PARM_BOUNDARY);
1803 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1804
1805 /* Refer to the argument block. */
1806 apply_args_size ();
1807 arguments = gen_rtx_MEM (BLKmode, arguments);
1808 set_mem_align (arguments, PARM_BOUNDARY);
1809
1810 /* Walk past the arg-pointer and structure value address. */
1811 size = GET_MODE_SIZE (Pmode);
1812 if (struct_value)
1813 size += GET_MODE_SIZE (Pmode);
1814
1815 /* Restore each of the registers previously saved. Make USE insns
1816 for each of these registers for use in making the call. */
1817 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1818 if ((mode = apply_args_mode[regno]) != VOIDmode)
1819 {
1820 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1821 if (size % align != 0)
1822 size = CEIL (size, align) * align;
1823 reg = gen_rtx_REG (mode, regno);
1824 emit_move_insn (reg, adjust_address (arguments, mode, size));
1825 use_reg (&call_fusage, reg);
1826 size += GET_MODE_SIZE (mode);
1827 }
1828
1829 /* Restore the structure value address unless this is passed as an
1830 "invisible" first argument. */
1831 size = GET_MODE_SIZE (Pmode);
1832 if (struct_value)
1833 {
1834 rtx value = gen_reg_rtx (Pmode);
1835 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1836 emit_move_insn (struct_value, value);
1837 if (REG_P (struct_value))
1838 use_reg (&call_fusage, struct_value);
1839 }
1840
1841 /* All arguments and registers used for the call are set up by now! */
1842 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1843
1844 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1845 and we don't want to load it into a register as an optimization,
1846 because prepare_call_address already did it if it should be done. */
1847 if (GET_CODE (function) != SYMBOL_REF)
1848 function = memory_address (FUNCTION_MODE, function);
1849
1850 /* Generate the actual call instruction and save the return value. */
1851 if (targetm.have_untyped_call ())
1852 {
1853 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1854 emit_call_insn (targetm.gen_untyped_call (mem, result,
1855 result_vector (1, result)));
1856 }
1857 else if (targetm.have_call_value ())
1858 {
1859 rtx valreg = 0;
1860
1861 /* Locate the unique return register. It is not possible to
1862 express a call that sets more than one return register using
1863 call_value; use untyped_call for that. In fact, untyped_call
1864 only needs to save the return registers in the given block. */
1865 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1866 if ((mode = apply_result_mode[regno]) != VOIDmode)
1867 {
1868 gcc_assert (!valreg); /* have_untyped_call required. */
1869
1870 valreg = gen_rtx_REG (mode, regno);
1871 }
1872
1873 emit_insn (targetm.gen_call_value (valreg,
1874 gen_rtx_MEM (FUNCTION_MODE, function),
1875 const0_rtx, NULL_RTX, const0_rtx));
1876
1877 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1878 }
1879 else
1880 gcc_unreachable ();
1881
1882 /* Find the CALL insn we just emitted, and attach the register usage
1883 information. */
1884 call_insn = last_call_insn ();
1885 add_function_usage_to (call_insn, call_fusage);
1886
1887 /* Restore the stack. */
1888 if (targetm.have_save_stack_nonlocal ())
1889 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1890 else
1891 emit_stack_restore (SAVE_BLOCK, old_stack_level);
1892 fixup_args_size_notes (call_insn, get_last_insn (), 0);
1893
1894 OK_DEFER_POP;
1895
1896 /* Return the address of the result block. */
1897 result = copy_addr_to_reg (XEXP (result, 0));
1898 return convert_memory_address (ptr_mode, result);
1899 }
1900
1901 /* Perform an untyped return. */
1902
1903 static void
1904 expand_builtin_return (rtx result)
1905 {
1906 int size, align, regno;
1907 fixed_size_mode mode;
1908 rtx reg;
1909 rtx_insn *call_fusage = 0;
1910
1911 result = convert_memory_address (Pmode, result);
1912
1913 apply_result_size ();
1914 result = gen_rtx_MEM (BLKmode, result);
1915
1916 if (targetm.have_untyped_return ())
1917 {
1918 rtx vector = result_vector (0, result);
1919 emit_jump_insn (targetm.gen_untyped_return (result, vector));
1920 emit_barrier ();
1921 return;
1922 }
1923
1924 /* Restore the return value and note that each value is used. */
1925 size = 0;
1926 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1927 if ((mode = apply_result_mode[regno]) != VOIDmode)
1928 {
1929 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1930 if (size % align != 0)
1931 size = CEIL (size, align) * align;
1932 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1933 emit_move_insn (reg, adjust_address (result, mode, size));
1934
1935 push_to_sequence (call_fusage);
1936 emit_use (reg);
1937 call_fusage = get_insns ();
1938 end_sequence ();
1939 size += GET_MODE_SIZE (mode);
1940 }
1941
1942 /* Put the USE insns before the return. */
1943 emit_insn (call_fusage);
1944
1945 /* Return whatever value was restored by jumping directly to the end
1946 of the function. */
1947 expand_naked_return ();
1948 }
1949
1950 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1951
1952 static enum type_class
1953 type_to_class (tree type)
1954 {
1955 switch (TREE_CODE (type))
1956 {
1957 case VOID_TYPE: return void_type_class;
1958 case INTEGER_TYPE: return integer_type_class;
1959 case ENUMERAL_TYPE: return enumeral_type_class;
1960 case BOOLEAN_TYPE: return boolean_type_class;
1961 case POINTER_TYPE: return pointer_type_class;
1962 case REFERENCE_TYPE: return reference_type_class;
1963 case OFFSET_TYPE: return offset_type_class;
1964 case REAL_TYPE: return real_type_class;
1965 case COMPLEX_TYPE: return complex_type_class;
1966 case FUNCTION_TYPE: return function_type_class;
1967 case METHOD_TYPE: return method_type_class;
1968 case RECORD_TYPE: return record_type_class;
1969 case UNION_TYPE:
1970 case QUAL_UNION_TYPE: return union_type_class;
1971 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1972 ? string_type_class : array_type_class);
1973 case LANG_TYPE: return lang_type_class;
1974 default: return no_type_class;
1975 }
1976 }
1977
1978 /* Expand a call EXP to __builtin_classify_type. */
1979
1980 static rtx
1981 expand_builtin_classify_type (tree exp)
1982 {
1983 if (call_expr_nargs (exp))
1984 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1985 return GEN_INT (no_type_class);
1986 }
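/* Illustrative example: for __builtin_classify_type (i) with an int
   argument, type_to_class above maps INTEGER_TYPE to integer_type_class,
   and that enumeration value is returned as the constant result.  */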
1987
1988 /* This helper macro, meant to be used in mathfn_built_in below, determines
1989 which among a set of builtin math functions is appropriate for a given type
1990 mode. The `F' (float) and `L' (long double) are automatically generated
1991 from the 'double' case. If a function supports the _Float<N> and _Float<N>X
1992 types, there are additional types that are considered with 'F32', 'F64',
1993 'F128', etc. suffixes. */
1994 #define CASE_MATHFN(MATHFN) \
1995 CASE_CFN_##MATHFN: \
1996 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1997 fcodel = BUILT_IN_##MATHFN##L ; break;
1998 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1999 types. */
2000 #define CASE_MATHFN_FLOATN(MATHFN) \
2001 CASE_CFN_##MATHFN: \
2002 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
2003 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
2004 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
2005 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
2006 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
2007 break;
2008 /* Similar to above, but appends _R after any F/L suffix. */
2009 #define CASE_MATHFN_REENT(MATHFN) \
2010 case CFN_BUILT_IN_##MATHFN##_R: \
2011 case CFN_BUILT_IN_##MATHFN##F_R: \
2012 case CFN_BUILT_IN_##MATHFN##L_R: \
2013 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
2014 fcodel = BUILT_IN_##MATHFN##L_R ; break;
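/* For instance, CASE_MATHFN (POW) used below expands roughly to:

       CASE_CFN_POW:
	 fcode = BUILT_IN_POW; fcodef = BUILT_IN_POWF;
	 fcodel = BUILT_IN_POWL; break;

   so one switch entry covers the double, float and long double variants.  */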
2015
2016 /* Return a function equivalent to FN but operating on floating-point
2017 values of type TYPE, or END_BUILTINS if no such function exists.
2018 This is purely an operation on function codes; it does not guarantee
2019 that the target actually has an implementation of the function. */
2020
2021 static built_in_function
2022 mathfn_built_in_2 (tree type, combined_fn fn)
2023 {
2024 tree mtype;
2025 built_in_function fcode, fcodef, fcodel;
2026 built_in_function fcodef16 = END_BUILTINS;
2027 built_in_function fcodef32 = END_BUILTINS;
2028 built_in_function fcodef64 = END_BUILTINS;
2029 built_in_function fcodef128 = END_BUILTINS;
2030 built_in_function fcodef32x = END_BUILTINS;
2031 built_in_function fcodef64x = END_BUILTINS;
2032 built_in_function fcodef128x = END_BUILTINS;
2033
2034 switch (fn)
2035 {
2036 CASE_MATHFN (ACOS)
2037 CASE_MATHFN (ACOSH)
2038 CASE_MATHFN (ASIN)
2039 CASE_MATHFN (ASINH)
2040 CASE_MATHFN (ATAN)
2041 CASE_MATHFN (ATAN2)
2042 CASE_MATHFN (ATANH)
2043 CASE_MATHFN (CBRT)
2044 CASE_MATHFN_FLOATN (CEIL)
2045 CASE_MATHFN (CEXPI)
2046 CASE_MATHFN_FLOATN (COPYSIGN)
2047 CASE_MATHFN (COS)
2048 CASE_MATHFN (COSH)
2049 CASE_MATHFN (DREM)
2050 CASE_MATHFN (ERF)
2051 CASE_MATHFN (ERFC)
2052 CASE_MATHFN (EXP)
2053 CASE_MATHFN (EXP10)
2054 CASE_MATHFN (EXP2)
2055 CASE_MATHFN (EXPM1)
2056 CASE_MATHFN (FABS)
2057 CASE_MATHFN (FDIM)
2058 CASE_MATHFN_FLOATN (FLOOR)
2059 CASE_MATHFN_FLOATN (FMA)
2060 CASE_MATHFN_FLOATN (FMAX)
2061 CASE_MATHFN_FLOATN (FMIN)
2062 CASE_MATHFN (FMOD)
2063 CASE_MATHFN (FREXP)
2064 CASE_MATHFN (GAMMA)
2065 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
2066 CASE_MATHFN (HUGE_VAL)
2067 CASE_MATHFN (HYPOT)
2068 CASE_MATHFN (ILOGB)
2069 CASE_MATHFN (ICEIL)
2070 CASE_MATHFN (IFLOOR)
2071 CASE_MATHFN (INF)
2072 CASE_MATHFN (IRINT)
2073 CASE_MATHFN (IROUND)
2074 CASE_MATHFN (ISINF)
2075 CASE_MATHFN (J0)
2076 CASE_MATHFN (J1)
2077 CASE_MATHFN (JN)
2078 CASE_MATHFN (LCEIL)
2079 CASE_MATHFN (LDEXP)
2080 CASE_MATHFN (LFLOOR)
2081 CASE_MATHFN (LGAMMA)
2082 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
2083 CASE_MATHFN (LLCEIL)
2084 CASE_MATHFN (LLFLOOR)
2085 CASE_MATHFN (LLRINT)
2086 CASE_MATHFN (LLROUND)
2087 CASE_MATHFN (LOG)
2088 CASE_MATHFN (LOG10)
2089 CASE_MATHFN (LOG1P)
2090 CASE_MATHFN (LOG2)
2091 CASE_MATHFN (LOGB)
2092 CASE_MATHFN (LRINT)
2093 CASE_MATHFN (LROUND)
2094 CASE_MATHFN (MODF)
2095 CASE_MATHFN (NAN)
2096 CASE_MATHFN (NANS)
2097 CASE_MATHFN_FLOATN (NEARBYINT)
2098 CASE_MATHFN (NEXTAFTER)
2099 CASE_MATHFN (NEXTTOWARD)
2100 CASE_MATHFN (POW)
2101 CASE_MATHFN (POWI)
2102 CASE_MATHFN (POW10)
2103 CASE_MATHFN (REMAINDER)
2104 CASE_MATHFN (REMQUO)
2105 CASE_MATHFN_FLOATN (RINT)
2106 CASE_MATHFN_FLOATN (ROUND)
2107 CASE_MATHFN_FLOATN (ROUNDEVEN)
2108 CASE_MATHFN (SCALB)
2109 CASE_MATHFN (SCALBLN)
2110 CASE_MATHFN (SCALBN)
2111 CASE_MATHFN (SIGNBIT)
2112 CASE_MATHFN (SIGNIFICAND)
2113 CASE_MATHFN (SIN)
2114 CASE_MATHFN (SINCOS)
2115 CASE_MATHFN (SINH)
2116 CASE_MATHFN_FLOATN (SQRT)
2117 CASE_MATHFN (TAN)
2118 CASE_MATHFN (TANH)
2119 CASE_MATHFN (TGAMMA)
2120 CASE_MATHFN_FLOATN (TRUNC)
2121 CASE_MATHFN (Y0)
2122 CASE_MATHFN (Y1)
2123 CASE_MATHFN (YN)
2124
2125 default:
2126 return END_BUILTINS;
2127 }
2128
2129 mtype = TYPE_MAIN_VARIANT (type);
2130 if (mtype == double_type_node)
2131 return fcode;
2132 else if (mtype == float_type_node)
2133 return fcodef;
2134 else if (mtype == long_double_type_node)
2135 return fcodel;
2136 else if (mtype == float16_type_node)
2137 return fcodef16;
2138 else if (mtype == float32_type_node)
2139 return fcodef32;
2140 else if (mtype == float64_type_node)
2141 return fcodef64;
2142 else if (mtype == float128_type_node)
2143 return fcodef128;
2144 else if (mtype == float32x_type_node)
2145 return fcodef32x;
2146 else if (mtype == float64x_type_node)
2147 return fcodef64x;
2148 else if (mtype == float128x_type_node)
2149 return fcodef128x;
2150 else
2151 return END_BUILTINS;
2152 }
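/* Illustrative example: given float_type_node and the combined code for
   pow, the type dispatch above selects BUILT_IN_POWF; a type matching
   none of the checked floating-point types yields END_BUILTINS.  */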
2153
2154 /* Return a mathematical function equivalent to FN but operating directly on TYPE,
2155 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2156 otherwise use the explicit declaration. If we can't do the conversion,
2157 return null. */
2158
2159 static tree
2160 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2161 {
2162 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2163 if (fcode2 == END_BUILTINS)
2164 return NULL_TREE;
2165
2166 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2167 return NULL_TREE;
2168
2169 return builtin_decl_explicit (fcode2);
2170 }
2171
2172 /* Like mathfn_built_in_1, but always use the implicit declarations. */
2173
2174 tree
2175 mathfn_built_in (tree type, combined_fn fn)
2176 {
2177 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2178 }
2179
2180 /* Like mathfn_built_in_1, but take a built_in_function and
2181 always use the implicit declarations. */
2182
2183 tree
2184 mathfn_built_in (tree type, enum built_in_function fn)
2185 {
2186 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
2187 }
2188
2189 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2190 return its code, otherwise return IFN_LAST. Note that this function
2191 only tests whether the function is defined in internals.def, not whether
2192 it is actually available on the target. */
2193
2194 internal_fn
2195 associated_internal_fn (tree fndecl)
2196 {
2197 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2198 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2199 switch (DECL_FUNCTION_CODE (fndecl))
2200 {
2201 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2202 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2203 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2204 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2205 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2206 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2207 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2208 #include "internal-fn.def"
2209
2210 CASE_FLT_FN (BUILT_IN_POW10):
2211 return IFN_EXP10;
2212
2213 CASE_FLT_FN (BUILT_IN_DREM):
2214 return IFN_REMAINDER;
2215
2216 CASE_FLT_FN (BUILT_IN_SCALBN):
2217 CASE_FLT_FN (BUILT_IN_SCALBLN):
2218 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2219 return IFN_LDEXP;
2220 return IFN_LAST;
2221
2222 default:
2223 return IFN_LAST;
2224 }
2225 }
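/* For example, per the explicit cases above, pow10/pow10f/pow10l map to
   IFN_EXP10, drem maps to IFN_REMAINDER, and scalbn/scalbln map to
   IFN_LDEXP only when the return type uses a radix-2 floating-point
   format.  */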
2226
2227 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2228 on the current target by a call to an internal function, return the
2229 code of that internal function, otherwise return IFN_LAST. The caller
2230 is responsible for ensuring that any side-effects of the built-in
2231 call are dealt with correctly. E.g. if CALL sets errno, the caller
2232 must decide that the errno result isn't needed or make it available
2233 in some other way. */
2234
2235 internal_fn
2236 replacement_internal_fn (gcall *call)
2237 {
2238 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2239 {
2240 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2241 if (ifn != IFN_LAST)
2242 {
2243 tree_pair types = direct_internal_fn_types (ifn, call);
2244 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2245 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2246 return ifn;
2247 }
2248 }
2249 return IFN_LAST;
2250 }
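/* Illustrative example: assuming the target implements the relevant optab
   for the basic block's optimization type, a call to sqrt () can be
   reported here as replaceable by IFN_SQRT; the caller still has to deal
   with any errno side effect before actually substituting it.  */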
2251
2252 /* Expand a call to the builtin ternary math functions (fma).
2253 Return NULL_RTX if a normal call should be emitted rather than expanding the
2254 function in-line. EXP is the expression that is a call to the builtin
2255 function; if convenient, the result should be placed in TARGET.
2256 SUBTARGET may be used as the target for computing one of EXP's
2257 operands. */
2258
2259 static rtx
2260 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2261 {
2262 optab builtin_optab;
2263 rtx op0, op1, op2, result;
2264 rtx_insn *insns;
2265 tree fndecl = get_callee_fndecl (exp);
2266 tree arg0, arg1, arg2;
2267 machine_mode mode;
2268
2269 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2270 return NULL_RTX;
2271
2272 arg0 = CALL_EXPR_ARG (exp, 0);
2273 arg1 = CALL_EXPR_ARG (exp, 1);
2274 arg2 = CALL_EXPR_ARG (exp, 2);
2275
2276 switch (DECL_FUNCTION_CODE (fndecl))
2277 {
2278 CASE_FLT_FN (BUILT_IN_FMA):
2279 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2280 builtin_optab = fma_optab; break;
2281 default:
2282 gcc_unreachable ();
2283 }
2284
2285 /* Make a suitable register to place result in. */
2286 mode = TYPE_MODE (TREE_TYPE (exp));
2287
2288 /* Before working hard, check whether the instruction is available. */
2289 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2290 return NULL_RTX;
2291
2292 result = gen_reg_rtx (mode);
2293
2294 /* Always stabilize the argument list. */
2295 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2296 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2297 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2298
2299 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2300 op1 = expand_normal (arg1);
2301 op2 = expand_normal (arg2);
2302
2303 start_sequence ();
2304
2305 /* Compute into RESULT.
2306 Set RESULT to wherever the result comes back. */
2307 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2308 result, 0);
2309
2310 /* If we were unable to expand via the builtin, stop the sequence
2311 (without outputting the insns) and call to the library function
2312 with the stabilized argument list. */
2313 if (result == 0)
2314 {
2315 end_sequence ();
2316 return expand_call (exp, target, target == const0_rtx);
2317 }
2318
2319 /* Output the entire sequence. */
2320 insns = get_insns ();
2321 end_sequence ();
2322 emit_insn (insns);
2323
2324 return result;
2325 }
2326
2327 /* Expand a call to the builtin sin and cos math functions.
2328 Return NULL_RTX if a normal call should be emitted rather than expanding the
2329 function in-line. EXP is the expression that is a call to the builtin
2330 function; if convenient, the result should be placed in TARGET.
2331 SUBTARGET may be used as the target for computing one of EXP's
2332 operands. */
2333
2334 static rtx
2335 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2336 {
2337 optab builtin_optab;
2338 rtx op0;
2339 rtx_insn *insns;
2340 tree fndecl = get_callee_fndecl (exp);
2341 machine_mode mode;
2342 tree arg;
2343
2344 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2345 return NULL_RTX;
2346
2347 arg = CALL_EXPR_ARG (exp, 0);
2348
2349 switch (DECL_FUNCTION_CODE (fndecl))
2350 {
2351 CASE_FLT_FN (BUILT_IN_SIN):
2352 CASE_FLT_FN (BUILT_IN_COS):
2353 builtin_optab = sincos_optab; break;
2354 default:
2355 gcc_unreachable ();
2356 }
2357
2358 /* Make a suitable register to place result in. */
2359 mode = TYPE_MODE (TREE_TYPE (exp));
2360
2361 /* Check if the sincos insn is available; otherwise fall back
2362 to the sin or cos insn. */
2363 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2364 switch (DECL_FUNCTION_CODE (fndecl))
2365 {
2366 CASE_FLT_FN (BUILT_IN_SIN):
2367 builtin_optab = sin_optab; break;
2368 CASE_FLT_FN (BUILT_IN_COS):
2369 builtin_optab = cos_optab; break;
2370 default:
2371 gcc_unreachable ();
2372 }
2373
2374 /* Before working hard, check whether the instruction is available. */
2375 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2376 {
2377 rtx result = gen_reg_rtx (mode);
2378
2379 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2380 need to expand the argument again. This way, we will not perform
2381 side effects more than once. */
2382 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2383
2384 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2385
2386 start_sequence ();
2387
2388 /* Compute into RESULT.
2389 Set RESULT to wherever the result comes back. */
2390 if (builtin_optab == sincos_optab)
2391 {
2392 int ok;
2393
2394 switch (DECL_FUNCTION_CODE (fndecl))
2395 {
2396 CASE_FLT_FN (BUILT_IN_SIN):
2397 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2398 break;
2399 CASE_FLT_FN (BUILT_IN_COS):
2400 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2401 break;
2402 default:
2403 gcc_unreachable ();
2404 }
2405 gcc_assert (ok);
2406 }
2407 else
2408 result = expand_unop (mode, builtin_optab, op0, result, 0);
2409
2410 if (result != 0)
2411 {
2412 /* Output the entire sequence. */
2413 insns = get_insns ();
2414 end_sequence ();
2415 emit_insn (insns);
2416 return result;
2417 }
2418
2419 /* If we were unable to expand via the builtin, stop the sequence
2420 (without outputting the insns) and call to the library function
2421 with the stabilized argument list. */
2422 end_sequence ();
2423 }
2424
2425 return expand_call (exp, target, target == const0_rtx);
2426 }
2427
2428 /* Given an interclass math builtin decl FNDECL and its argument ARG
2429 return an RTL instruction code that implements the functionality.
2430 If that isn't possible or available return CODE_FOR_nothing. */
2431
2432 static enum insn_code
2433 interclass_mathfn_icode (tree arg, tree fndecl)
2434 {
2435 bool errno_set = false;
2436 optab builtin_optab = unknown_optab;
2437 machine_mode mode;
2438
2439 switch (DECL_FUNCTION_CODE (fndecl))
2440 {
2441 CASE_FLT_FN (BUILT_IN_ILOGB):
2442 errno_set = true; builtin_optab = ilogb_optab; break;
2443 CASE_FLT_FN (BUILT_IN_ISINF):
2444 builtin_optab = isinf_optab; break;
2445 case BUILT_IN_ISNORMAL:
2446 case BUILT_IN_ISFINITE:
2447 CASE_FLT_FN (BUILT_IN_FINITE):
2448 case BUILT_IN_FINITED32:
2449 case BUILT_IN_FINITED64:
2450 case BUILT_IN_FINITED128:
2451 case BUILT_IN_ISINFD32:
2452 case BUILT_IN_ISINFD64:
2453 case BUILT_IN_ISINFD128:
2454 /* These builtins have no optabs (yet). */
2455 break;
2456 default:
2457 gcc_unreachable ();
2458 }
2459
2460 /* There's no easy way to detect the case we need to set EDOM. */
2461 if (flag_errno_math && errno_set)
2462 return CODE_FOR_nothing;
2463
2464 /* Optab mode depends on the mode of the input argument. */
2465 mode = TYPE_MODE (TREE_TYPE (arg));
2466
2467 if (builtin_optab)
2468 return optab_handler (builtin_optab, mode);
2469 return CODE_FOR_nothing;
2470 }
2471
2472 /* Expand a call to one of the builtin math functions that operate on
2473 floating point argument and output an integer result (ilogb, isinf,
2474 isnan, etc).
2475 Return 0 if a normal call should be emitted rather than expanding the
2476 function in-line. EXP is the expression that is a call to the builtin
2477 function; if convenient, the result should be placed in TARGET. */
2478
2479 static rtx
2480 expand_builtin_interclass_mathfn (tree exp, rtx target)
2481 {
2482 enum insn_code icode = CODE_FOR_nothing;
2483 rtx op0;
2484 tree fndecl = get_callee_fndecl (exp);
2485 machine_mode mode;
2486 tree arg;
2487
2488 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2489 return NULL_RTX;
2490
2491 arg = CALL_EXPR_ARG (exp, 0);
2492 icode = interclass_mathfn_icode (arg, fndecl);
2493 mode = TYPE_MODE (TREE_TYPE (arg));
2494
2495 if (icode != CODE_FOR_nothing)
2496 {
2497 class expand_operand ops[1];
2498 rtx_insn *last = get_last_insn ();
2499 tree orig_arg = arg;
2500
2501 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2502 need to expand the argument again. This way, we will not perform
2503 side effects more than once. */
2504 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2505
2506 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2507
2508 if (mode != GET_MODE (op0))
2509 op0 = convert_to_mode (mode, op0, 0);
2510
2511 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2512 if (maybe_legitimize_operands (icode, 0, 1, ops)
2513 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2514 return ops[0].value;
2515
2516 delete_insns_since (last);
2517 CALL_EXPR_ARG (exp, 0) = orig_arg;
2518 }
2519
2520 return NULL_RTX;
2521 }
2522
2523 /* Expand a call to the builtin sincos math function.
2524 Return NULL_RTX if a normal call should be emitted rather than expanding the
2525 function in-line. EXP is the expression that is a call to the builtin
2526 function. */
2527
2528 static rtx
2529 expand_builtin_sincos (tree exp)
2530 {
2531 rtx op0, op1, op2, target1, target2;
2532 machine_mode mode;
2533 tree arg, sinp, cosp;
2534 int result;
2535 location_t loc = EXPR_LOCATION (exp);
2536 tree alias_type, alias_off;
2537
2538 if (!validate_arglist (exp, REAL_TYPE,
2539 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2540 return NULL_RTX;
2541
2542 arg = CALL_EXPR_ARG (exp, 0);
2543 sinp = CALL_EXPR_ARG (exp, 1);
2544 cosp = CALL_EXPR_ARG (exp, 2);
2545
2546 /* Make a suitable register to place result in. */
2547 mode = TYPE_MODE (TREE_TYPE (arg));
2548
2549 /* Check if sincos insn is available, otherwise emit the call. */
2550 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2551 return NULL_RTX;
2552
2553 target1 = gen_reg_rtx (mode);
2554 target2 = gen_reg_rtx (mode);
2555
2556 op0 = expand_normal (arg);
2557 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2558 alias_off = build_int_cst (alias_type, 0);
2559 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2560 sinp, alias_off));
2561 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2562 cosp, alias_off));
2563
2564 /* Compute into target1 and target2.
2565 Set TARGET to wherever the result comes back. */
2566 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2567 gcc_assert (result);
2568
2569 /* Move target1 and target2 to the memory locations indicated
2570 by op1 and op2. */
2571 emit_move_insn (op1, target1);
2572 emit_move_insn (op2, target2);
2573
2574 return const0_rtx;
2575 }
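/* Net effect of the expansion above (illustrative): a call such as
   sincos (x, &s, &c) becomes a single sincos_optab instruction producing
   both values, followed by stores of the two result registers through the
   sin and cos pointers; the call itself yields const0_rtx.  */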
2576
2577 /* Expand a call to the internal cexpi builtin to the sincos math function.
2578 EXP is the expression that is a call to the builtin function; if convenient,
2579 the result should be placed in TARGET. */
2580
2581 static rtx
2582 expand_builtin_cexpi (tree exp, rtx target)
2583 {
2584 tree fndecl = get_callee_fndecl (exp);
2585 tree arg, type;
2586 machine_mode mode;
2587 rtx op0, op1, op2;
2588 location_t loc = EXPR_LOCATION (exp);
2589
2590 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2591 return NULL_RTX;
2592
2593 arg = CALL_EXPR_ARG (exp, 0);
2594 type = TREE_TYPE (arg);
2595 mode = TYPE_MODE (TREE_TYPE (arg));
2596
2597 /* Try expanding via a sincos optab, falling back to emitting a libcall
2598 to sincos or cexp. We are sure we have sincos or cexp because cexpi
2599 is only generated from sincos or cexp, or when either of them is available. */
2600 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2601 {
2602 op1 = gen_reg_rtx (mode);
2603 op2 = gen_reg_rtx (mode);
2604
2605 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2606
2607 /* Compute into op1 and op2. */
2608 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2609 }
2610 else if (targetm.libc_has_function (function_sincos))
2611 {
2612 tree call, fn = NULL_TREE;
2613 tree top1, top2;
2614 rtx op1a, op2a;
2615
2616 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2617 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2618 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2619 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2620 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2621 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2622 else
2623 gcc_unreachable ();
2624
2625 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2626 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2627 op1a = copy_addr_to_reg (XEXP (op1, 0));
2628 op2a = copy_addr_to_reg (XEXP (op2, 0));
2629 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2630 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2631
2632 /* Make sure not to fold the sincos call again. */
2633 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2634 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2635 call, 3, arg, top1, top2));
2636 }
2637 else
2638 {
2639 tree call, fn = NULL_TREE, narg;
2640 tree ctype = build_complex_type (type);
2641
2642 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2643 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2644 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2645 fn = builtin_decl_explicit (BUILT_IN_CEXP);
2646 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2647 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2648 else
2649 gcc_unreachable ();
2650
2651 /* If we don't have a decl for cexp create one. This is the
2652 friendliest fallback if the user calls __builtin_cexpi
2653 without full C99 function support on the target. */
2654 if (fn == NULL_TREE)
2655 {
2656 tree fntype;
2657 const char *name = NULL;
2658
2659 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2660 name = "cexpf";
2661 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2662 name = "cexp";
2663 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2664 name = "cexpl";
2665
2666 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2667 fn = build_fn_decl (name, fntype);
2668 }
2669
2670 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2671 build_real (type, dconst0), arg);
2672
2673 /* Make sure not to fold the cexp call again. */
2674 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2675 return expand_expr (build_call_nary (ctype, call, 1, narg),
2676 target, VOIDmode, EXPAND_NORMAL);
2677 }
2678
2679 /* Now build the proper return type. */
2680 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2681 make_tree (TREE_TYPE (arg), op2),
2682 make_tree (TREE_TYPE (arg), op1)),
2683 target, VOIDmode, EXPAND_NORMAL);
2684 }
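/* In other words, __builtin_cexpi (x) is lowered either to a sincos
   instruction or sincos () libcall computing sin (x) and cos (x), or to a
   cexp () call on the complex value 0 + x*i, and the result above is
   reassembled as cos (x) + sin (x)*i via a COMPLEX_EXPR.  */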
2685
2686 /* Conveniently construct a function call expression. FNDECL names the
2687 function to be called, N is the number of arguments, and the "..."
2688 parameters are the argument expressions. Unlike build_call_expr,
2689 this doesn't fold the call, so it will always return a CALL_EXPR. */
2690
2691 static tree
2692 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2693 {
2694 va_list ap;
2695 tree fntype = TREE_TYPE (fndecl);
2696 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2697
2698 va_start (ap, n);
2699 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2700 va_end (ap);
2701 SET_EXPR_LOCATION (fn, loc);
2702 return fn;
2703 }
2704
2705 /* Expand a call to one of the builtin rounding functions gcc defines
2706 as an extension (lfloor and lceil). As these are gcc extensions we
2707 do not need to worry about setting errno to EDOM.
2708 If expanding via optab fails, lower expression to (int)(floor(x)).
2709 EXP is the expression that is a call to the builtin function;
2710 if convenient, the result should be placed in TARGET. */
2711
2712 static rtx
2713 expand_builtin_int_roundingfn (tree exp, rtx target)
2714 {
2715 convert_optab builtin_optab;
2716 rtx op0, tmp;
2717 rtx_insn *insns;
2718 tree fndecl = get_callee_fndecl (exp);
2719 enum built_in_function fallback_fn;
2720 tree fallback_fndecl;
2721 machine_mode mode;
2722 tree arg;
2723
2724 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2725 return NULL_RTX;
2726
2727 arg = CALL_EXPR_ARG (exp, 0);
2728
2729 switch (DECL_FUNCTION_CODE (fndecl))
2730 {
2731 CASE_FLT_FN (BUILT_IN_ICEIL):
2732 CASE_FLT_FN (BUILT_IN_LCEIL):
2733 CASE_FLT_FN (BUILT_IN_LLCEIL):
2734 builtin_optab = lceil_optab;
2735 fallback_fn = BUILT_IN_CEIL;
2736 break;
2737
2738 CASE_FLT_FN (BUILT_IN_IFLOOR):
2739 CASE_FLT_FN (BUILT_IN_LFLOOR):
2740 CASE_FLT_FN (BUILT_IN_LLFLOOR):
2741 builtin_optab = lfloor_optab;
2742 fallback_fn = BUILT_IN_FLOOR;
2743 break;
2744
2745 default:
2746 gcc_unreachable ();
2747 }
2748
2749 /* Make a suitable register to place result in. */
2750 mode = TYPE_MODE (TREE_TYPE (exp));
2751
2752 target = gen_reg_rtx (mode);
2753
2754 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2755 need to expand the argument again. This way, we will not perform
2756 side effects more than once. */
2757 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2758
2759 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2760
2761 start_sequence ();
2762
2763 /* Compute into TARGET. */
2764 if (expand_sfix_optab (target, op0, builtin_optab))
2765 {
2766 /* Output the entire sequence. */
2767 insns = get_insns ();
2768 end_sequence ();
2769 emit_insn (insns);
2770 return target;
2771 }
2772
2773 /* If we were unable to expand via the builtin, stop the sequence
2774 (without outputting the insns). */
2775 end_sequence ();
2776
2777 /* Fall back to floating point rounding optab. */
2778 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2779
2780 /* For non-C99 targets we may end up without a fallback fndecl here
2781 if the user called __builtin_lfloor directly. In this case emit
2782 a call to the floor/ceil variants nevertheless. This should give
2783 the best user experience on targets without full C99 support. */
2784 if (fallback_fndecl == NULL_TREE)
2785 {
2786 tree fntype;
2787 const char *name = NULL;
2788
2789 switch (DECL_FUNCTION_CODE (fndecl))
2790 {
2791 case BUILT_IN_ICEIL:
2792 case BUILT_IN_LCEIL:
2793 case BUILT_IN_LLCEIL:
2794 name = "ceil";
2795 break;
2796 case BUILT_IN_ICEILF:
2797 case BUILT_IN_LCEILF:
2798 case BUILT_IN_LLCEILF:
2799 name = "ceilf";
2800 break;
2801 case BUILT_IN_ICEILL:
2802 case BUILT_IN_LCEILL:
2803 case BUILT_IN_LLCEILL:
2804 name = "ceill";
2805 break;
2806 case BUILT_IN_IFLOOR:
2807 case BUILT_IN_LFLOOR:
2808 case BUILT_IN_LLFLOOR:
2809 name = "floor";
2810 break;
2811 case BUILT_IN_IFLOORF:
2812 case BUILT_IN_LFLOORF:
2813 case BUILT_IN_LLFLOORF:
2814 name = "floorf";
2815 break;
2816 case BUILT_IN_IFLOORL:
2817 case BUILT_IN_LFLOORL:
2818 case BUILT_IN_LLFLOORL:
2819 name = "floorl";
2820 break;
2821 default:
2822 gcc_unreachable ();
2823 }
2824
2825 fntype = build_function_type_list (TREE_TYPE (arg),
2826 TREE_TYPE (arg), NULL_TREE);
2827 fallback_fndecl = build_fn_decl (name, fntype);
2828 }
2829
2830 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2831
2832 tmp = expand_normal (exp);
2833 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2834
2835 /* Truncate the result of floating point optab to integer
2836 via expand_fix (). */
2837 target = gen_reg_rtx (mode);
2838 expand_fix (target, tmp, 0);
2839
2840 return target;
2841 }
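/* Summary of the fallback path above (illustrative): when the target has
   no lceil/lfloor pattern, a call such as __builtin_lceil (x) is expanded
   as a call to ceil (x) whose floating-point result is then truncated to
   the integer result mode via expand_fix.  */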
2842
2843 /* Expand a call to one of the builtin math functions doing integer
2844 conversion (lrint).
2845 Return 0 if a normal call should be emitted rather than expanding the
2846 function in-line. EXP is the expression that is a call to the builtin
2847 function; if convenient, the result should be placed in TARGET. */
2848
2849 static rtx
2850 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2851 {
2852 convert_optab builtin_optab;
2853 rtx op0;
2854 rtx_insn *insns;
2855 tree fndecl = get_callee_fndecl (exp);
2856 tree arg;
2857 machine_mode mode;
2858 enum built_in_function fallback_fn = BUILT_IN_NONE;
2859
2860 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2861 return NULL_RTX;
2862
2863 arg = CALL_EXPR_ARG (exp, 0);
2864
2865 switch (DECL_FUNCTION_CODE (fndecl))
2866 {
2867 CASE_FLT_FN (BUILT_IN_IRINT):
2868 fallback_fn = BUILT_IN_LRINT;
2869 gcc_fallthrough ();
2870 CASE_FLT_FN (BUILT_IN_LRINT):
2871 CASE_FLT_FN (BUILT_IN_LLRINT):
2872 builtin_optab = lrint_optab;
2873 break;
2874
2875 CASE_FLT_FN (BUILT_IN_IROUND):
2876 fallback_fn = BUILT_IN_LROUND;
2877 gcc_fallthrough ();
2878 CASE_FLT_FN (BUILT_IN_LROUND):
2879 CASE_FLT_FN (BUILT_IN_LLROUND):
2880 builtin_optab = lround_optab;
2881 break;
2882
2883 default:
2884 gcc_unreachable ();
2885 }
2886
2887 /* There's no easy way to detect the case we need to set EDOM. */
2888 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2889 return NULL_RTX;
2890
2891 /* Make a suitable register to place result in. */
2892 mode = TYPE_MODE (TREE_TYPE (exp));
2893
2894 /* There's no easy way to detect the case we need to set EDOM. */
2895 if (!flag_errno_math)
2896 {
2897 rtx result = gen_reg_rtx (mode);
2898
2899 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2900 need to expand the argument again. This way, we will not perform
2901 side effects more than once. */
2902 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2903
2904 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2905
2906 start_sequence ();
2907
2908 if (expand_sfix_optab (result, op0, builtin_optab))
2909 {
2910 /* Output the entire sequence. */
2911 insns = get_insns ();
2912 end_sequence ();
2913 emit_insn (insns);
2914 return result;
2915 }
2916
2917 /* If we were unable to expand via the builtin, stop the sequence
2918 (without outputting the insns) and call to the library function
2919 with the stabilized argument list. */
2920 end_sequence ();
2921 }
2922
2923 if (fallback_fn != BUILT_IN_NONE)
2924 {
2925 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
2926 targets, (int) round (x) should never be transformed into
2927 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2928 a call to lround in the hope that the target provides at least some
2929 C99 functions. This should give the best user experience on
2930 targets without full C99 support. */
2931 tree fallback_fndecl = mathfn_built_in_1
2932 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2933
2934 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2935 fallback_fndecl, 1, arg);
2936
2937 target = expand_call (exp, NULL_RTX, target == const0_rtx);
2938 target = maybe_emit_group_store (target, TREE_TYPE (exp));
2939 return convert_to_mode (mode, target, 0);
2940 }
2941
2942 return expand_call (exp, target, target == const0_rtx);
2943 }
2944
2945 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
2946 a normal call should be emitted rather than expanding the function
2947 in-line. EXP is the expression that is a call to the builtin
2948 function; if convenient, the result should be placed in TARGET. */
2949
2950 static rtx
2951 expand_builtin_powi (tree exp, rtx target)
2952 {
2953 tree arg0, arg1;
2954 rtx op0, op1;
2955 machine_mode mode;
2956 machine_mode mode2;
2957
2958 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2959 return NULL_RTX;
2960
2961 arg0 = CALL_EXPR_ARG (exp, 0);
2962 arg1 = CALL_EXPR_ARG (exp, 1);
2963 mode = TYPE_MODE (TREE_TYPE (exp));
2964
2965 /* Emit a libcall to libgcc. */
2966
2967 /* Mode of the 2nd argument must match that of an int. */
2968 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
2969
2970 if (target == NULL_RTX)
2971 target = gen_reg_rtx (mode);
2972
2973 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2974 if (GET_MODE (op0) != mode)
2975 op0 = convert_to_mode (mode, op0, 0);
2976 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2977 if (GET_MODE (op1) != mode2)
2978 op1 = convert_to_mode (mode2, op1, 0);
2979
2980 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2981 target, LCT_CONST, mode,
2982 op0, mode, op1, mode2);
2983
2984 return target;
2985 }
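/* Unlike most of the math expansions above, this one always emits a
   library call: e.g. __builtin_powi (x, n) with a double X becomes a call
   to the powi_optab libfunc for DFmode (__powidf2 in libgcc), after N has
   been converted to the mode of an int.  */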
2986
2987 /* Expand expression EXP which is a call to the strlen builtin. Return
2988 NULL_RTX if we failed and the caller should emit a normal call, otherwise
2989 try to get the result in TARGET, if convenient. */
2990
2991 static rtx
2992 expand_builtin_strlen (tree exp, rtx target,
2993 machine_mode target_mode)
2994 {
2995 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2996 return NULL_RTX;
2997
2998 class expand_operand ops[4];
2999 rtx pat;
3000 tree len;
3001 tree src = CALL_EXPR_ARG (exp, 0);
3002 rtx src_reg;
3003 rtx_insn *before_strlen;
3004 machine_mode insn_mode;
3005 enum insn_code icode = CODE_FOR_nothing;
3006 unsigned int align;
3007
3008 /* If the length can be computed at compile-time, return it. */
3009 len = c_strlen (src, 0);
3010 if (len)
3011 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3012
3013 /* If the length can be computed at compile-time and is a constant
3014 integer, but there are side-effects in src, evaluate
3015 src for side-effects, then return len.
3016 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3017 can be optimized into: i++; x = 3; */
3018 len = c_strlen (src, 1);
3019 if (len && TREE_CODE (len) == INTEGER_CST)
3020 {
3021 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3022 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3023 }
3024
3025 align = get_pointer_alignment (src) / BITS_PER_UNIT;
3026
3027 /* If SRC is not a pointer type, don't do this operation inline. */
3028 if (align == 0)
3029 return NULL_RTX;
3030
3031 /* Bail out if we can't compute strlen in the right mode. */
3032 FOR_EACH_MODE_FROM (insn_mode, target_mode)
3033 {
3034 icode = optab_handler (strlen_optab, insn_mode);
3035 if (icode != CODE_FOR_nothing)
3036 break;
3037 }
3038 if (insn_mode == VOIDmode)
3039 return NULL_RTX;
3040
3041 /* Make a place to hold the source address. We will not expand
3042 the actual source until we are sure that the expansion will
3043 not fail -- there are trees that cannot be expanded twice. */
3044 src_reg = gen_reg_rtx (Pmode);
3045
3046 /* Mark the beginning of the strlen sequence so we can emit the
3047 source operand later. */
3048 before_strlen = get_last_insn ();
3049
3050 create_output_operand (&ops[0], target, insn_mode);
3051 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3052 create_integer_operand (&ops[2], 0);
3053 create_integer_operand (&ops[3], align);
3054 if (!maybe_expand_insn (icode, 4, ops))
3055 return NULL_RTX;
3056
3057 /* Check to see if the argument was declared attribute nonstring
3058 and if so, issue a warning since at this point it's not known
3059 to be nul-terminated. */
3060 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3061
3062 /* Now that we are assured of success, expand the source. */
3063 start_sequence ();
3064 pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3065 if (pat != src_reg)
3066 {
3067 #ifdef POINTERS_EXTEND_UNSIGNED
3068 if (GET_MODE (pat) != Pmode)
3069 pat = convert_to_mode (Pmode, pat,
3070 POINTERS_EXTEND_UNSIGNED);
3071 #endif
3072 emit_move_insn (src_reg, pat);
3073 }
3074 pat = get_insns ();
3075 end_sequence ();
3076
3077 if (before_strlen)
3078 emit_insn_after (pat, before_strlen);
3079 else
3080 emit_insn_before (pat, get_insns ());
3081
3082 /* Return the value in the proper mode for this function. */
3083 if (GET_MODE (ops[0].value) == target_mode)
3084 target = ops[0].value;
3085 else if (target != 0)
3086 convert_move (target, ops[0].value, 0);
3087 else
3088 target = convert_to_mode (target_mode, ops[0].value, 0);
3089
3090 return target;
3091 }
3092
3093 /* Expand call EXP to the strnlen built-in, returning the result
3094 in TARGET if convenient, or NULL_RTX on failure. */
3095
3096 static rtx
3097 expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
3098 {
3099 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3100 return NULL_RTX;
3101
3102 tree src = CALL_EXPR_ARG (exp, 0);
3103 tree bound = CALL_EXPR_ARG (exp, 1);
3104
3105 if (!bound)
3106 return NULL_RTX;
3107
3108 location_t loc = UNKNOWN_LOCATION;
3109 if (EXPR_HAS_LOCATION (exp))
3110 loc = EXPR_LOCATION (exp);
3111
3112 tree maxobjsize = max_object_size ();
3113 tree func = get_callee_fndecl (exp);
3114
3115 /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
3116 so these conversions aren't necessary. */
3117 c_strlen_data lendata = { };
3118 tree len = c_strlen (src, 0, &lendata, 1);
3119 if (len)
3120 len = fold_convert_loc (loc, TREE_TYPE (bound), len);
3121
3122 if (TREE_CODE (bound) == INTEGER_CST)
3123 {
3124 if (!TREE_NO_WARNING (exp)
3125 && tree_int_cst_lt (maxobjsize, bound)
3126 && warning_at (loc, OPT_Wstringop_overflow_,
3127 "%K%qD specified bound %E "
3128 "exceeds maximum object size %E",
3129 exp, func, bound, maxobjsize))
3130 TREE_NO_WARNING (exp) = true;
3131
3132 bool exact = true;
3133 if (!len || TREE_CODE (len) != INTEGER_CST)
3134 {
3135 /* Clear EXACT if LEN may be less than SRC suggests,
3136 such as in
3137 strnlen (&a[i], sizeof a)
3138 where the value of i is unknown. Unless i's value is
3139 zero, the call is unsafe because the bound is greater. */
3140 lendata.decl = unterminated_array (src, &len, &exact);
3141 if (!lendata.decl)
3142 return NULL_RTX;
3143 }
3144
3145 if (lendata.decl && (tree_int_cst_lt (len, bound) || !exact))
3146 {
3147 location_t warnloc
3148 = expansion_point_location_if_in_system_header (loc);
3149
3150 if (!TREE_NO_WARNING (exp)
3151 && warning_at (warnloc, OPT_Wstringop_overflow_,
3152 exact
3153 ? G_("%K%qD specified bound %E exceeds the size "
3154 "%E of unterminated array")
3155 : G_("%K%qD specified bound %E may exceed the "
3156 "size of at most %E of unterminated array"),
3157 exp, func, bound, len))
3158 {
3159 inform (DECL_SOURCE_LOCATION (lendata.decl),
3160 "referenced argument declared here");
3161 TREE_NO_WARNING (exp) = true;
3162 }
3163 return NULL_RTX;
3164 }
3165
3166 if (!len)
3167 return NULL_RTX;
3168
3169 len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
3170 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3171 }
3172
3173 if (TREE_CODE (bound) != SSA_NAME)
3174 return NULL_RTX;
3175
3176 wide_int min, max;
3177 enum value_range_kind rng = get_range_info (bound, &min, &max);
3178 if (rng != VR_RANGE)
3179 return NULL_RTX;
3180
3181 if (!TREE_NO_WARNING (exp)
3182 && wi::ltu_p (wi::to_wide (maxobjsize, min.get_precision ()), min)
3183 && warning_at (loc, OPT_Wstringop_overflow_,
3184 "%K%qD specified bound [%wu, %wu] "
3185 "exceeds maximum object size %E",
3186 exp, func, min.to_uhwi (), max.to_uhwi (), maxobjsize))
3187 TREE_NO_WARNING (exp) = true;
3188
3189 bool exact = true;
3190 if (!len || TREE_CODE (len) != INTEGER_CST)
3191 {
3192 lendata.decl = unterminated_array (src, &len, &exact);
3193 if (!lendata.decl)
3194 return NULL_RTX;
3195 }
3196
3197 if (lendata.decl
3198 && !TREE_NO_WARNING (exp)
3199 && (wi::ltu_p (wi::to_wide (len), min)
3200 || !exact))
3201 {
3202 location_t warnloc
3203 = expansion_point_location_if_in_system_header (loc);
3204
3205 if (warning_at (warnloc, OPT_Wstringop_overflow_,
3206 exact
3207 ? G_("%K%qD specified bound [%wu, %wu] exceeds "
3208 "the size %E of unterminated array")
3209 : G_("%K%qD specified bound [%wu, %wu] may exceed "
3210 "the size of at most %E of unterminated array"),
3211 exp, func, min.to_uhwi (), max.to_uhwi (), len))
3212 {
3213 inform (DECL_SOURCE_LOCATION (lendata.decl),
3214 "referenced argument declared here");
3215 TREE_NO_WARNING (exp) = true;
3216 }
3217 }
3218
3219 if (lendata.decl)
3220 return NULL_RTX;
3221
3222 if (wi::gtu_p (min, wi::to_wide (len)))
3223 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3224
3225 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3226 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3227 }
3228
3229 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3230 bytes from the bytes at DATA + OFFSET and return them reinterpreted as
3231 a target constant. */
3232
3233 static rtx
3234 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3235 scalar_int_mode mode)
3236 {
3237 /* The REPresentation pointed to by DATA need not be a nul-terminated
3238 string but the caller guarantees it's large enough for MODE. */
3239 const char *rep = (const char *) data;
3240
3241 return c_readstr (rep + offset, mode, /*nul_terminated=*/false);
3242 }
3243
3244 /* LEN specifies the length of the block for the memcpy/memset operation.
3245 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3246 In some cases we can make a very likely guess at the maximum size,
3247 which we then store in PROBABLE_MAX_SIZE. */
3248
3249 static void
3250 determine_block_size (tree len, rtx len_rtx,
3251 unsigned HOST_WIDE_INT *min_size,
3252 unsigned HOST_WIDE_INT *max_size,
3253 unsigned HOST_WIDE_INT *probable_max_size)
3254 {
3255 if (CONST_INT_P (len_rtx))
3256 {
3257 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3258 return;
3259 }
3260 else
3261 {
3262 wide_int min, max;
3263 enum value_range_kind range_type = VR_UNDEFINED;
3264
3265 /* Determine bounds from the type. */
3266 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3267 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3268 else
3269 *min_size = 0;
3270 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3271 *probable_max_size = *max_size
3272 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3273 else
3274 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3275
3276 if (TREE_CODE (len) == SSA_NAME)
3277 range_type = get_range_info (len, &min, &max);
3278 if (range_type == VR_RANGE)
3279 {
3280 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3281 *min_size = min.to_uhwi ();
3282 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3283 *probable_max_size = *max_size = max.to_uhwi ();
3284 }
3285 else if (range_type == VR_ANTI_RANGE)
3286 {
3287 	  /* An anti-range 0...N lets us determine the minimal size to be N+1.  */
3288 if (min == 0)
3289 {
3290 if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3291 *min_size = max.to_uhwi () + 1;
3292 }
3293 /* Code like
3294
3295 int n;
3296 if (n < 100)
3297 memcpy (a, b, n)
3298
3299 	     produces an anti-range allowing negative values of N.  We can
3300 	     still use the information to guess that N is not negative.
3301 */
3302 else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3303 *probable_max_size = min.to_uhwi () - 1;
3304 }
3305 }
3306 gcc_checking_assert (*max_size <=
3307 (unsigned HOST_WIDE_INT)
3308 GET_MODE_MASK (GET_MODE (len_rtx)));
3309 }
3310
3311 /* Try to verify that the sizes and lengths of the arguments to a string
3312 manipulation function given by EXP are within valid bounds and that
3313 the operation does not lead to buffer overflow or read past the end.
3314 Arguments other than EXP may be null. When non-null, the arguments
3315 have the following meaning:
3316 DST is the destination of a copy call or NULL otherwise.
3317 SRC is the source of a copy call or NULL otherwise.
3318 DSTWRITE is the number of bytes written into the destination obtained
3319 from the user-supplied size argument to the function (such as in
3320    memcpy(DST, SRC, DSTWRITE) or strncpy(DST, SRC, DSTWRITE)).
3321 MAXREAD is the user-supplied bound on the length of the source sequence
3322    (such as in strncat(d, s, N)).  It specifies the upper limit on the number
3323 of bytes to write. If NULL, it's taken to be the same as DSTWRITE.
3324 SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
3325 expression EXP is a string function call (as opposed to a memory call
3326 like memcpy). As an exception, SRCSTR can also be an integer denoting
3327 the precomputed size of the source string or object (for functions like
3328 memcpy).
3329 DSTSIZE is the size of the destination object specified by the last
3330 argument to the _chk builtins, typically resulting from the expansion
3331 of __builtin_object_size (such as in __builtin___strcpy_chk(DST, SRC,
3332    DSTSIZE)).
3333
3334 When DSTWRITE is null LEN is checked to verify that it doesn't exceed
3335 SIZE_MAX.
3336
3337    If the call is successfully verified as safe, return true; otherwise
3338    return false.  */
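/* As an illustrative sketch (hypothetical snippet, not from this file):
   for

     char d[4];
     strncpy (d, s, 8);

   the function is reached with DSTWRITE == 8 and DSTSIZE == 4, which is
   expected to produce a -Wstringop-overflow diagnostic about writing
   8 bytes into a region of size 4 and a false return value.  */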
3339
3340 bool
3341 check_access (tree exp, tree, tree, tree dstwrite,
3342 tree maxread, tree srcstr, tree dstsize)
3343 {
3344 int opt = OPT_Wstringop_overflow_;
3345
3346 /* The size of the largest object is half the address space, or
3347 PTRDIFF_MAX. (This is way too permissive.) */
3348 tree maxobjsize = max_object_size ();
3349
3350 /* Either the length of the source string for string functions or
3351 the size of the source object for raw memory functions. */
3352 tree slen = NULL_TREE;
3353
3354 tree range[2] = { NULL_TREE, NULL_TREE };
3355
3356 /* Set to true when the exact number of bytes written by a string
3357 function like strcpy is not known and the only thing that is
3358 known is that it must be at least one (for the terminating nul). */
3359 bool at_least_one = false;
3360 if (srcstr)
3361 {
3362 /* SRCSTR is normally a pointer to string but as a special case
3363 it can be an integer denoting the length of a string. */
3364 if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
3365 {
3366 /* Try to determine the range of lengths the source string
3367 refers to. If it can be determined and is less than
3368 the upper bound given by MAXREAD add one to it for
3369 the terminating nul. Otherwise, set it to one for
3370 the same reason, or to MAXREAD as appropriate. */
3371 c_strlen_data lendata = { };
3372 get_range_strlen (srcstr, &lendata, /* eltsize = */ 1);
3373 range[0] = lendata.minlen;
3374 range[1] = lendata.maxbound ? lendata.maxbound : lendata.maxlen;
3375 if (range[0] && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
3376 {
3377 if (maxread && tree_int_cst_le (maxread, range[0]))
3378 range[0] = range[1] = maxread;
3379 else
3380 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3381 range[0], size_one_node);
3382
3383 if (maxread && tree_int_cst_le (maxread, range[1]))
3384 range[1] = maxread;
3385 else if (!integer_all_onesp (range[1]))
3386 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3387 range[1], size_one_node);
3388
3389 slen = range[0];
3390 }
3391 else
3392 {
3393 at_least_one = true;
3394 slen = size_one_node;
3395 }
3396 }
3397 else
3398 slen = srcstr;
3399 }
3400
3401 if (!dstwrite && !maxread)
3402 {
3403 /* When the only available piece of data is the object size
3404 there is nothing to do. */
3405 if (!slen)
3406 return true;
3407
3408 /* Otherwise, when the length of the source sequence is known
3409 (as with strlen), set DSTWRITE to it. */
3410 if (!range[0])
3411 dstwrite = slen;
3412 }
3413
3414 if (!dstsize)
3415 dstsize = maxobjsize;
3416
3417 if (dstwrite)
3418 get_size_range (dstwrite, range);
3419
3420 tree func = get_callee_fndecl (exp);
3421
3422 /* First check the number of bytes to be written against the maximum
3423 object size. */
3424 if (range[0]
3425 && TREE_CODE (range[0]) == INTEGER_CST
3426 && tree_int_cst_lt (maxobjsize, range[0]))
3427 {
3428 if (TREE_NO_WARNING (exp))
3429 return false;
3430
3431 location_t loc = tree_nonartificial_location (exp);
3432 loc = expansion_point_location_if_in_system_header (loc);
3433
3434 bool warned;
3435 if (range[0] == range[1])
3436 warned = (func
3437 ? warning_at (loc, opt,
3438 "%K%qD specified size %E "
3439 "exceeds maximum object size %E",
3440 exp, func, range[0], maxobjsize)
3441 : warning_at (loc, opt,
3442 "%Kspecified size %E "
3443 "exceeds maximum object size %E",
3444 exp, range[0], maxobjsize));
3445 else
3446 warned = (func
3447 ? warning_at (loc, opt,
3448 "%K%qD specified size between %E and %E "
3449 "exceeds maximum object size %E",
3450 exp, func,
3451 range[0], range[1], maxobjsize)
3452 : warning_at (loc, opt,
3453 "%Kspecified size between %E and %E "
3454 "exceeds maximum object size %E",
3455 exp, range[0], range[1], maxobjsize));
3456 if (warned)
3457 TREE_NO_WARNING (exp) = true;
3458
3459 return false;
3460 }
3461
3462 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
3463 constant, and in range of unsigned HOST_WIDE_INT. */
3464 bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);
3465
3466 /* Next check the number of bytes to be written against the destination
3467 object size. */
3468 if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
3469 {
3470 if (range[0]
3471 && TREE_CODE (range[0]) == INTEGER_CST
3472 && ((tree_fits_uhwi_p (dstsize)
3473 && tree_int_cst_lt (dstsize, range[0]))
3474 || (dstwrite
3475 && tree_fits_uhwi_p (dstwrite)
3476 && tree_int_cst_lt (dstwrite, range[0]))))
3477 {
3478 if (TREE_NO_WARNING (exp))
3479 return false;
3480
3481 location_t loc = tree_nonartificial_location (exp);
3482 loc = expansion_point_location_if_in_system_header (loc);
3483
3484 bool warned = false;
3485 if (dstwrite == slen && at_least_one)
3486 {
3487 /* This is a call to strcpy with a destination of 0 size
3488 and a source of unknown length. The call will write
3489 at least one byte past the end of the destination. */
3490 warned = (func
3491 ? warning_at (loc, opt,
3492 "%K%qD writing %E or more bytes into "
3493 "a region of size %E overflows "
3494 "the destination",
3495 exp, func, range[0], dstsize)
3496 : warning_at (loc, opt,
3497 "%Kwriting %E or more bytes into "
3498 "a region of size %E overflows "
3499 "the destination",
3500 exp, range[0], dstsize));
3501 }
3502 else if (tree_int_cst_equal (range[0], range[1]))
3503 warned = (func
3504 ? warning_n (loc, opt, tree_to_uhwi (range[0]),
3505 "%K%qD writing %E byte into a region "
3506 "of size %E overflows the destination",
3507 "%K%qD writing %E bytes into a region "
3508 "of size %E overflows the destination",
3509 exp, func, range[0], dstsize)
3510 : warning_n (loc, opt, tree_to_uhwi (range[0]),
3511 "%Kwriting %E byte into a region "
3512 "of size %E overflows the destination",
3513 "%Kwriting %E bytes into a region "
3514 "of size %E overflows the destination",
3515 exp, range[0], dstsize));
3516 else if (tree_int_cst_sign_bit (range[1]))
3517 {
3518 /* Avoid printing the upper bound if it's invalid. */
3519 warned = (func
3520 ? warning_at (loc, opt,
3521 "%K%qD writing %E or more bytes into "
3522 "a region of size %E overflows "
3523 "the destination",
3524 exp, func, range[0], dstsize)
3525 : warning_at (loc, opt,
3526 "%Kwriting %E or more bytes into "
3527 "a region of size %E overflows "
3528 "the destination",
3529 exp, range[0], dstsize));
3530 }
3531 else
3532 warned = (func
3533 ? warning_at (loc, opt,
3534 "%K%qD writing between %E and %E bytes "
3535 "into a region of size %E overflows "
3536 "the destination",
3537 exp, func, range[0], range[1],
3538 dstsize)
3539 : warning_at (loc, opt,
3540 "%Kwriting between %E and %E bytes "
3541 "into a region of size %E overflows "
3542 "the destination",
3543 exp, range[0], range[1],
3544 dstsize));
3545 if (warned)
3546 TREE_NO_WARNING (exp) = true;
3547
3548 /* Return error when an overflow has been detected. */
3549 return false;
3550 }
3551 }
3552
3553 /* Check the maximum length of the source sequence against the size
3554 of the destination object if known, or against the maximum size
3555 of an object. */
3556 if (maxread)
3557 {
3558 get_size_range (maxread, range);
3559 if (range[0] && dstsize && tree_fits_uhwi_p (dstsize))
3560 {
3561 location_t loc = tree_nonartificial_location (exp);
3562 loc = expansion_point_location_if_in_system_header (loc);
3563
3564 if (tree_int_cst_lt (maxobjsize, range[0]))
3565 {
3566 if (TREE_NO_WARNING (exp))
3567 return false;
3568
3569 bool warned = false;
3570
3571 /* Warn about crazy big sizes first since that's more
3572 likely to be meaningful than saying that the bound
3573 is greater than the object size if both are big. */
3574 if (range[0] == range[1])
3575 warned = (func
3576 ? warning_at (loc, opt,
3577 "%K%qD specified bound %E "
3578 "exceeds maximum object size %E",
3579 exp, func, range[0], maxobjsize)
3580 : warning_at (loc, opt,
3581 "%Kspecified bound %E "
3582 "exceeds maximum object size %E",
3583 exp, range[0], maxobjsize));
3584 else
3585 warned = (func
3586 ? warning_at (loc, opt,
3587 "%K%qD specified bound between "
3588 "%E and %E exceeds maximum object "
3589 "size %E",
3590 exp, func,
3591 range[0], range[1], maxobjsize)
3592 : warning_at (loc, opt,
3593 "%Kspecified bound between "
3594 "%E and %E exceeds maximum object "
3595 "size %E",
3596 exp, range[0], range[1], maxobjsize));
3597 if (warned)
3598 TREE_NO_WARNING (exp) = true;
3599
3600 return false;
3601 }
3602
3603 if (dstsize != maxobjsize && tree_int_cst_lt (dstsize, range[0]))
3604 {
3605 if (TREE_NO_WARNING (exp))
3606 return false;
3607
3608 bool warned = false;
3609
3610 if (tree_int_cst_equal (range[0], range[1]))
3611 warned = (func
3612 ? warning_at (loc, opt,
3613 "%K%qD specified bound %E "
3614 "exceeds destination size %E",
3615 exp, func,
3616 range[0], dstsize)
3617 : warning_at (loc, opt,
3618 "%Kspecified bound %E "
3619 "exceeds destination size %E",
3620 exp, range[0], dstsize));
3621 else
3622 warned = (func
3623 ? warning_at (loc, opt,
3624 "%K%qD specified bound between %E "
3625 "and %E exceeds destination size %E",
3626 exp, func,
3627 range[0], range[1], dstsize)
3628 : warning_at (loc, opt,
3629 "%Kspecified bound between %E "
3630 "and %E exceeds destination size %E",
3631 exp,
3632 range[0], range[1], dstsize));
3633 if (warned)
3634 TREE_NO_WARNING (exp) = true;
3635
3636 return false;
3637 }
3638 }
3639 }
3640
3641 /* Check for reading past the end of SRC. */
3642 if (slen
3643 && slen == srcstr
3644 && dstwrite && range[0]
3645 && tree_int_cst_lt (slen, range[0]))
3646 {
3647 if (TREE_NO_WARNING (exp))
3648 return false;
3649
3650 bool warned = false;
3651 location_t loc = tree_nonartificial_location (exp);
3652 loc = expansion_point_location_if_in_system_header (loc);
3653
3654 if (tree_int_cst_equal (range[0], range[1]))
3655 warned = (func
3656 ? warning_n (loc, opt, tree_to_uhwi (range[0]),
3657 "%K%qD reading %E byte from a region of size %E",
3658 "%K%qD reading %E bytes from a region of size %E",
3659 exp, func, range[0], slen)
3660 : warning_n (loc, opt, tree_to_uhwi (range[0]),
3661 "%Kreading %E byte from a region of size %E",
3662 "%Kreading %E bytes from a region of size %E",
3663 exp, range[0], slen));
3664 else if (tree_int_cst_sign_bit (range[1]))
3665 {
3666 /* Avoid printing the upper bound if it's invalid. */
3667 warned = (func
3668 ? warning_at (loc, opt,
3669 "%K%qD reading %E or more bytes from a region "
3670 "of size %E",
3671 exp, func, range[0], slen)
3672 : warning_at (loc, opt,
3673 "%Kreading %E or more bytes from a region "
3674 "of size %E",
3675 exp, range[0], slen));
3676 }
3677 else
3678 warned = (func
3679 ? warning_at (loc, opt,
3680 "%K%qD reading between %E and %E bytes from "
3681 "a region of size %E",
3682 exp, func, range[0], range[1], slen)
3683 : warning_at (loc, opt,
3684 "%Kreading between %E and %E bytes from "
3685 "a region of size %E",
3686 exp, range[0], range[1], slen));
3687 if (warned)
3688 TREE_NO_WARNING (exp) = true;
3689
3690 return false;
3691 }
3692
3693 return true;
3694 }
3695
3696 /* If STMT is a call to an allocation function, returns the constant
3697 size of the object allocated by the call represented as sizetype.
3698 If nonnull, sets RNG1[] to the range of the size. */
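/* As a sketch (assuming calloc is declared with attribute
   alloc_size (1, 2)): for

     p = calloc (n, 8);

   with N in the value range [1, 4], RNG1[] is set to the product of
   the two ranges, [8, 32], and the upper bound 32 is returned as
   a sizetype constant.  */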
3699
3700 tree
3701 gimple_call_alloc_size (gimple *stmt, wide_int rng1[2] /* = NULL */,
3702 const vr_values *rvals /* = NULL */)
3703 {
3704 if (!stmt)
3705 return NULL_TREE;
3706
3707 tree allocfntype;
3708 if (tree fndecl = gimple_call_fndecl (stmt))
3709 allocfntype = TREE_TYPE (fndecl);
3710 else
3711 allocfntype = gimple_call_fntype (stmt);
3712
3713 if (!allocfntype)
3714 return NULL_TREE;
3715
3716 unsigned argidx1 = UINT_MAX, argidx2 = UINT_MAX;
3717 tree at = lookup_attribute ("alloc_size", TYPE_ATTRIBUTES (allocfntype));
3718 if (!at)
3719 {
3720 if (!gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
3721 return NULL_TREE;
3722
3723 argidx1 = 0;
3724 }
3725
3726 unsigned nargs = gimple_call_num_args (stmt);
3727
3728 if (argidx1 == UINT_MAX)
3729 {
3730 tree atval = TREE_VALUE (at);
3731 if (!atval)
3732 return NULL_TREE;
3733
3734 argidx1 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
3735 if (nargs <= argidx1)
3736 return NULL_TREE;
3737
3738 atval = TREE_CHAIN (atval);
3739 if (atval)
3740 {
3741 argidx2 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
3742 if (nargs <= argidx2)
3743 return NULL_TREE;
3744 }
3745 }
3746
3747 tree size = gimple_call_arg (stmt, argidx1);
3748
3749 wide_int rng1_buf[2];
3750 /* If RNG1 is not set, use the buffer. */
3751 if (!rng1)
3752 rng1 = rng1_buf;
3753
3754 if (!get_range (size, rng1, rvals))
3755 return NULL_TREE;
3756
3757 if (argidx2 > nargs && TREE_CODE (size) == INTEGER_CST)
3758 return fold_convert (sizetype, size);
3759
3760 /* To handle ranges do the math in wide_int and return the product
3761 of the upper bounds as a constant. Ignore anti-ranges. */
3762 tree n = argidx2 < nargs ? gimple_call_arg (stmt, argidx2) : integer_one_node;
3763 wide_int rng2[2];
3764 if (!get_range (n, rng2, rvals))
3765 return NULL_TREE;
3766
3767 /* Extend to the maximum precision to avoid overflow. */
3768 const int prec = ADDR_MAX_PRECISION;
3769 rng1[0] = wide_int::from (rng1[0], prec, UNSIGNED);
3770 rng1[1] = wide_int::from (rng1[1], prec, UNSIGNED);
3771 rng2[0] = wide_int::from (rng2[0], prec, UNSIGNED);
3772 rng2[1] = wide_int::from (rng2[1], prec, UNSIGNED);
3773
3774 /* Compute products of both bounds for the caller but return the lesser
3775 of SIZE_MAX and the product of the upper bounds as a constant. */
3776 rng1[0] = rng1[0] * rng2[0];
3777 rng1[1] = rng1[1] * rng2[1];
3778 tree size_max = TYPE_MAX_VALUE (sizetype);
3779 if (wi::gtu_p (rng1[1], wi::to_wide (size_max, prec)))
3780 {
3781 rng1[1] = wi::to_wide (size_max);
3782 return size_max;
3783 }
3784
3785 return wide_int_to_tree (sizetype, rng1[1]);
3786 }
3787
3788 /* Helper for compute_objsize. Returns the constant size of the DEST
3789 if it refers to a variable or field and sets *PDECL to the DECL and
3790 *POFF to zero. Otherwise returns null for other nodes. */
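/* For example, for DEST of &buf with a hypothetical declaration
   char buf[8], the function returns 8, sets *PDECL to the VAR_DECL
   for buf, and sets *POFF to zero.  */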
3791
3792 static tree
3793 addr_decl_size (tree dest, tree *pdecl, tree *poff)
3794 {
3795 if (TREE_CODE (dest) == ADDR_EXPR)
3796 dest = TREE_OPERAND (dest, 0);
3797
3798 if (DECL_P (dest))
3799 {
3800 *pdecl = dest;
3801 *poff = integer_zero_node;
3802 if (tree size = DECL_SIZE_UNIT (dest))
3803 return TREE_CODE (size) == INTEGER_CST ? size : NULL_TREE;
3804 }
3805
3806 if (TREE_CODE (dest) == COMPONENT_REF)
3807 {
3808 *pdecl = TREE_OPERAND (dest, 1);
3809 *poff = integer_zero_node;
3810 /* Only return constant sizes for now while callers depend on it. */
3811 if (tree size = component_ref_size (dest))
3812 return TREE_CODE (size) == INTEGER_CST ? size : NULL_TREE;
3813 }
3814
3815 return NULL_TREE;
3816 }
3817
3818 /* Helper to compute the size of the object referenced by the DEST
3819 expression which must have pointer type, using Object Size type
3820 OSTYPE (only the least significant 2 bits are used).
3821 Returns an estimate of the size of the object represented as
3822 a sizetype constant if successful or NULL when the size cannot
3823 be determined.
3824 When the referenced object involves a non-constant offset in some
3825 range the returned value represents the largest size given the
3826 smallest non-negative offset in the range.
3827 If nonnull, sets *PDECL to the decl of the referenced subobject
3828 if it can be determined, or to null otherwise. Likewise, when
3829 POFF is nonnull *POFF is set to the offset into *PDECL.
3830
3831 The function is intended for diagnostics and should not be used
3832 to influence code generation or optimization. */
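/* A sketch of the expected behavior for a non-constant offset
   (hypothetical declarations): for

     char buf[16];
     char *p = buf + i;

   where I has the value range [4, 12], the size reported for P is
   16 - 4 == 12, i.e. the largest size given the smallest non-negative
   offset in the range, as described above.  */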
3833
3834 tree
3835 compute_objsize (tree dest, int ostype, tree *pdecl /* = NULL */,
3836 tree *poff /* = NULL */, const vr_values *rvals /* = NULL */)
3837 {
3838 tree dummy_decl = NULL_TREE;
3839 if (!pdecl)
3840 pdecl = &dummy_decl;
3841
3842 tree dummy_off = NULL_TREE;
3843 if (!poff)
3844 poff = &dummy_off;
3845
3846 /* Only the two least significant bits are meaningful. */
3847 ostype &= 3;
3848
3849 if (ostype)
3850 /* Except for overly permissive calls to memcpy and other raw
3851 memory functions with zero OSTYPE, detect the size from simple
3852        DECLs first in order to set *PDECL and *POFF more reliably
3853        than compute_builtin_object_size does.  */
3854 if (tree size = addr_decl_size (dest, pdecl, poff))
3855 return size;
3856
3857 unsigned HOST_WIDE_INT size;
3858 if (compute_builtin_object_size (dest, ostype, &size, pdecl, poff))
3859 return build_int_cst (sizetype, size);
3860
3861 if (TREE_CODE (dest) == SSA_NAME)
3862 {
3863 gimple *stmt = SSA_NAME_DEF_STMT (dest);
3864 if (is_gimple_call (stmt))
3865 {
3866 /* If STMT is a call to an allocation function get the size
3867 from its argument(s). If successful, also set *PDECL to
3868 DEST for the caller to include in diagnostics. */
3869 if (tree size = gimple_call_alloc_size (stmt))
3870 {
3871 *pdecl = dest;
3872 *poff = integer_zero_node;
3873 return size;
3874 }
3875 return NULL_TREE;
3876 }
3877
3878 if (!is_gimple_assign (stmt))
3879 return NULL_TREE;
3880
3881 dest = gimple_assign_rhs1 (stmt);
3882
3883 tree_code code = gimple_assign_rhs_code (stmt);
3884 if (code == POINTER_PLUS_EXPR)
3885 {
3886 /* compute_builtin_object_size fails for addresses with
3887 non-constant offsets. Try to determine the range of
3888 such an offset here and use it to adjust the constant
3889 size. */
3890 tree off = gimple_assign_rhs2 (stmt);
3891 if (TREE_CODE (off) == INTEGER_CST)
3892 {
3893 if (tree size = compute_objsize (dest, ostype, pdecl, poff))
3894 {
3895 wide_int wioff = wi::to_wide (off);
3896 wide_int wisiz = wi::to_wide (size);
3897
3898 /* Ignore negative offsets for now. For others,
3899 use the lower bound as the most optimistic
3900 estimate of the (remaining) size. */
3901 if (wi::neg_p (wioff))
3902 ;
3903 else
3904 {
3905 if (*poff)
3906 {
3907 *poff = fold_convert (ptrdiff_type_node, *poff);
3908 off = fold_convert (ptrdiff_type_node, *poff);
3909 *poff = size_binop (PLUS_EXPR, *poff, off);
3910 }
3911 else
3912 *poff = off;
3913 if (wi::ltu_p (wioff, wisiz))
3914 return wide_int_to_tree (TREE_TYPE (size),
3915 wi::sub (wisiz, wioff));
3916 return size_zero_node;
3917 }
3918 }
3919 }
3920 else if (TREE_CODE (off) == SSA_NAME
3921 && INTEGRAL_TYPE_P (TREE_TYPE (off)))
3922 {
3923 wide_int min, max;
3924 enum value_range_kind rng = get_range_info (off, &min, &max);
3925
3926 if (rng == VR_RANGE)
3927 if (tree size = compute_objsize (dest, ostype, pdecl, poff))
3928 {
3929 wide_int wisiz = wi::to_wide (size);
3930
3931 /* Ignore negative offsets for now. For others,
3932 use the lower bound as the most optimistic
3933 		     estimate of the (remaining) size.  */
3934 if (wi::neg_p (min) || wi::neg_p (max))
3935 ;
3936 else
3937 {
3938 /* FIXME: For now, since the offset is non-constant,
3939 clear *POFF to keep it from being "misused."
3940 Eventually *POFF will need to become a range that
3941 can be properly added to the outer offset if it
3942 too is one. */
3943 *poff = NULL_TREE;
3944 if (wi::ltu_p (min, wisiz))
3945 return wide_int_to_tree (TREE_TYPE (size),
3946 wi::sub (wisiz, min));
3947 return size_zero_node;
3948 }
3949 }
3950 }
3951 }
3952 else if (code != ADDR_EXPR)
3953 return NULL_TREE;
3954 }
3955
3956 /* Unless computing the largest size (for memcpy and other raw memory
3957 functions), try to determine the size of the object from its type. */
3958 if (!ostype)
3959 return NULL_TREE;
3960
3961 if (TREE_CODE (dest) == ARRAY_REF
3962 || TREE_CODE (dest) == MEM_REF)
3963 {
3964 tree ref = TREE_OPERAND (dest, 0);
3965 tree reftype = TREE_TYPE (ref);
3966 if (TREE_CODE (dest) == MEM_REF && TREE_CODE (reftype) == POINTER_TYPE)
3967 {
3968 /* Give up for MEM_REFs of vector types; those may be synthesized
3969 from multiple assignments to consecutive data members. See PR
3970 93200.
3971 FIXME: Deal with this more generally, e.g., by marking up such
3972 MEM_REFs at the time they're created. */
3973 reftype = TREE_TYPE (reftype);
3974 if (TREE_CODE (reftype) == VECTOR_TYPE)
3975 return NULL_TREE;
3976 }
3977 tree off = TREE_OPERAND (dest, 1);
3978 if (tree size = compute_objsize (ref, ostype, pdecl, poff))
3979 {
3980 /* If the declaration of the destination object is known
3981 to have zero size, return zero. */
3982 if (integer_zerop (size)
3983 && *pdecl && DECL_P (*pdecl)
3984 && *poff && integer_zerop (*poff))
3985 return size_zero_node;
3986
3987 /* A valid offset into a declared object cannot be negative.
3988 A zero size with a zero "inner" offset is still zero size
3989 regardless of the "other" offset OFF. */
3990 if (*poff
3991 && ((integer_zerop (*poff) && integer_zerop (size))
3992 || (TREE_CODE (*poff) == INTEGER_CST
3993 && tree_int_cst_sgn (*poff) < 0)))
3994 return size_zero_node;
3995
3996 wide_int offrng[2];
3997 if (!get_range (off, offrng, rvals))
3998 return NULL_TREE;
3999
4000 /* Convert to the same precision to keep wide_int from "helpfully"
4001 crashing whenever it sees other arguments. */
4002 const unsigned sizprec = TYPE_PRECISION (sizetype);
4003 offrng[0] = wide_int::from (offrng[0], sizprec, SIGNED);
4004 offrng[1] = wide_int::from (offrng[1], sizprec, SIGNED);
4005
4006 /* Adjust SIZE either up or down by the sum of *POFF and OFF
4007 above. */
4008 if (TREE_CODE (dest) == ARRAY_REF)
4009 {
4010 tree lowbnd = array_ref_low_bound (dest);
4011 if (!integer_zerop (lowbnd) && tree_fits_uhwi_p (lowbnd))
4012 {
4013 /* Adjust the offset by the low bound of the array
4014 domain (normally zero but 1 in Fortran). */
4015 unsigned HOST_WIDE_INT lb = tree_to_uhwi (lowbnd);
4016 offrng[0] -= lb;
4017 offrng[1] -= lb;
4018 }
4019
4020 /* Convert the array index into a byte offset. */
4021 tree eltype = TREE_TYPE (dest);
4022 tree tpsize = TYPE_SIZE_UNIT (eltype);
4023 if (tpsize && TREE_CODE (tpsize) == INTEGER_CST)
4024 {
4025 wide_int wsz = wi::to_wide (tpsize, offrng->get_precision ());
4026 offrng[0] *= wsz;
4027 offrng[1] *= wsz;
4028 }
4029 else
4030 return NULL_TREE;
4031 }
4032
4033 wide_int wisize = wi::to_wide (size);
4034
4035 if (!*poff)
4036 {
4037 /* If the "inner" offset is unknown and the "outer" offset
4038 is either negative or less than SIZE, return the size
4039 minus the offset. This may be overly optimistic in
4040 the first case if the inner offset happens to be less
4041 than the absolute value of the outer offset. */
4042 if (wi::neg_p (offrng[0]))
4043 return size;
4044 if (wi::ltu_p (offrng[0], wisize))
4045 return build_int_cst (sizetype, (wisize - offrng[0]).to_uhwi ());
4046 return size_zero_node;
4047 }
4048
4049 	  /* Convert to the same precision to keep wide_int from "helpfully"
4050 	     crashing whenever it sees other arguments.  */
4051 offrng[0] = wide_int::from (offrng[0], sizprec, SIGNED);
4052 offrng[1] = wide_int::from (offrng[1], sizprec, SIGNED);
4053
4054 tree dstoff = *poff;
4055 if (integer_zerop (*poff))
4056 *poff = off;
4057 else if (!integer_zerop (off))
4058 {
4059 *poff = fold_convert (ptrdiff_type_node, *poff);
4060 off = fold_convert (ptrdiff_type_node, off);
4061 *poff = size_binop (PLUS_EXPR, *poff, off);
4062 }
4063
4064 if (!wi::neg_p (offrng[0]))
4065 {
4066 if (TREE_CODE (size) != INTEGER_CST)
4067 return NULL_TREE;
4068
4069 /* Return the difference between the size and the offset
4070 or zero if the offset is greater. */
4071 wide_int wisize = wi::to_wide (size, sizprec);
4072 if (wi::ltu_p (wisize, offrng[0]))
4073 return size_zero_node;
4074
4075 return wide_int_to_tree (sizetype, wisize - offrng[0]);
4076 }
4077
4078 wide_int dstoffrng[2];
4079 if (TREE_CODE (dstoff) == INTEGER_CST)
4080 dstoffrng[0] = dstoffrng[1] = wi::to_wide (dstoff);
4081 else if (TREE_CODE (dstoff) == SSA_NAME)
4082 {
4083 enum value_range_kind rng
4084 = get_range_info (dstoff, dstoffrng, dstoffrng + 1);
4085 if (rng != VR_RANGE)
4086 return NULL_TREE;
4087 }
4088 else
4089 return NULL_TREE;
4090
4091 dstoffrng[0] = wide_int::from (dstoffrng[0], sizprec, SIGNED);
4092 dstoffrng[1] = wide_int::from (dstoffrng[1], sizprec, SIGNED);
4093
4094 if (!wi::neg_p (dstoffrng[0]))
4095 wisize += dstoffrng[0];
4096
4097 offrng[1] += dstoffrng[1];
4098 if (wi::neg_p (offrng[1]))
4099 return size_zero_node;
4100
4101 return wide_int_to_tree (sizetype, wisize);
4102 }
4103
4104 return NULL_TREE;
4105 }
4106
4107 /* Try simple DECLs not handled above. */
4108 if (tree size = addr_decl_size (dest, pdecl, poff))
4109 return size;
4110
4111 tree type = TREE_TYPE (dest);
4112 if (TREE_CODE (type) == POINTER_TYPE)
4113 type = TREE_TYPE (type);
4114
4115 type = TYPE_MAIN_VARIANT (type);
4116 if (TREE_CODE (dest) == ADDR_EXPR)
4117 dest = TREE_OPERAND (dest, 0);
4118
4119 if (TREE_CODE (type) == ARRAY_TYPE
4120 && !array_at_struct_end_p (dest))
4121 {
4122 if (tree size = TYPE_SIZE_UNIT (type))
4123 return TREE_CODE (size) == INTEGER_CST ? size : NULL_TREE;
4124 }
4125
4126 return NULL_TREE;
4127 }
4128
4129 /* Helper to determine and check the sizes of the source and the destination
4130    of calls to __builtin_{bzero,memcpy,mempcpy,memset}.  EXP is the
4131 call expression, DEST is the destination argument, SRC is the source
4132 argument or null, and LEN is the number of bytes. Use Object Size type-0
4133 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
4134 (no overflow or invalid sizes), false otherwise. */
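/* For instance (hypothetical snippet), for

     char d[4];
     memcpy (d, s, 8);

   the type-0 destination size is 4 while SIZE is 8, so check_access
   is expected to diagnose the overflow and this function returns
   false.  */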
4135
4136 static bool
4137 check_memop_access (tree exp, tree dest, tree src, tree size)
4138 {
4139 /* For functions like memset and memcpy that operate on raw memory
4140 try to determine the size of the largest source and destination
4141 object using type-0 Object Size regardless of the object size
4142 type specified by the option. */
4143 tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
4144 tree dstsize = compute_objsize (dest, 0);
4145
4146 return check_access (exp, dest, src, size, /*maxread=*/NULL_TREE,
4147 srcsize, dstsize);
4148 }
4149
4150 /* Validate memchr arguments without performing any expansion.
4151 Return NULL_RTX. */
4152
4153 static rtx
4154 expand_builtin_memchr (tree exp, rtx)
4155 {
4156 if (!validate_arglist (exp,
4157 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4158 return NULL_RTX;
4159
4160 tree arg1 = CALL_EXPR_ARG (exp, 0);
4161 tree len = CALL_EXPR_ARG (exp, 2);
4162
4163 /* Diagnose calls where the specified length exceeds the size
4164 of the object. */
4165 if (warn_stringop_overflow)
4166 {
4167 tree size = compute_objsize (arg1, 0);
4168 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
4169 /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
4170 }
4171
4172 return NULL_RTX;
4173 }
4174
4175 /* Expand a call EXP to the memcpy builtin.
4176    Return NULL_RTX if we failed; the caller should emit a normal call,
4177 otherwise try to get the result in TARGET, if convenient (and in
4178 mode MODE if that's convenient). */
4179
4180 static rtx
4181 expand_builtin_memcpy (tree exp, rtx target)
4182 {
4183 if (!validate_arglist (exp,
4184 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4185 return NULL_RTX;
4186
4187 tree dest = CALL_EXPR_ARG (exp, 0);
4188 tree src = CALL_EXPR_ARG (exp, 1);
4189 tree len = CALL_EXPR_ARG (exp, 2);
4190
4191 check_memop_access (exp, dest, src, len);
4192
4193 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
4194 /*retmode=*/ RETURN_BEGIN, false);
4195 }
4196
4197 /* Check a call EXP to the memmove built-in for validity.
4198 Return NULL_RTX on both success and failure. */
4199
4200 static rtx
4201 expand_builtin_memmove (tree exp, rtx target)
4202 {
4203 if (!validate_arglist (exp,
4204 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4205 return NULL_RTX;
4206
4207 tree dest = CALL_EXPR_ARG (exp, 0);
4208 tree src = CALL_EXPR_ARG (exp, 1);
4209 tree len = CALL_EXPR_ARG (exp, 2);
4210
4211 check_memop_access (exp, dest, src, len);
4212
4213 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
4214 /*retmode=*/ RETURN_BEGIN, true);
4215 }
4216
4217 /* Expand a call EXP to the mempcpy builtin.
4218 Return NULL_RTX if we failed; the caller should emit a normal call,
4219 otherwise try to get the result in TARGET, if convenient (and in
4220 mode MODE if that's convenient). */
4221
4222 static rtx
4223 expand_builtin_mempcpy (tree exp, rtx target)
4224 {
4225 if (!validate_arglist (exp,
4226 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4227 return NULL_RTX;
4228
4229 tree dest = CALL_EXPR_ARG (exp, 0);
4230 tree src = CALL_EXPR_ARG (exp, 1);
4231 tree len = CALL_EXPR_ARG (exp, 2);
4232
4233 /* Policy does not generally allow using compute_objsize (which
4234 	 is used internally by check_memop_access) to change code generation
4235 or drive optimization decisions.
4236
4237 In this instance it is safe because the code we generate has
4238 the same semantics regardless of the return value of
4239 	 check_memop_access.  Exactly the same amount of data is copied
4240 and the return value is exactly the same in both cases.
4241
4242 	 Furthermore, check_memop_access always uses mode 0 for the call to
4243 compute_objsize, so the imprecise nature of compute_objsize is
4244 avoided. */
4245
4246 /* Avoid expanding mempcpy into memcpy when the call is determined
4247 to overflow the buffer. This also prevents the same overflow
4248 from being diagnosed again when expanding memcpy. */
4249 if (!check_memop_access (exp, dest, src, len))
4250 return NULL_RTX;
4251
4252 return expand_builtin_mempcpy_args (dest, src, len,
4253 target, exp, /*retmode=*/ RETURN_END);
4254 }
4255
4256 /* Helper function to do the actual work for expand of memory copy family
4257    functions (memcpy, mempcpy, stpcpy).  Expansion should copy LEN bytes
4258 of memory from SRC to DEST and assign to TARGET if convenient. Return
4259 value is based on RETMODE argument. */
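/* Specifically, based on the RETMODE handling at the end of this
   function: RETURN_BEGIN yields DEST (memcpy), RETURN_END yields
   DEST + LEN (mempcpy), and RETURN_END_MINUS_ONE yields DEST + LEN - 1
   (stpcpy, pointing at the terminating nul).  */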
4260
4261 static rtx
4262 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
4263 rtx target, tree exp, memop_ret retmode,
4264 bool might_overlap)
4265 {
4266 unsigned int src_align = get_pointer_alignment (src);
4267 unsigned int dest_align = get_pointer_alignment (dest);
4268 rtx dest_mem, src_mem, dest_addr, len_rtx;
4269 HOST_WIDE_INT expected_size = -1;
4270 unsigned int expected_align = 0;
4271 unsigned HOST_WIDE_INT min_size;
4272 unsigned HOST_WIDE_INT max_size;
4273 unsigned HOST_WIDE_INT probable_max_size;
4274
4275 bool is_move_done;
4276
4277 /* If DEST is not a pointer type, call the normal function. */
4278 if (dest_align == 0)
4279 return NULL_RTX;
4280
4281   /* If SRC is not a pointer type, don't do this
4282      operation in-line.  */
4283 if (src_align == 0)
4284 return NULL_RTX;
4285
4286 if (currently_expanding_gimple_stmt)
4287 stringop_block_profile (currently_expanding_gimple_stmt,
4288 &expected_align, &expected_size);
4289
4290 if (expected_align < dest_align)
4291 expected_align = dest_align;
4292 dest_mem = get_memory_rtx (dest, len);
4293 set_mem_align (dest_mem, dest_align);
4294 len_rtx = expand_normal (len);
4295 determine_block_size (len, len_rtx, &min_size, &max_size,
4296 &probable_max_size);
4297
4298 /* Try to get the byte representation of the constant SRC points to,
4299 with its byte size in NBYTES. */
4300 unsigned HOST_WIDE_INT nbytes;
4301 const char *rep = c_getstr (src, &nbytes);
4302
4303 /* If the function's constant bound LEN_RTX is less than or equal
4304 to the byte size of the representation of the constant argument,
4305 and if block move would be done by pieces, we can avoid loading
4306 the bytes from memory and only store the computed constant.
4307 This works in the overlap (memmove) case as well because
4308 store_by_pieces just generates a series of stores of constants
4309 from the representation returned by c_getstr(). */
4310 if (rep
4311 && CONST_INT_P (len_rtx)
4312 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes
4313 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
4314 CONST_CAST (char *, rep),
4315 dest_align, false))
4316 {
4317 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
4318 builtin_memcpy_read_str,
4319 CONST_CAST (char *, rep),
4320 dest_align, false, retmode);
4321 dest_mem = force_operand (XEXP (dest_mem, 0), target);
4322 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4323 return dest_mem;
4324 }
4325
4326 src_mem = get_memory_rtx (src, len);
4327 set_mem_align (src_mem, src_align);
4328
4329 /* Copy word part most expediently. */
4330 enum block_op_methods method = BLOCK_OP_NORMAL;
4331 if (CALL_EXPR_TAILCALL (exp)
4332 && (retmode == RETURN_BEGIN || target == const0_rtx))
4333 method = BLOCK_OP_TAILCALL;
4334 bool use_mempcpy_call = (targetm.libc_has_fast_function (BUILT_IN_MEMPCPY)
4335 && retmode == RETURN_END
4336 && !might_overlap
4337 && target != const0_rtx);
4338 if (use_mempcpy_call)
4339 method = BLOCK_OP_NO_LIBCALL_RET;
4340 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
4341 expected_align, expected_size,
4342 min_size, max_size, probable_max_size,
4343 use_mempcpy_call, &is_move_done,
4344 might_overlap);
4345
4346 /* Bail out when a mempcpy call would be expanded as libcall and when
4347 we have a target that provides a fast implementation
4348      of the mempcpy routine.  */
4349 if (!is_move_done)
4350 return NULL_RTX;
4351
4352 if (dest_addr == pc_rtx)
4353 return NULL_RTX;
4354
4355 if (dest_addr == 0)
4356 {
4357 dest_addr = force_operand (XEXP (dest_mem, 0), target);
4358 dest_addr = convert_memory_address (ptr_mode, dest_addr);
4359 }
4360
4361 if (retmode != RETURN_BEGIN && target != const0_rtx)
4362 {
4363 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
4364 	/* stpcpy returns a pointer to the last byte.  */
4365 if (retmode == RETURN_END_MINUS_ONE)
4366 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
4367 }
4368
4369 return dest_addr;
4370 }
4371
4372 static rtx
4373 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
4374 rtx target, tree orig_exp, memop_ret retmode)
4375 {
4376 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
4377 retmode, false);
4378 }
4379
4380 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
4381 we failed, the caller should emit a normal call, otherwise try to
4382 get the result in TARGET, if convenient.
4383 Return value is based on RETMODE argument. */
4384
4385 static rtx
4386 expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
4387 {
4388 class expand_operand ops[3];
4389 rtx dest_mem;
4390 rtx src_mem;
4391
4392 if (!targetm.have_movstr ())
4393 return NULL_RTX;
4394
4395 dest_mem = get_memory_rtx (dest, NULL);
4396 src_mem = get_memory_rtx (src, NULL);
4397 if (retmode == RETURN_BEGIN)
4398 {
4399 target = force_reg (Pmode, XEXP (dest_mem, 0));
4400 dest_mem = replace_equiv_address (dest_mem, target);
4401 }
4402
4403 create_output_operand (&ops[0],
4404 retmode != RETURN_BEGIN ? target : NULL_RTX, Pmode);
4405 create_fixed_operand (&ops[1], dest_mem);
4406 create_fixed_operand (&ops[2], src_mem);
4407 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
4408 return NULL_RTX;
4409
4410 if (retmode != RETURN_BEGIN && target != const0_rtx)
4411 {
4412 target = ops[0].value;
4413 /* movstr is supposed to set end to the address of the NUL
4414 terminator. If the caller requested a mempcpy-like return value,
4415 adjust it. */
4416 if (retmode == RETURN_END)
4417 {
4418 rtx tem = plus_constant (GET_MODE (target),
4419 gen_lowpart (GET_MODE (target), target), 1);
4420 emit_move_insn (target, force_operand (tem, NULL_RTX));
4421 }
4422 }
4423 return target;
4424 }
4425
4426 /* Do some very basic size validation of a call to the strcat builtin
4427 given by EXP. Return NULL_RTX to have the built-in expand to a call
4428 to the library function. */
4429
4430 static rtx
4431 expand_builtin_strcat (tree exp)
4432 {
4433 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
4434 || !warn_stringop_overflow)
4435 return NULL_RTX;
4436
4437 tree dest = CALL_EXPR_ARG (exp, 0);
4438 tree src = CALL_EXPR_ARG (exp, 1);
4439
4440 /* Detect unterminated source (only). */
4441 if (!check_nul_terminated_array (exp, src))
4442 return NULL_RTX;
4443
4444 /* There is no way here to determine the length of the string in
4445 the destination to which the SRC string is being appended so
4446      just diagnose cases when the source string is longer than
4447 the destination object. */
4448
4449 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4450
4451 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, src,
4452 destsize);
4453
4454 return NULL_RTX;
4455 }
4456
4457 /* Expand expression EXP, which is a call to the strcpy builtin. Return
4458    NULL_RTX if we failed; the caller should emit a normal call, otherwise
4459 try to get the result in TARGET, if convenient (and in mode MODE if that's
4460 convenient). */
4461
4462 static rtx
4463 expand_builtin_strcpy (tree exp, rtx target)
4464 {
4465 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4466 return NULL_RTX;
4467
4468 tree dest = CALL_EXPR_ARG (exp, 0);
4469 tree src = CALL_EXPR_ARG (exp, 1);
4470
4471 if (warn_stringop_overflow)
4472 {
4473 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4474 check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
4475 src, destsize);
4476 }
4477
4478 if (rtx ret = expand_builtin_strcpy_args (exp, dest, src, target))
4479 {
4480 /* Check to see if the argument was declared attribute nonstring
4481 and if so, issue a warning since at this point it's not known
4482 to be nul-terminated. */
4483 tree fndecl = get_callee_fndecl (exp);
4484 maybe_warn_nonstring_arg (fndecl, exp);
4485 return ret;
4486 }
4487
4488 return NULL_RTX;
4489 }
4490
4491 /* Helper function to do the actual work for expand_builtin_strcpy. The
4492 arguments to the builtin_strcpy call DEST and SRC are broken out
4493 so that this can also be called without constructing an actual CALL_EXPR.
4494 The other arguments and return value are the same as for
4495 expand_builtin_strcpy. */
4496
4497 static rtx
4498 expand_builtin_strcpy_args (tree exp, tree dest, tree src, rtx target)
4499 {
4500   /* Detect strcpy calls with unterminated arrays.  */
4501 if (tree nonstr = unterminated_array (src))
4502 {
4503 /* NONSTR refers to the non-nul terminated constant array. */
4504 if (!TREE_NO_WARNING (exp))
4505 warn_string_no_nul (EXPR_LOCATION (exp), "strcpy", src, nonstr);
4506 return NULL_RTX;
4507 }
4508
4509 return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
4510 }
4511
4512 /* Expand a call EXP to the stpcpy builtin.
4513    Return NULL_RTX if we failed; the caller should emit a normal call,
4514 otherwise try to get the result in TARGET, if convenient (and in
4515 mode MODE if that's convenient). */
4516
4517 static rtx
4518 expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
4519 {
4520 tree dst, src;
4521 location_t loc = EXPR_LOCATION (exp);
4522
4523 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4524 return NULL_RTX;
4525
4526 dst = CALL_EXPR_ARG (exp, 0);
4527 src = CALL_EXPR_ARG (exp, 1);
4528
4529 if (warn_stringop_overflow)
4530 {
4531 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
4532 check_access (exp, dst, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
4533 src, destsize);
4534 }
4535
4536 /* If return value is ignored, transform stpcpy into strcpy. */
4537 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
4538 {
4539 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
4540 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
4541 return expand_expr (result, target, mode, EXPAND_NORMAL);
4542 }
4543 else
4544 {
4545 tree len, lenp1;
4546 rtx ret;
4547
4548 /* Ensure we get an actual string whose length can be evaluated at
4549 compile-time, not an expression containing a string. This is
4550 because the latter will potentially produce pessimized code
4551 when used to produce the return value. */
4552 c_strlen_data lendata = { };
4553 if (!c_getstr (src, NULL)
4554 || !(len = c_strlen (src, 0, &lendata, 1)))
4555 return expand_movstr (dst, src, target,
4556 /*retmode=*/ RETURN_END_MINUS_ONE);
4557
4558 if (lendata.decl && !TREE_NO_WARNING (exp))
4559 warn_string_no_nul (EXPR_LOCATION (exp), "stpcpy", src, lendata.decl);
4560
4561 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
4562 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
4563 target, exp,
4564 /*retmode=*/ RETURN_END_MINUS_ONE);
4565
4566 if (ret)
4567 return ret;
4568
4569 if (TREE_CODE (len) == INTEGER_CST)
4570 {
4571 rtx len_rtx = expand_normal (len);
4572
4573 if (CONST_INT_P (len_rtx))
4574 {
4575 ret = expand_builtin_strcpy_args (exp, dst, src, target);
4576
4577 if (ret)
4578 {
4579 if (! target)
4580 {
4581 if (mode != VOIDmode)
4582 target = gen_reg_rtx (mode);
4583 else
4584 target = gen_reg_rtx (GET_MODE (ret));
4585 }
4586 if (GET_MODE (target) != GET_MODE (ret))
4587 ret = gen_lowpart (GET_MODE (target), ret);
4588
4589 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
4590 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
4591 gcc_assert (ret);
4592
4593 return target;
4594 }
4595 }
4596 }
4597
4598 return expand_movstr (dst, src, target,
4599 /*retmode=*/ RETURN_END_MINUS_ONE);
4600 }
4601 }
4602
4603 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
4604 arguments while being careful to avoid duplicate warnings (which could
4605 be issued if the expander were to expand the call, resulting in it
4606    being emitted in expand_call()).  */
4607
4608 static rtx
4609 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
4610 {
4611 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
4612 {
4613 /* The call has been successfully expanded. Check for nonstring
4614 arguments and issue warnings as appropriate. */
4615 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
4616 return ret;
4617 }
4618
4619 return NULL_RTX;
4620 }
4621
4622 /* Check a call EXP to the stpncpy built-in for validity.
4623 Return NULL_RTX on both success and failure. */
4624
4625 static rtx
4626 expand_builtin_stpncpy (tree exp, rtx)
4627 {
4628 if (!validate_arglist (exp,
4629 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4630 || !warn_stringop_overflow)
4631 return NULL_RTX;
4632
4633 /* The source and destination of the call. */
4634 tree dest = CALL_EXPR_ARG (exp, 0);
4635 tree src = CALL_EXPR_ARG (exp, 1);
4636
4637 /* The exact number of bytes to write (not the maximum). */
4638 tree len = CALL_EXPR_ARG (exp, 2);
4639 if (!check_nul_terminated_array (exp, src, len))
4640 return NULL_RTX;
4641
4642 /* The size of the destination object. */
4643 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4644
4645 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src, destsize);
4646
4647 return NULL_RTX;
4648 }
4649
4650 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
4651 bytes from constant string DATA + OFFSET and return it as target
4652 constant. */
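/* For example, for DATA pointing at "ab" and a 4-byte MODE, reading at
   OFFSET 0 is expected to yield a constant holding the bytes
   'a', 'b', 0, 0 (assuming c_readstr pads past the terminating nul
   with zeros), while an OFFSET greater than the string length yields
   const0_rtx.  */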
4653
4654 rtx
4655 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
4656 scalar_int_mode mode)
4657 {
4658 const char *str = (const char *) data;
4659
4660 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
4661 return const0_rtx;
4662
4663 return c_readstr (str + offset, mode);
4664 }
4665
4666 /* Helper to check the sizes of sequences and the destination of calls
4667 to __builtin_strncat and __builtin___strncat_chk. Returns true on
4668 success (no overflow or invalid sizes), false otherwise. */
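/* For example (hypothetical snippet), for

     char d[8];
     strncat (d, s, sizeof d);

   the bound equals the destination size, so the function issues the
   "specified bound equals destination size" warning and returns
   false.  */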
4669
4670 static bool
4671 check_strncat_sizes (tree exp, tree objsize)
4672 {
4673 tree dest = CALL_EXPR_ARG (exp, 0);
4674 tree src = CALL_EXPR_ARG (exp, 1);
4675 tree maxread = CALL_EXPR_ARG (exp, 2);
4676
4677 /* Try to determine the range of lengths that the source expression
4678 refers to. */
4679 c_strlen_data lendata = { };
4680 get_range_strlen (src, &lendata, /* eltsize = */ 1);
4681
4682 /* Try to verify that the destination is big enough for the shortest
4683 string. */
4684
4685 if (!objsize && warn_stringop_overflow)
4686 {
4687 /* If it hasn't been provided by __strncat_chk, try to determine
4688 the size of the destination object into which the source is
4689 being copied. */
4690 objsize = compute_objsize (dest, warn_stringop_overflow - 1);
4691 }
4692
4693 /* Add one for the terminating nul. */
4694 tree srclen = (lendata.minlen
4695 ? fold_build2 (PLUS_EXPR, size_type_node, lendata.minlen,
4696 size_one_node)
4697 : NULL_TREE);
4698
4699 /* The strncat function copies at most MAXREAD bytes and always appends
4700 the terminating nul so the specified upper bound should never be equal
4701 to (or greater than) the size of the destination. */
4702 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
4703 && tree_int_cst_equal (objsize, maxread))
4704 {
4705 location_t loc = tree_nonartificial_location (exp);
4706 loc = expansion_point_location_if_in_system_header (loc);
4707
4708 warning_at (loc, OPT_Wstringop_overflow_,
4709 "%K%qD specified bound %E equals destination size",
4710 exp, get_callee_fndecl (exp), maxread);
4711
4712 return false;
4713 }
4714
4715 if (!srclen
4716 || (maxread && tree_fits_uhwi_p (maxread)
4717 && tree_fits_uhwi_p (srclen)
4718 && tree_int_cst_lt (maxread, srclen)))
4719 srclen = maxread;
4720
4721 /* The number of bytes to write is LEN but check_access will also
4722 check SRCLEN if LEN's value isn't known. */
4723 return check_access (exp, dest, src, /*size=*/NULL_TREE, maxread, srclen,
4724 objsize);
4725 }
4726
4727 /* Similar to expand_builtin_strcat, do some very basic size validation
4728    of a call to the strncat builtin given by EXP.  Return NULL_RTX to have
4729 the built-in expand to a call to the library function. */
4730
4731 static rtx
4732 expand_builtin_strncat (tree exp, rtx)
4733 {
4734 if (!validate_arglist (exp,
4735 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4736 || !warn_stringop_overflow)
4737 return NULL_RTX;
4738
4739 tree dest = CALL_EXPR_ARG (exp, 0);
4740 tree src = CALL_EXPR_ARG (exp, 1);
4741 /* The upper bound on the number of bytes to write. */
4742 tree maxread = CALL_EXPR_ARG (exp, 2);
4743
4744 /* Detect unterminated source (only). */
4745 if (!check_nul_terminated_array (exp, src, maxread))
4746 return NULL_RTX;
4747
4748 /* The length of the source sequence. */
4749 tree slen = c_strlen (src, 1);
4750
4751 /* Try to determine the range of lengths that the source expression
4752 refers to. Since the lengths are only used for warning and not
4753    for code generation, disable strict mode below.  */
4754 tree maxlen = slen;
4755 if (!maxlen)
4756 {
4757 c_strlen_data lendata = { };
4758 get_range_strlen (src, &lendata, /* eltsize = */ 1);
4759 maxlen = lendata.maxbound;
4760 }
4761
4762 /* Try to verify that the destination is big enough for the shortest
4763 string. First try to determine the size of the destination object
4764 into which the source is being copied. */
4765 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4766
4767 /* Add one for the terminating nul. */
4768 tree srclen = (maxlen
4769 ? fold_build2 (PLUS_EXPR, size_type_node, maxlen,
4770 size_one_node)
4771 : NULL_TREE);
4772
4773 /* The strncat function copies at most MAXREAD bytes and always appends
4774 the terminating nul so the specified upper bound should never be equal
4775 to (or greater than) the size of the destination. */
4776 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
4777 && tree_int_cst_equal (destsize, maxread))
4778 {
4779 location_t loc = tree_nonartificial_location (exp);
4780 loc = expansion_point_location_if_in_system_header (loc);
4781
4782 warning_at (loc, OPT_Wstringop_overflow_,
4783 "%K%qD specified bound %E equals destination size",
4784 exp, get_callee_fndecl (exp), maxread);
4785
4786 return NULL_RTX;
4787 }
4788
4789 if (!srclen
4790 || (maxread && tree_fits_uhwi_p (maxread)
4791 && tree_fits_uhwi_p (srclen)
4792 && tree_int_cst_lt (maxread, srclen)))
4793 srclen = maxread;
4794
4795 /* The number of bytes to write is SRCLEN. */
4796 check_access (exp, dest, src, NULL_TREE, maxread, srclen, destsize);
4797
4798 return NULL_RTX;
4799 }
4800
4801 /* Expand expression EXP, which is a call to the strncpy builtin. Return
4802    NULL_RTX if we failed; the caller should emit a normal call.  */
4803
4804 static rtx
4805 expand_builtin_strncpy (tree exp, rtx target)
4806 {
4807 location_t loc = EXPR_LOCATION (exp);
4808
4809 if (!validate_arglist (exp,
4810 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4811 return NULL_RTX;
4812 tree dest = CALL_EXPR_ARG (exp, 0);
4813 tree src = CALL_EXPR_ARG (exp, 1);
4814 /* The number of bytes to write (not the maximum). */
4815 tree len = CALL_EXPR_ARG (exp, 2);
4816
4817 if (!check_nul_terminated_array (exp, src, len))
4818 return NULL_RTX;
4819
4820 /* The length of the source sequence. */
4821 tree slen = c_strlen (src, 1);
4822
4823 if (warn_stringop_overflow)
4824 {
4825 tree destsize = compute_objsize (dest,
4826 warn_stringop_overflow - 1);
4827
4828 /* The number of bytes to write is LEN but check_access will also
4829 check SLEN if LEN's value isn't known. */
4830 check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src,
4831 destsize);
4832 }
4833
4834 /* We must be passed a constant len and src parameter. */
4835 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
4836 return NULL_RTX;
4837
4838 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
4839
4840 /* We're required to pad with trailing zeros if the requested
4841 len is greater than strlen(s2)+1. In that case try to
4842      use store_by_pieces; if it fails, punt.  */
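  /* For example, strncpy (d, "ab", 8) must store "ab" followed by six
     trailing nul bytes; when the source is a constant string and the
     alignment allows, the store_by_pieces path below emits those
     stores directly.  */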
4843 if (tree_int_cst_lt (slen, len))
4844 {
4845 unsigned int dest_align = get_pointer_alignment (dest);
4846 const char *p = c_getstr (src);
4847 rtx dest_mem;
4848
4849 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
4850 || !can_store_by_pieces (tree_to_uhwi (len),
4851 builtin_strncpy_read_str,
4852 CONST_CAST (char *, p),
4853 dest_align, false))
4854 return NULL_RTX;
4855
4856 dest_mem = get_memory_rtx (dest, len);
4857 store_by_pieces (dest_mem, tree_to_uhwi (len),
4858 builtin_strncpy_read_str,
4859 CONST_CAST (char *, p), dest_align, false,
4860 RETURN_BEGIN);
4861 dest_mem = force_operand (XEXP (dest_mem, 0), target);
4862 dest_mem = convert_memory_address (ptr_mode, dest_mem);
4863 return dest_mem;
4864 }
4865
4866 return NULL_RTX;
4867 }
4868
4869 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
4870 bytes from constant string DATA + OFFSET and return it as target
4871 constant. */
4872
4873 rtx
4874 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4875 scalar_int_mode mode)
4876 {
4877 const char *c = (const char *) data;
4878 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
4879
4880 memset (p, *c, GET_MODE_SIZE (mode));
4881
4882 return c_readstr (p, mode);
4883 }
4884
4885 /* Callback routine for store_by_pieces. Return the RTL of a register
4886 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4887 char value given in the RTL register data. For example, if mode is
4888 4 bytes wide, return the RTL for 0x01010101*data. */
4889
4890 static rtx
4891 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4892 scalar_int_mode mode)
4893 {
4894 rtx target, coeff;
4895 size_t size;
4896 char *p;
4897
4898 size = GET_MODE_SIZE (mode);
4899 if (size == 1)
4900 return (rtx) data;
4901
4902 p = XALLOCAVEC (char, size);
4903 memset (p, 1, size);
4904 coeff = c_readstr (p, mode);
4905
4906 target = convert_to_mode (mode, (rtx) data, 1);
4907 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4908 return force_reg (mode, target);
4909 }
4910
4911 /* Expand expression EXP, which is a call to the memset builtin. Return
4912    NULL_RTX if we failed; the caller should emit a normal call, otherwise
4913 try to get the result in TARGET, if convenient (and in mode MODE if that's
4914 convenient). */
4915
4916 static rtx
4917 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4918 {
4919 if (!validate_arglist (exp,
4920 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4921 return NULL_RTX;
4922
4923 tree dest = CALL_EXPR_ARG (exp, 0);
4924 tree val = CALL_EXPR_ARG (exp, 1);
4925 tree len = CALL_EXPR_ARG (exp, 2);
4926
4927 check_memop_access (exp, dest, NULL_TREE, len);
4928
4929 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4930 }
4931
4932 /* Helper function to do the actual work for expand_builtin_memset. The
4933 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4934 so that this can also be called without constructing an actual CALL_EXPR.
4935 The other arguments and return value are the same as for
4936 expand_builtin_memset. */
4937
4938 static rtx
4939 expand_builtin_memset_args (tree dest, tree val, tree len,
4940 rtx target, machine_mode mode, tree orig_exp)
4941 {
4942 tree fndecl, fn;
4943 enum built_in_function fcode;
4944 machine_mode val_mode;
4945 char c;
4946 unsigned int dest_align;
4947 rtx dest_mem, dest_addr, len_rtx;
4948 HOST_WIDE_INT expected_size = -1;
4949 unsigned int expected_align = 0;
4950 unsigned HOST_WIDE_INT min_size;
4951 unsigned HOST_WIDE_INT max_size;
4952 unsigned HOST_WIDE_INT probable_max_size;
4953
4954 dest_align = get_pointer_alignment (dest);
4955
4956 /* If DEST is not a pointer type, don't do this operation in-line. */
4957 if (dest_align == 0)
4958 return NULL_RTX;
4959
4960 if (currently_expanding_gimple_stmt)
4961 stringop_block_profile (currently_expanding_gimple_stmt,
4962 &expected_align, &expected_size);
4963
4964 if (expected_align < dest_align)
4965 expected_align = dest_align;
4966
4967 /* If the LEN parameter is zero, return DEST. */
4968 if (integer_zerop (len))
4969 {
4970 /* Evaluate and ignore VAL in case it has side-effects. */
4971 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4972 return expand_expr (dest, target, mode, EXPAND_NORMAL);
4973 }
4974
4975 /* Stabilize the arguments in case we fail. */
4976 dest = builtin_save_expr (dest);
4977 val = builtin_save_expr (val);
4978 len = builtin_save_expr (len);
4979
4980 len_rtx = expand_normal (len);
4981 determine_block_size (len, len_rtx, &min_size, &max_size,
4982 &probable_max_size);
4983 dest_mem = get_memory_rtx (dest, len);
4984 val_mode = TYPE_MODE (unsigned_char_type_node);
4985
4986 if (TREE_CODE (val) != INTEGER_CST)
4987 {
4988 rtx val_rtx;
4989
4990 val_rtx = expand_normal (val);
4991 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4992
4993 /* Assume that we can memset by pieces if we can store
4994 * the coefficients by pieces (in the required modes).
4995 * We can't pass builtin_memset_gen_str as that emits RTL. */
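/* Illustration: for a four-byte mode builtin_memset_read_str produces the
coefficient 0x01010101, and builtin_memset_gen_str multiplies the
zero-extended value by it to replicate the byte across the word.  */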
4996 c = 1;
4997 if (tree_fits_uhwi_p (len)
4998 && can_store_by_pieces (tree_to_uhwi (len),
4999 builtin_memset_read_str, &c, dest_align,
5000 true))
5001 {
5002 val_rtx = force_reg (val_mode, val_rtx);
5003 store_by_pieces (dest_mem, tree_to_uhwi (len),
5004 builtin_memset_gen_str, val_rtx, dest_align,
5005 true, RETURN_BEGIN);
5006 }
5007 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
5008 dest_align, expected_align,
5009 expected_size, min_size, max_size,
5010 probable_max_size))
5011 goto do_libcall;
5012
5013 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
5014 dest_mem = convert_memory_address (ptr_mode, dest_mem);
5015 return dest_mem;
5016 }
5017
5018 if (target_char_cast (val, &c))
5019 goto do_libcall;
5020
5021 if (c)
5022 {
5023 if (tree_fits_uhwi_p (len)
5024 && can_store_by_pieces (tree_to_uhwi (len),
5025 builtin_memset_read_str, &c, dest_align,
5026 true))
5027 store_by_pieces (dest_mem, tree_to_uhwi (len),
5028 builtin_memset_read_str, &c, dest_align, true,
5029 RETURN_BEGIN);
5030 else if (!set_storage_via_setmem (dest_mem, len_rtx,
5031 gen_int_mode (c, val_mode),
5032 dest_align, expected_align,
5033 expected_size, min_size, max_size,
5034 probable_max_size))
5035 goto do_libcall;
5036
5037 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
5038 dest_mem = convert_memory_address (ptr_mode, dest_mem);
5039 return dest_mem;
5040 }
5041
5042 set_mem_align (dest_mem, dest_align);
5043 dest_addr = clear_storage_hints (dest_mem, len_rtx,
5044 CALL_EXPR_TAILCALL (orig_exp)
5045 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
5046 expected_align, expected_size,
5047 min_size, max_size,
5048 probable_max_size);
5049
5050 if (dest_addr == 0)
5051 {
5052 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
5053 dest_addr = convert_memory_address (ptr_mode, dest_addr);
5054 }
5055
5056 return dest_addr;
5057
5058 do_libcall:
5059 fndecl = get_callee_fndecl (orig_exp);
5060 fcode = DECL_FUNCTION_CODE (fndecl);
5061 if (fcode == BUILT_IN_MEMSET)
5062 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
5063 dest, val, len);
5064 else if (fcode == BUILT_IN_BZERO)
5065 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
5066 dest, len);
5067 else
5068 gcc_unreachable ();
5069 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
5070 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
5071 return expand_call (fn, target, target == const0_rtx);
5072 }
5073
5074 /* Expand expression EXP, which is a call to the bzero builtin. Return
5075 NULL_RTX if we failed; the caller should emit a normal call. */
5076
5077 static rtx
5078 expand_builtin_bzero (tree exp)
5079 {
5080 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5081 return NULL_RTX;
5082
5083 tree dest = CALL_EXPR_ARG (exp, 0);
5084 tree size = CALL_EXPR_ARG (exp, 1);
5085
5086 check_memop_access (exp, dest, NULL_TREE, size);
5087
5088 /* New argument list transforming bzero(ptr x, int y) to
5089 memset(ptr x, int 0, size_t y). This is done this way
5090 so that if it isn't expanded inline, we fall back to
5091 calling bzero instead of memset. */
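/* Sketch of the rewrite, for a call bzero (p, n): it is expanded as
memset (p, 0, (size_t) n), and if that expansion fails, do_libcall in
expand_builtin_memset_args re-emits a bzero call because ORIG_EXP is the
original bzero CALL_EXPR.  */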
5092
5093 location_t loc = EXPR_LOCATION (exp);
5094
5095 return expand_builtin_memset_args (dest, integer_zero_node,
5096 fold_convert_loc (loc,
5097 size_type_node, size),
5098 const0_rtx, VOIDmode, exp);
5099 }
5100
5101 /* Try to expand cmpstr operation ICODE with the given operands.
5102 Return the result rtx on success, otherwise return null. */
5103
5104 static rtx
5105 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
5106 HOST_WIDE_INT align)
5107 {
5108 machine_mode insn_mode = insn_data[icode].operand[0].mode;
5109
5110 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
5111 target = NULL_RTX;
5112
5113 class expand_operand ops[4];
5114 create_output_operand (&ops[0], target, insn_mode);
5115 create_fixed_operand (&ops[1], arg1_rtx);
5116 create_fixed_operand (&ops[2], arg2_rtx);
5117 create_integer_operand (&ops[3], align);
5118 if (maybe_expand_insn (icode, 4, ops))
5119 return ops[0].value;
5120 return NULL_RTX;
5121 }
5122
5123 /* Expand expression EXP, which is a call to the memcmp built-in function.
5124 Return NULL_RTX if we failed and the caller should emit a normal call,
5125 otherwise try to get the result in TARGET, if convenient.
5126 RESULT_EQ is true if we can relax the returned value to be either zero
5127 or nonzero, without caring about the sign. */
5128
5129 static rtx
5130 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
5131 {
5132 if (!validate_arglist (exp,
5133 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5134 return NULL_RTX;
5135
5136 tree arg1 = CALL_EXPR_ARG (exp, 0);
5137 tree arg2 = CALL_EXPR_ARG (exp, 1);
5138 tree len = CALL_EXPR_ARG (exp, 2);
5139 enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
5140 bool no_overflow = true;
5141
5142 /* Diagnose calls where the specified length exceeds the size of either
5143 object. */
5144 tree size = compute_objsize (arg1, 0);
5145 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
5146 len, /*maxread=*/NULL_TREE, size,
5147 /*objsize=*/NULL_TREE);
5148 if (no_overflow)
5149 {
5150 size = compute_objsize (arg2, 0);
5151 no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
5152 len, /*maxread=*/NULL_TREE, size,
5153 /*objsize=*/NULL_TREE);
5154 }
5155
5156 /* If the specified length exceeds the size of either object,
5157 call the function. */
5158 if (!no_overflow)
5159 return NULL_RTX;
5160
5161 /* Due to the performance benefit, always inline the calls first
5162 when result_eq is false. */
5163 rtx result = NULL_RTX;
5164
5165 if (!result_eq && fcode != BUILT_IN_BCMP)
5166 {
5167 result = inline_expand_builtin_bytecmp (exp, target);
5168 if (result)
5169 return result;
5170 }
5171
5172 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
5173 location_t loc = EXPR_LOCATION (exp);
5174
5175 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
5176 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
5177
5178 /* If we don't have POINTER_TYPE, call the function. */
5179 if (arg1_align == 0 || arg2_align == 0)
5180 return NULL_RTX;
5181
5182 rtx arg1_rtx = get_memory_rtx (arg1, len);
5183 rtx arg2_rtx = get_memory_rtx (arg2, len);
5184 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
5185
5186 /* Set MEM_SIZE as appropriate. */
5187 if (CONST_INT_P (len_rtx))
5188 {
5189 set_mem_size (arg1_rtx, INTVAL (len_rtx));
5190 set_mem_size (arg2_rtx, INTVAL (len_rtx));
5191 }
5192
5193 by_pieces_constfn constfn = NULL;
5194
5195 /* Try to get the byte representation of the constant ARG2 (or, only
5196 when the function's result is used for equality to zero, ARG1)
5197 points to, with its byte size in NBYTES. */
5198 unsigned HOST_WIDE_INT nbytes;
5199 const char *rep = c_getstr (arg2, &nbytes);
5200 if (result_eq && rep == NULL)
5201 {
5202 /* For equality to zero the arguments are interchangeable. */
5203 rep = c_getstr (arg1, &nbytes);
5204 if (rep != NULL)
5205 std::swap (arg1_rtx, arg2_rtx);
5206 }
5207
5208 /* If the function's constant bound LEN_RTX is less than or equal
5209 to the byte size of the representation of the constant argument,
5210 and if block move would be done by pieces, we can avoid loading
5211 the bytes from memory and only store the computed constant result. */
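/* For instance, memcmp (p, "abc", 3): REP is "abc" and NBYTES is the size
of its representation (4, including the terminating NUL), so the constant
bound 3 fits and builtin_memcpy_read_str can supply the bytes directly.  */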
5212 if (rep
5213 && CONST_INT_P (len_rtx)
5214 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes)
5215 constfn = builtin_memcpy_read_str;
5216
5217 result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
5218 TREE_TYPE (len), target,
5219 result_eq, constfn,
5220 CONST_CAST (char *, rep));
5221
5222 if (result)
5223 {
5224 /* Return the value in the proper mode for this function. */
5225 if (GET_MODE (result) == mode)
5226 return result;
5227
5228 if (target != 0)
5229 {
5230 convert_move (target, result, 0);
5231 return target;
5232 }
5233
5234 return convert_to_mode (mode, result, 0);
5235 }
5236
5237 return NULL_RTX;
5238 }
5239
5240 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
5241 if we failed; the caller should emit a normal call. Otherwise try to get
5242 the result in TARGET, if convenient. */
5243
5244 static rtx
5245 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
5246 {
5247 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5248 return NULL_RTX;
5249
5250 tree arg1 = CALL_EXPR_ARG (exp, 0);
5251 tree arg2 = CALL_EXPR_ARG (exp, 1);
5252
5253 if (!check_nul_terminated_array (exp, arg1)
5254 || !check_nul_terminated_array (exp, arg2))
5255 return NULL_RTX;
5256
5257 /* Due to the performance benefit, always inline the calls first. */
5258 rtx result = NULL_RTX;
5259 result = inline_expand_builtin_bytecmp (exp, target);
5260 if (result)
5261 return result;
5262
5263 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
5264 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
5265 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
5266 return NULL_RTX;
5267
5268 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
5269 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
5270
5271 /* If we don't have POINTER_TYPE, call the function. */
5272 if (arg1_align == 0 || arg2_align == 0)
5273 return NULL_RTX;
5274
5275 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
5276 arg1 = builtin_save_expr (arg1);
5277 arg2 = builtin_save_expr (arg2);
5278
5279 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
5280 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
5281
5282 /* Try to call cmpstrsi. */
5283 if (cmpstr_icode != CODE_FOR_nothing)
5284 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
5285 MIN (arg1_align, arg2_align));
5286
5287 /* Try to determine at least one length and call cmpstrnsi. */
5288 if (!result && cmpstrn_icode != CODE_FOR_nothing)
5289 {
5290 tree len;
5291 rtx arg3_rtx;
5292
5293 tree len1 = c_strlen (arg1, 1);
5294 tree len2 = c_strlen (arg2, 1);
5295
5296 if (len1)
5297 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
5298 if (len2)
5299 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
5300
5301 /* If we don't have a constant length for the first, use the length
5302 of the second, if we know it. We don't require a constant for
5303 this case; some cost analysis could be done if both are available
5304 but neither is constant. For now, assume they're equally cheap,
5305 unless one has side effects. If both strings have constant lengths,
5306 use the smaller. */
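/* E.g. if ARG1 is the literal "hello" (LEN1 == 6 after the +1 above) and
ARG2's length is unknown, LEN1 becomes the cmpstrnsi bound.  */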
5307
5308 if (!len1)
5309 len = len2;
5310 else if (!len2)
5311 len = len1;
5312 else if (TREE_SIDE_EFFECTS (len1))
5313 len = len2;
5314 else if (TREE_SIDE_EFFECTS (len2))
5315 len = len1;
5316 else if (TREE_CODE (len1) != INTEGER_CST)
5317 len = len2;
5318 else if (TREE_CODE (len2) != INTEGER_CST)
5319 len = len1;
5320 else if (tree_int_cst_lt (len1, len2))
5321 len = len1;
5322 else
5323 len = len2;
5324
5325 /* If both arguments have side effects, we cannot optimize. */
5326 if (len && !TREE_SIDE_EFFECTS (len))
5327 {
5328 arg3_rtx = expand_normal (len);
5329 result = expand_cmpstrn_or_cmpmem
5330 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
5331 arg3_rtx, MIN (arg1_align, arg2_align));
5332 }
5333 }
5334
5335 tree fndecl = get_callee_fndecl (exp);
5336 if (result)
5337 {
5338 /* Check to see if the argument was declared attribute nonstring
5339 and if so, issue a warning since at this point it's not known
5340 to be nul-terminated. */
5341 maybe_warn_nonstring_arg (fndecl, exp);
5342
5343 /* Return the value in the proper mode for this function. */
5344 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
5345 if (GET_MODE (result) == mode)
5346 return result;
5347 if (target == 0)
5348 return convert_to_mode (mode, result, 0);
5349 convert_move (target, result, 0);
5350 return target;
5351 }
5352
5353 /* Expand the library call ourselves using a stabilized argument
5354 list to avoid re-evaluating the function's arguments twice. */
5355 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
5356 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
5357 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5358 return expand_call (fn, target, target == const0_rtx);
5359 }
5360
5361 /* Expand expression EXP, which is a call to the strncmp builtin. Return
5362 NULL_RTX if we failed; the caller should emit a normal call. Otherwise try to get
5363 the result in TARGET, if convenient. */
5364
5365 static rtx
5366 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
5367 ATTRIBUTE_UNUSED machine_mode mode)
5368 {
5369 if (!validate_arglist (exp,
5370 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5371 return NULL_RTX;
5372
5373 tree arg1 = CALL_EXPR_ARG (exp, 0);
5374 tree arg2 = CALL_EXPR_ARG (exp, 1);
5375 tree arg3 = CALL_EXPR_ARG (exp, 2);
5376
5377 if (!check_nul_terminated_array (exp, arg1, arg3)
5378 || !check_nul_terminated_array (exp, arg2, arg3))
5379 return NULL_RTX;
5380
5381 /* Due to the performance benefit, always inline the calls first. */
5382 rtx result = NULL_RTX;
5383 result = inline_expand_builtin_bytecmp (exp, target);
5384 if (result)
5385 return result;
5386
5387 /* If c_strlen can determine an expression for one of the string
5388 lengths, and it doesn't have side effects, then emit cmpstrnsi
5389 using length MIN(strlen(string)+1, arg3). */
5390 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
5391 if (cmpstrn_icode == CODE_FOR_nothing)
5392 return NULL_RTX;
5393
5394 tree len;
5395
5396 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
5397 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
5398
5399 tree len1 = c_strlen (arg1, 1);
5400 tree len2 = c_strlen (arg2, 1);
5401
5402 location_t loc = EXPR_LOCATION (exp);
5403
5404 if (len1)
5405 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
5406 if (len2)
5407 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
5408
5409 tree len3 = fold_convert_loc (loc, sizetype, arg3);
5410
5411 /* If we don't have a constant length for the first, use the length
5412 of the second, if we know it. If neither string is constant length,
5413 use the given length argument. We don't require a constant for
5414 this case; some cost analysis could be done if both are available
5415 but neither is constant. For now, assume they're equally cheap,
5416 unless one has side effects. If both strings have constant lengths,
5417 use the smaller. */
5418
5419 if (!len1 && !len2)
5420 len = len3;
5421 else if (!len1)
5422 len = len2;
5423 else if (!len2)
5424 len = len1;
5425 else if (TREE_SIDE_EFFECTS (len1))
5426 len = len2;
5427 else if (TREE_SIDE_EFFECTS (len2))
5428 len = len1;
5429 else if (TREE_CODE (len1) != INTEGER_CST)
5430 len = len2;
5431 else if (TREE_CODE (len2) != INTEGER_CST)
5432 len = len1;
5433 else if (tree_int_cst_lt (len1, len2))
5434 len = len1;
5435 else
5436 len = len2;
5437
5438 /* If we are not using the given length, we must incorporate it here.
5439 The actual new length parameter will be MIN(len,arg3) in this case. */
5440 if (len != len3)
5441 {
5442 len = fold_convert_loc (loc, sizetype, len);
5443 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
5444 }
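/* E.g. strncmp (s, "ab", 100): LEN2 is 3, so the emitted comparison reads
at most MIN (3, 100) == 3 bytes.  */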
5445 rtx arg1_rtx = get_memory_rtx (arg1, len);
5446 rtx arg2_rtx = get_memory_rtx (arg2, len);
5447 rtx arg3_rtx = expand_normal (len);
5448 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
5449 arg2_rtx, TREE_TYPE (len), arg3_rtx,
5450 MIN (arg1_align, arg2_align));
5451
5452 tree fndecl = get_callee_fndecl (exp);
5453 if (result)
5454 {
5455 /* Check to see if the argument was declared attribute nonstring
5456 and if so, issue a warning since at this point it's not known
5457 to be nul-terminated. */
5458 maybe_warn_nonstring_arg (fndecl, exp);
5459
5460 /* Return the value in the proper mode for this function. */
5461 mode = TYPE_MODE (TREE_TYPE (exp));
5462 if (GET_MODE (result) == mode)
5463 return result;
5464 if (target == 0)
5465 return convert_to_mode (mode, result, 0);
5466 convert_move (target, result, 0);
5467 return target;
5468 }
5469
5470 /* Expand the library call ourselves using a stabilized argument
5471 list to avoid re-evaluating the function's arguments twice. */
5472 tree fn = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
5473 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
5474 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
5475 return expand_call (fn, target, target == const0_rtx);
5476 }
5477
5478 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
5479 if that's convenient. */
5480
5481 rtx
5482 expand_builtin_saveregs (void)
5483 {
5484 rtx val;
5485 rtx_insn *seq;
5486
5487 /* Don't do __builtin_saveregs more than once in a function.
5488 Save the result of the first call and reuse it. */
5489 if (saveregs_value != 0)
5490 return saveregs_value;
5491
5492 /* When this function is called, it means that registers must be
5493 saved on entry to this function. So we migrate the call to the
5494 first insn of this function. */
5495
5496 start_sequence ();
5497
5498 /* Do whatever the machine needs done in this case. */
5499 val = targetm.calls.expand_builtin_saveregs ();
5500
5501 seq = get_insns ();
5502 end_sequence ();
5503
5504 saveregs_value = val;
5505
5506 /* Put the insns after the NOTE that starts the function. If this
5507 is inside a start_sequence, make the outer-level insn chain current, so
5508 the code is placed at the start of the function. */
5509 push_topmost_sequence ();
5510 emit_insn_after (seq, entry_of_function ());
5511 pop_topmost_sequence ();
5512
5513 return val;
5514 }
5515
5516 /* Expand a call to __builtin_next_arg. */
5517
5518 static rtx
5519 expand_builtin_next_arg (void)
5520 {
5521 /* Checking arguments is already done in fold_builtin_next_arg
5522 that must be called before this function. */
5523 return expand_binop (ptr_mode, add_optab,
5524 crtl->args.internal_arg_pointer,
5525 crtl->args.arg_offset_rtx,
5526 NULL_RTX, 0, OPTAB_LIB_WIDEN);
5527 }
5528
5529 /* Make it easier for the backends by protecting the valist argument
5530 from multiple evaluations. */
5531
5532 static tree
5533 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
5534 {
5535 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
5536
5537 /* The current way of determining the type of valist is completely
5538 bogus. We should have the information on the va builtin instead. */
5539 if (!vatype)
5540 vatype = targetm.fn_abi_va_list (cfun->decl);
5541
5542 if (TREE_CODE (vatype) == ARRAY_TYPE)
5543 {
5544 if (TREE_SIDE_EFFECTS (valist))
5545 valist = save_expr (valist);
5546
5547 /* For this case, the backends will be expecting a pointer to
5548 vatype, but it's possible we've actually been given an array
5549 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
5550 So fix it. */
5551 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
5552 {
5553 tree p1 = build_pointer_type (TREE_TYPE (vatype));
5554 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
5555 }
5556 }
5557 else
5558 {
5559 tree pt = build_pointer_type (vatype);
5560
5561 if (! needs_lvalue)
5562 {
5563 if (! TREE_SIDE_EFFECTS (valist))
5564 return valist;
5565
5566 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
5567 TREE_SIDE_EFFECTS (valist) = 1;
5568 }
5569
5570 if (TREE_SIDE_EFFECTS (valist))
5571 valist = save_expr (valist);
5572 valist = fold_build2_loc (loc, MEM_REF,
5573 vatype, valist, build_int_cst (pt, 0));
5574 }
5575
5576 return valist;
5577 }
5578
5579 /* The "standard" definition of va_list is void*. */
5580
5581 tree
5582 std_build_builtin_va_list (void)
5583 {
5584 return ptr_type_node;
5585 }
5586
5587 /* The "standard" abi va_list is va_list_type_node. */
5588
5589 tree
5590 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
5591 {
5592 return va_list_type_node;
5593 }
5594
5595 /* The "standard" type of va_list is va_list_type_node. */
5596
5597 tree
5598 std_canonical_va_list_type (tree type)
5599 {
5600 tree wtype, htype;
5601
5602 wtype = va_list_type_node;
5603 htype = type;
5604
5605 if (TREE_CODE (wtype) == ARRAY_TYPE)
5606 {
5607 /* If va_list is an array type, the argument may have decayed
5608 to a pointer type, e.g. by being passed to another function.
5609 In that case, unwrap both types so that we can compare the
5610 underlying records. */
5611 if (TREE_CODE (htype) == ARRAY_TYPE
5612 || POINTER_TYPE_P (htype))
5613 {
5614 wtype = TREE_TYPE (wtype);
5615 htype = TREE_TYPE (htype);
5616 }
5617 }
5618 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
5619 return va_list_type_node;
5620
5621 return NULL_TREE;
5622 }
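/* For example, on targets whose va_list is an array of a record (such as
the x86-64 __builtin_va_list, a one-element array of __va_list_tag), an
argument may arrive either as the array or as its decayed pointer; the
unwrapping above maps both back to va_list_type_node.  */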
5623
5624 /* The "standard" implementation of va_start: just assign `nextarg' to
5625 the variable. */
5626
5627 void
5628 std_expand_builtin_va_start (tree valist, rtx nextarg)
5629 {
5630 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
5631 convert_move (va_r, nextarg, 0);
5632 }
5633
5634 /* Expand EXP, a call to __builtin_va_start. */
5635
5636 static rtx
5637 expand_builtin_va_start (tree exp)
5638 {
5639 rtx nextarg;
5640 tree valist;
5641 location_t loc = EXPR_LOCATION (exp);
5642
5643 if (call_expr_nargs (exp) < 2)
5644 {
5645 error_at (loc, "too few arguments to function %<va_start%>");
5646 return const0_rtx;
5647 }
5648
5649 if (fold_builtin_next_arg (exp, true))
5650 return const0_rtx;
5651
5652 nextarg = expand_builtin_next_arg ();
5653 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
5654
5655 if (targetm.expand_builtin_va_start)
5656 targetm.expand_builtin_va_start (valist, nextarg);
5657 else
5658 std_expand_builtin_va_start (valist, nextarg);
5659
5660 return const0_rtx;
5661 }
5662
5663 /* Expand EXP, a call to __builtin_va_end. */
5664
5665 static rtx
5666 expand_builtin_va_end (tree exp)
5667 {
5668 tree valist = CALL_EXPR_ARG (exp, 0);
5669
5670 /* Evaluate for side effects, if needed. I hate macros that don't
5671 do that. */
5672 if (TREE_SIDE_EFFECTS (valist))
5673 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
5674
5675 return const0_rtx;
5676 }
5677
5678 /* Expand EXP, a call to __builtin_va_copy. We do this as a
5679 builtin rather than just as an assignment in stdarg.h because of the
5680 nastiness of array-type va_list types. */
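/* Concretely: for a pointer-style va_list the copy is a plain MODIFY_EXPR
assignment, while for an array-type va_list it must be a block copy of
TYPE_SIZE_UNIT (va_list) bytes; the two branches below implement exactly
those cases.  */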
5681
5682 static rtx
5683 expand_builtin_va_copy (tree exp)
5684 {
5685 tree dst, src, t;
5686 location_t loc = EXPR_LOCATION (exp);
5687
5688 dst = CALL_EXPR_ARG (exp, 0);
5689 src = CALL_EXPR_ARG (exp, 1);
5690
5691 dst = stabilize_va_list_loc (loc, dst, 1);
5692 src = stabilize_va_list_loc (loc, src, 0);
5693
5694 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
5695
5696 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
5697 {
5698 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
5699 TREE_SIDE_EFFECTS (t) = 1;
5700 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
5701 }
5702 else
5703 {
5704 rtx dstb, srcb, size;
5705
5706 /* Evaluate to pointers. */
5707 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
5708 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
5709 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
5710 NULL_RTX, VOIDmode, EXPAND_NORMAL);
5711
5712 dstb = convert_memory_address (Pmode, dstb);
5713 srcb = convert_memory_address (Pmode, srcb);
5714
5715 /* "Dereference" to BLKmode memories. */
5716 dstb = gen_rtx_MEM (BLKmode, dstb);
5717 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
5718 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5719 srcb = gen_rtx_MEM (BLKmode, srcb);
5720 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
5721 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
5722
5723 /* Copy. */
5724 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
5725 }
5726
5727 return const0_rtx;
5728 }
5729
5730 /* Expand a call to one of the builtin functions __builtin_frame_address or
5731 __builtin_return_address. */
5732
5733 static rtx
5734 expand_builtin_frame_address (tree fndecl, tree exp)
5735 {
5736 /* The argument must be a nonnegative integer constant.
5737 It counts the number of frames to scan up the stack.
5738 The value is either the frame pointer value or the return
5739 address saved in that frame. */
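/* E.g. __builtin_return_address (0) yields the current function's return
address, while a nonzero count such as __builtin_frame_address (2) walks
two frames up and triggers the -Wframe-address warning below.  */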
5740 if (call_expr_nargs (exp) == 0)
5741 /* Warning about missing arg was already issued. */
5742 return const0_rtx;
5743 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
5744 {
5745 error ("invalid argument to %qD", fndecl);
5746 return const0_rtx;
5747 }
5748 else
5749 {
5750 /* Number of frames to scan up the stack. */
5751 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
5752
5753 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
5754
5755 /* Some ports cannot access arbitrary stack frames. */
5756 if (tem == NULL)
5757 {
5758 warning (0, "unsupported argument to %qD", fndecl);
5759 return const0_rtx;
5760 }
5761
5762 if (count)
5763 {
5764 /* Warn since no effort is made to ensure that any frame
5765 beyond the current one exists or can be safely reached. */
5766 warning (OPT_Wframe_address, "calling %qD with "
5767 "a nonzero argument is unsafe", fndecl);
5768 }
5769
5770 /* For __builtin_frame_address, return what we've got. */
5771 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5772 return tem;
5773
5774 if (!REG_P (tem)
5775 && ! CONSTANT_P (tem))
5776 tem = copy_addr_to_reg (tem);
5777 return tem;
5778 }
5779 }
5780
5781 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
5782 failed and the caller should emit a normal call. */
5783
5784 static rtx
5785 expand_builtin_alloca (tree exp)
5786 {
5787 rtx op0;
5788 rtx result;
5789 unsigned int align;
5790 tree fndecl = get_callee_fndecl (exp);
5791 HOST_WIDE_INT max_size;
5792 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5793 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
5794 bool valid_arglist
5795 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5796 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
5797 VOID_TYPE)
5798 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
5799 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
5800 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
5801
5802 if (!valid_arglist)
5803 return NULL_RTX;
5804
5805 if ((alloca_for_var
5806 && warn_vla_limit >= HOST_WIDE_INT_MAX
5807 && warn_alloc_size_limit < warn_vla_limit)
5808 || (!alloca_for_var
5809 && warn_alloca_limit >= HOST_WIDE_INT_MAX
5810 && warn_alloc_size_limit < warn_alloca_limit
5811 ))
5812 {
5813 /* -Walloca-larger-than and -Wvla-larger-than settings of
5814 less than HOST_WIDE_INT_MAX override the more general
5815 -Walloc-size-larger-than so unless either of the former
5816 options is smaller than the last one (which would imply
5817 that the call was already checked), check the alloca
5818 arguments for overflow. */
5819 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
5820 int idx[] = { 0, -1 };
5821 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
5822 }
5823
5824 /* Compute the argument. */
5825 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
5826
5827 /* Compute the alignment. */
5828 align = (fcode == BUILT_IN_ALLOCA
5829 ? BIGGEST_ALIGNMENT
5830 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
5831
5832 /* Compute the maximum size. */
5833 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
5834 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
5835 : -1);
5836
5837 /* Allocate the desired space. If the allocation stems from the declaration
5838 of a variable-sized object, it cannot accumulate. */
5839 result
5840 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
5841 result = convert_memory_address (ptr_mode, result);
5842
5843 /* Dynamic allocations for variables are recorded during gimplification. */
5844 if (!alloca_for_var && (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC))
5845 record_dynamic_alloc (exp);
5846
5847 return result;
5848 }
5849
5850 /* Emit a call to __asan_allocas_unpoison for EXP. Add to the second argument
5851 of the call virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
5852 STACK_DYNAMIC_OFFSET value. See motivation for this in comment to
5853 handle_builtin_stack_restore function. */
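/* In other words, the libcall emitted below is effectively
__asan_allocas_unpoison (arg0, arg1 + (virtual_stack_dynamic_rtx
- stack_pointer_rtx)).  */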
5854
5855 static rtx
5856 expand_asan_emit_allocas_unpoison (tree exp)
5857 {
5858 tree arg0 = CALL_EXPR_ARG (exp, 0);
5859 tree arg1 = CALL_EXPR_ARG (exp, 1);
5860 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5861 rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
5862 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
5863 stack_pointer_rtx, NULL_RTX, 0,
5864 OPTAB_LIB_WIDEN);
5865 off = convert_modes (ptr_mode, Pmode, off, 0);
5866 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
5867 OPTAB_LIB_WIDEN);
5868 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
5869 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
5870 top, ptr_mode, bot, ptr_mode);
5871 return ret;
5872 }
5873
5874 /* Expand a call to bswap builtin in EXP.
5875 Return NULL_RTX if a normal call should be emitted rather than expanding the
5876 function in-line. If convenient, the result should be placed in TARGET.
5877 SUBTARGET may be used as the target for computing one of EXP's operands. */
5878
5879 static rtx
5880 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
5881 rtx subtarget)
5882 {
5883 tree arg;
5884 rtx op0;
5885
5886 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5887 return NULL_RTX;
5888
5889 arg = CALL_EXPR_ARG (exp, 0);
5890 op0 = expand_expr (arg,
5891 subtarget && GET_MODE (subtarget) == target_mode
5892 ? subtarget : NULL_RTX,
5893 target_mode, EXPAND_NORMAL);
5894 if (GET_MODE (op0) != target_mode)
5895 op0 = convert_to_mode (target_mode, op0, 1);
5896
5897 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
5898
5899 gcc_assert (target);
5900
5901 return convert_to_mode (target_mode, target, 1);
5902 }
5903
5904 /* Expand a call to a unary builtin in EXP.
5905 Return NULL_RTX if a normal call should be emitted rather than expanding the
5906 function in-line. If convenient, the result should be placed in TARGET.
5907 SUBTARGET may be used as the target for computing one of EXP's operands. */
5908
5909 static rtx
5910 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
5911 rtx subtarget, optab op_optab)
5912 {
5913 rtx op0;
5914
5915 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5916 return NULL_RTX;
5917
5918 /* Compute the argument. */
5919 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5920 (subtarget
5921 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5922 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5923 VOIDmode, EXPAND_NORMAL);
5924 /* Compute op, into TARGET if possible.
5925 Set TARGET to wherever the result comes back. */
5926 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5927 op_optab, op0, target, op_optab != clrsb_optab);
5928 gcc_assert (target);
5929
5930 return convert_to_mode (target_mode, target, 0);
5931 }
5932
5933 /* Expand a call to __builtin_expect. We just return our argument
5934 as the builtin_expect semantic should've been already executed by
5935 tree branch prediction pass. */
5936
5937 static rtx
5938 expand_builtin_expect (tree exp, rtx target)
5939 {
5940 tree arg;
5941
5942 if (call_expr_nargs (exp) < 2)
5943 return const0_rtx;
5944 arg = CALL_EXPR_ARG (exp, 0);
5945
5946 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5947 /* When guessing was done, the hints should be already stripped away. */
5948 gcc_assert (!flag_guess_branch_prob
5949 || optimize == 0 || seen_error ());
5950 return target;
5951 }
5952
5953 /* Expand a call to __builtin_expect_with_probability. We just return our
5954 argument as the builtin_expect semantic should've been already executed by
5955 tree branch prediction pass. */
5956
5957 static rtx
5958 expand_builtin_expect_with_probability (tree exp, rtx target)
5959 {
5960 tree arg;
5961
5962 if (call_expr_nargs (exp) < 3)
5963 return const0_rtx;
5964 arg = CALL_EXPR_ARG (exp, 0);
5965
5966 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5967 /* When guessing was done, the hints should be already stripped away. */
5968 gcc_assert (!flag_guess_branch_prob
5969 || optimize == 0 || seen_error ());
5970 return target;
5971 }
5972
5973
5974 /* Expand a call to __builtin_assume_aligned. We just return our first
5975 argument as the builtin_assume_aligned semantic should've been already
5976 executed by CCP. */
5977
5978 static rtx
5979 expand_builtin_assume_aligned (tree exp, rtx target)
5980 {
5981 if (call_expr_nargs (exp) < 2)
5982 return const0_rtx;
5983 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5984 EXPAND_NORMAL);
5985 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5986 && (call_expr_nargs (exp) < 3
5987 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5988 return target;
5989 }
5990
5991 void
5992 expand_builtin_trap (void)
5993 {
5994 if (targetm.have_trap ())
5995 {
5996 rtx_insn *insn = emit_insn (targetm.gen_trap ());
5997 /* For trap insns when not accumulating outgoing args force
5998 REG_ARGS_SIZE note to prevent crossjumping of calls with
5999 different args sizes. */
6000 if (!ACCUMULATE_OUTGOING_ARGS)
6001 add_args_size_note (insn, stack_pointer_delta);
6002 }
6003 else
6004 {
6005 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
6006 tree call_expr = build_call_expr (fn, 0);
6007 expand_call (call_expr, NULL_RTX, false);
6008 }
6009
6010 emit_barrier ();
6011 }
6012
6013 /* Expand a call to __builtin_unreachable. We do nothing except emit
6014 a barrier saying that control flow will not pass here.
6015
6016 It is the responsibility of the program being compiled to ensure
6017 that control flow never reaches __builtin_unreachable. */
6018 static void
6019 expand_builtin_unreachable (void)
6020 {
6021 emit_barrier ();
6022 }
6023
6024 /* Expand EXP, a call to fabs, fabsf or fabsl.
6025 Return NULL_RTX if a normal call should be emitted rather than expanding
6026 the function inline. If convenient, the result should be placed
6027 in TARGET. SUBTARGET may be used as the target for computing
6028 the operand. */
6029
6030 static rtx
6031 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
6032 {
6033 machine_mode mode;
6034 tree arg;
6035 rtx op0;
6036
6037 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
6038 return NULL_RTX;
6039
6040 arg = CALL_EXPR_ARG (exp, 0);
6041 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
6042 mode = TYPE_MODE (TREE_TYPE (arg));
6043 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
6044 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
6045 }
6046
6047 /* Expand EXP, a call to copysign, copysignf, or copysignl.
6048 Return NULL if a normal call should be emitted rather than expanding the
6049 function inline. If convenient, the result should be placed in TARGET.
6050 SUBTARGET may be used as the target for computing the operand. */
6051
6052 static rtx
6053 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
6054 {
6055 rtx op0, op1;
6056 tree arg;
6057
6058 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
6059 return NULL_RTX;
6060
6061 arg = CALL_EXPR_ARG (exp, 0);
6062 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
6063
6064 arg = CALL_EXPR_ARG (exp, 1);
6065 op1 = expand_normal (arg);
6066
6067 return expand_copysign (op0, op1, target);
6068 }
6069
6070 /* Expand a call to __builtin___clear_cache. */
6071
6072 static rtx
6073 expand_builtin___clear_cache (tree exp)
6074 {
6075 if (!targetm.code_for_clear_cache)
6076 {
6077 #ifdef CLEAR_INSN_CACHE
6078 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
6079 does something. Just do the default expansion to a call to
6080 __clear_cache(). */
6081 return NULL_RTX;
6082 #else
6083 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
6084 does nothing. There is no need to call it. Do nothing. */
6085 return const0_rtx;
6086 #endif /* CLEAR_INSN_CACHE */
6087 }
6088
6089 /* We have a "clear_cache" insn, and it will handle everything. */
6090 tree begin, end;
6091 rtx begin_rtx, end_rtx;
6092
6093 /* We must not expand to a library call. If we did, any
6094 fallback library function in libgcc that might contain a call to
6095 __builtin___clear_cache() would recurse infinitely. */
6096 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6097 {
6098 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
6099 return const0_rtx;
6100 }
6101
6102 if (targetm.have_clear_cache ())
6103 {
6104 class expand_operand ops[2];
6105
6106 begin = CALL_EXPR_ARG (exp, 0);
6107 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
6108
6109 end = CALL_EXPR_ARG (exp, 1);
6110 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
6111
6112 create_address_operand (&ops[0], begin_rtx);
6113 create_address_operand (&ops[1], end_rtx);
6114 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
6115 return const0_rtx;
6116 }
6117 return const0_rtx;
6118 }
6119
6120 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
6121
6122 static rtx
6123 round_trampoline_addr (rtx tramp)
6124 {
6125 rtx temp, addend, mask;
6126
6127 /* If we don't need too much alignment, we'll have been guaranteed
6128 proper alignment by get_trampoline_type. */
6129 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
6130 return tramp;
6131
6132 /* Round address up to desired boundary. */
6133 temp = gen_reg_rtx (Pmode);
6134 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
6135 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
6136
6137 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
6138 temp, 0, OPTAB_LIB_WIDEN);
6139 tramp = expand_simple_binop (Pmode, AND, temp, mask,
6140 temp, 0, OPTAB_LIB_WIDEN);
6141
6142 return tramp;
6143 }
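/* I.e. the code above computes the usual TRAMP = (TRAMP + ALIGN - 1) & -ALIGN
with ALIGN == TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT.  */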
6144
6145 static rtx
6146 expand_builtin_init_trampoline (tree exp, bool onstack)
6147 {
6148 tree t_tramp, t_func, t_chain;
6149 rtx m_tramp, r_tramp, r_chain, tmp;
6150
6151 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
6152 POINTER_TYPE, VOID_TYPE))
6153 return NULL_RTX;
6154
6155 t_tramp = CALL_EXPR_ARG (exp, 0);
6156 t_func = CALL_EXPR_ARG (exp, 1);
6157 t_chain = CALL_EXPR_ARG (exp, 2);
6158
6159 r_tramp = expand_normal (t_tramp);
6160 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
6161 MEM_NOTRAP_P (m_tramp) = 1;
6162
6163 /* If ONSTACK, the TRAMP argument should be the address of a field
6164 within the local function's FRAME decl. Either way, let's see if
6165 we can fill in the MEM_ATTRs for this memory. */
6166 if (TREE_CODE (t_tramp) == ADDR_EXPR)
6167 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
6168
6169 /* Creator of a heap trampoline is responsible for making sure the
6170 address is aligned to at least STACK_BOUNDARY. Normally malloc
6171 will ensure this anyhow. */
6172 tmp = round_trampoline_addr (r_tramp);
6173 if (tmp != r_tramp)
6174 {
6175 m_tramp = change_address (m_tramp, BLKmode, tmp);
6176 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
6177 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
6178 }
6179
6180 /* The FUNC argument should be the address of the nested function.
6181 Extract the actual function decl to pass to the hook. */
6182 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
6183 t_func = TREE_OPERAND (t_func, 0);
6184 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
6185
6186 r_chain = expand_normal (t_chain);
6187
6188 /* Generate insns to initialize the trampoline. */
6189 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
6190
6191 if (onstack)
6192 {
6193 trampolines_created = 1;
6194
6195 if (targetm.calls.custom_function_descriptors != 0)
6196 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
6197 "trampoline generated for nested function %qD", t_func);
6198 }
6199
6200 return const0_rtx;
6201 }
6202
6203 static rtx
6204 expand_builtin_adjust_trampoline (tree exp)
6205 {
6206 rtx tramp;
6207
6208 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6209 return NULL_RTX;
6210
6211 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
6212 tramp = round_trampoline_addr (tramp);
6213 if (targetm.calls.trampoline_adjust_address)
6214 tramp = targetm.calls.trampoline_adjust_address (tramp);
6215
6216 return tramp;
6217 }
6218
6219 /* Expand a call to the builtin descriptor initialization routine.
6220 A descriptor is made up of a couple of pointers to the static
6221 chain and the code entry in this order. */
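/* On a 64-bit target the descriptor is therefore a 16-byte object: bytes
0-7 hold the static chain value and bytes 8-15 the code entry point,
matching the two emit_move_insn calls below.  */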
6222
6223 static rtx
6224 expand_builtin_init_descriptor (tree exp)
6225 {
6226 tree t_descr, t_func, t_chain;
6227 rtx m_descr, r_descr, r_func, r_chain;
6228
6229 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
6230 VOID_TYPE))
6231 return NULL_RTX;
6232
6233 t_descr = CALL_EXPR_ARG (exp, 0);
6234 t_func = CALL_EXPR_ARG (exp, 1);
6235 t_chain = CALL_EXPR_ARG (exp, 2);
6236
6237 r_descr = expand_normal (t_descr);
6238 m_descr = gen_rtx_MEM (BLKmode, r_descr);
6239 MEM_NOTRAP_P (m_descr) = 1;
6240 set_mem_align (m_descr, GET_MODE_ALIGNMENT (ptr_mode));
6241
6242 r_func = expand_normal (t_func);
6243 r_chain = expand_normal (t_chain);
6244
6245 /* Generate insns to initialize the descriptor. */
6246 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
6247 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
6248 POINTER_SIZE / BITS_PER_UNIT), r_func);
6249
6250 return const0_rtx;
6251 }
6252
6253 /* Expand a call to the builtin descriptor adjustment routine. */
6254
6255 static rtx
6256 expand_builtin_adjust_descriptor (tree exp)
6257 {
6258 rtx tramp;
6259
6260 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6261 return NULL_RTX;
6262
6263 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
6264
6265 /* Unalign the descriptor to allow runtime identification. */
6266 tramp = plus_constant (ptr_mode, tramp,
6267 targetm.calls.custom_function_descriptors);
6268
6269 return force_operand (tramp, NULL_RTX);
6270 }
6271
6272 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
6273 function. The function first checks whether the back end provides
6274 an insn to implement signbit for the respective mode. If not, it
6275 checks whether the floating point format of the value is such that
6276 the sign bit can be extracted. If that is not the case, error out.
6277 EXP is the expression that is a call to the builtin function; if
6278 convenient, the result should be placed in TARGET. */
6279 static rtx
6280 expand_builtin_signbit (tree exp, rtx target)
6281 {
6282 const struct real_format *fmt;
6283 scalar_float_mode fmode;
6284 scalar_int_mode rmode, imode;
6285 tree arg;
6286 int word, bitpos;
6287 enum insn_code icode;
6288 rtx temp;
6289 location_t loc = EXPR_LOCATION (exp);
6290
6291 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
6292 return NULL_RTX;
6293
6294 arg = CALL_EXPR_ARG (exp, 0);
6295 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
6296 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
6297 fmt = REAL_MODE_FORMAT (fmode);
6298
6299 arg = builtin_save_expr (arg);
6300
6301 /* Expand the argument yielding a RTX expression. */
6302 temp = expand_normal (arg);
6303
6304 /* Check if the back end provides an insn that handles signbit for the
6305 argument's mode. */
6306 icode = optab_handler (signbit_optab, fmode);
6307 if (icode != CODE_FOR_nothing)
6308 {
6309 rtx_insn *last = get_last_insn ();
6310 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6311 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
6312 return target;
6313 delete_insns_since (last);
6314 }
6315
6316 /* For floating point formats without a sign bit, implement signbit
6317 as "ARG < 0.0". */
6318 bitpos = fmt->signbit_ro;
6319 if (bitpos < 0)
6320 {
6321 /* But we can't do this if the format supports signed zero. */
6322 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
6323
6324 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
6325 build_real (TREE_TYPE (arg), dconst0));
6326 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
6327 }
6328
6329 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
6330 {
6331 imode = int_mode_for_mode (fmode).require ();
6332 temp = gen_lowpart (imode, temp);
6333 }
6334 else
6335 {
6336 imode = word_mode;
6337 /* Handle targets with different FP word orders. */
6338 if (FLOAT_WORDS_BIG_ENDIAN)
6339 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
6340 else
6341 word = bitpos / BITS_PER_WORD;
6342 temp = operand_subword_force (temp, word, fmode);
6343 bitpos = bitpos % BITS_PER_WORD;
6344 }
6345
6346 /* Force the intermediate word_mode (or narrower) result into a
6347 register. This avoids attempting to create paradoxical SUBREGs
6348 of floating point modes below. */
6349 temp = force_reg (imode, temp);
6350
6351 /* If the bitpos is within the "result mode" lowpart, the operation
6352 can be implemented with a single bitwise AND. Otherwise, we need
6353 a right shift and an AND. */
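/* E.g. for SFmode the sign is bit 31, which fits in an int-sized RMODE, so
a single AND with 0x80000000 suffices; for DFmode on a 64-bit target the
sign is bit 63 of the DImode image, so it must be shifted down first.  */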
6354
6355 if (bitpos < GET_MODE_BITSIZE (rmode))
6356 {
6357 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
6358
6359 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
6360 temp = gen_lowpart (rmode, temp);
6361 temp = expand_binop (rmode, and_optab, temp,
6362 immed_wide_int_const (mask, rmode),
6363 NULL_RTX, 1, OPTAB_LIB_WIDEN);
6364 }
6365 else
6366 {
6367 /* Perform a logical right shift to place the signbit in the least
6368 significant bit, then truncate the result to the desired mode
6369 and mask just this bit. */
6370 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
6371 temp = gen_lowpart (rmode, temp);
6372 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
6373 NULL_RTX, 1, OPTAB_LIB_WIDEN);
6374 }
6375
6376 return temp;
6377 }
6378
6379 /* Expand fork or exec calls. TARGET is the desired target of the
6380 call. EXP is the call. FN is the
6381 identifier of the actual function. IGNORE is nonzero if the
6382 value is to be ignored. */
6383
6384 static rtx
6385 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
6386 {
6387 tree id, decl;
6388 tree call;
6389
6390 if (DECL_FUNCTION_CODE (fn) != BUILT_IN_FORK)
6391 {
6392 /* Detect unterminated path. */
6393 if (!check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 0)))
6394 return NULL_RTX;
6395
6396 /* Also detect unterminated first argument. */
6397 switch (DECL_FUNCTION_CODE (fn))
6398 {
6399 case BUILT_IN_EXECL:
6400 case BUILT_IN_EXECLE:
6401 case BUILT_IN_EXECLP:
6402 if (!check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 0)))
6403 return NULL_RTX;
6404 default:
6405 break;
6406 }
6407 }
6408
6409
6410 /* If we are not profiling, just call the function. */
6411 if (!profile_arc_flag)
6412 return NULL_RTX;
6413
6414 /* Otherwise call the wrapper. This should be equivalent for the rest of the
6415 compiler, so the code does not diverge, and the wrapper may run the
6416 code necessary for keeping the profiling sane. */
6417
6418 switch (DECL_FUNCTION_CODE (fn))
6419 {
6420 case BUILT_IN_FORK:
6421 id = get_identifier ("__gcov_fork");
6422 break;
6423
6424 case BUILT_IN_EXECL:
6425 id = get_identifier ("__gcov_execl");
6426 break;
6427
6428 case BUILT_IN_EXECV:
6429 id = get_identifier ("__gcov_execv");
6430 break;
6431
6432 case BUILT_IN_EXECLP:
6433 id = get_identifier ("__gcov_execlp");
6434 break;
6435
6436 case BUILT_IN_EXECLE:
6437 id = get_identifier ("__gcov_execle");
6438 break;
6439
6440 case BUILT_IN_EXECVP:
6441 id = get_identifier ("__gcov_execvp");
6442 break;
6443
6444 case BUILT_IN_EXECVE:
6445 id = get_identifier ("__gcov_execve");
6446 break;
6447
6448 default:
6449 gcc_unreachable ();
6450 }
6451
6452 decl = build_decl (DECL_SOURCE_LOCATION (fn),
6453 FUNCTION_DECL, id, TREE_TYPE (fn));
6454 DECL_EXTERNAL (decl) = 1;
6455 TREE_PUBLIC (decl) = 1;
6456 DECL_ARTIFICIAL (decl) = 1;
6457 TREE_NOTHROW (decl) = 1;
6458 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
6459 DECL_VISIBILITY_SPECIFIED (decl) = 1;
6460 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
6461 return expand_call (call, target, ignore);
6462 }
6463
6464
6465
6466 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
6467 the pointer in these functions is void*, the tree optimizers may remove
6468 casts. The mode computed in expand_builtin isn't reliable either, due
6469 to __sync_bool_compare_and_swap.
6470
6471 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
6472 group of builtins. This gives us log2 of the mode size. */
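/* E.g. for __sync_fetch_and_add_4, FCODE_DIFF is 2 and the mode returned
below has BITS_PER_UNIT << 2 == 32 bits.  */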
6473
6474 static inline machine_mode
6475 get_builtin_sync_mode (int fcode_diff)
6476 {
6477 /* The size is not negotiable, so ask not to get BLKmode in return
6478 if the target indicates that a smaller size would be better. */
6479 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
6480 }
6481
6482 /* Expand the memory expression LOC and return the appropriate memory operand
6483 for the builtin_sync operations. */
6484
6485 static rtx
6486 get_builtin_sync_mem (tree loc, machine_mode mode)
6487 {
6488 rtx addr, mem;
6489 int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
6490 ? TREE_TYPE (TREE_TYPE (loc))
6491 : TREE_TYPE (loc));
6492 scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);
6493
6494 addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
6495 addr = convert_memory_address (addr_mode, addr);
6496
6497 /* Note that we explicitly do not want any alias information for this
6498 memory, so that we kill all other live memories. Otherwise we don't
6499 satisfy the full barrier semantics of the intrinsic. */
6500 mem = gen_rtx_MEM (mode, addr);
6501
6502 set_mem_addr_space (mem, addr_space);
6503
6504 mem = validize_mem (mem);
6505
6506 /* The alignment needs to be at least as strict as that of the mode. */
6507 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
6508 get_pointer_alignment (loc)));
6509 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
6510 MEM_VOLATILE_P (mem) = 1;
6511
6512 return mem;
6513 }
6514
6515 /* Make sure an argument is in the right mode.
6516 EXP is the tree argument.
6517 MODE is the mode it should be in. */
6518
6519 static rtx
6520 expand_expr_force_mode (tree exp, machine_mode mode)
6521 {
6522 rtx val;
6523 machine_mode old_mode;
6524
6525 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
6526 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
6527 of CONST_INTs, where we know the old_mode only from the call argument. */
6528
6529 old_mode = GET_MODE (val);
6530 if (old_mode == VOIDmode)
6531 old_mode = TYPE_MODE (TREE_TYPE (exp));
6532 val = convert_modes (mode, old_mode, val, 1);
6533 return val;
6534 }
6535
6536
6537 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
6538 EXP is the CALL_EXPR. CODE is the rtx code
6539 that corresponds to the arithmetic or logical operation from the name;
6540 an exception here is that NOT actually means NAND. TARGET is an optional
6541 place for us to store the results; AFTER is true if this is the
6542 xxx_and_fetch form (i.e. the result is the value after the operation). */
6543
6544 static rtx
6545 expand_builtin_sync_operation (machine_mode mode, tree exp,
6546 enum rtx_code code, bool after,
6547 rtx target)
6548 {
6549 rtx val, mem;
6550 location_t loc = EXPR_LOCATION (exp);
6551
6552 if (code == NOT && warn_sync_nand)
6553 {
6554 tree fndecl = get_callee_fndecl (exp);
6555 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6556
6557 static bool warned_f_a_n, warned_n_a_f;
6558
6559 switch (fcode)
6560 {
6561 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6562 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6563 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6564 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6565 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6566 if (warned_f_a_n)
6567 break;
6568
6569 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
6570 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6571 warned_f_a_n = true;
6572 break;
6573
6574 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6575 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6576 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6577 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6578 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6579 if (warned_n_a_f)
6580 break;
6581
6582 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
6583 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6584 warned_n_a_f = true;
6585 break;
6586
6587 default:
6588 gcc_unreachable ();
6589 }
6590 }
6591
6592 /* Expand the operands. */
6593 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6594 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6595
6596 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
6597 after);
6598 }
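
/* A sketch of how the two user-visible forms map onto this function
   (illustrative):

     int before = __sync_fetch_and_add (&x, 5);   // AFTER == false: old value
     int after  = __sync_add_and_fetch (&x, 5);   // AFTER == true:  new value

   Both reach expand_atomic_fetch_op with CODE == PLUS and the implicit
   MEMMODEL_SYNC_SEQ_CST model; only the AFTER flag differs.  */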
6599
6600 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
6601 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
6602 true if this is the boolean form. TARGET is a place for us to store the
6603 results; this is NOT optional if IS_BOOL is true. */
6604
6605 static rtx
6606 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
6607 bool is_bool, rtx target)
6608 {
6609 rtx old_val, new_val, mem;
6610 rtx *pbool, *poval;
6611
6612 /* Expand the operands. */
6613 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6614 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6615 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6616
6617 pbool = poval = NULL;
6618 if (target != const0_rtx)
6619 {
6620 if (is_bool)
6621 pbool = ⌖
6622 else
6623 poval = ⌖
6624 }
6625 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
6626 false, MEMMODEL_SYNC_SEQ_CST,
6627 MEMMODEL_SYNC_SEQ_CST))
6628 return NULL_RTX;
6629
6630 return target;
6631 }
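
/* Roughly, the two source forms handled here (illustrative):

     old = __sync_val_compare_and_swap (&x, expected, desired);   // IS_BOOL false
     ok  = __sync_bool_compare_and_swap (&x, expected, desired);  // IS_BOOL true

   Only one of PBOOL/POVAL is wired to TARGET, so expand_atomic_compare_and_swap
   generates just the result the caller actually wants.  */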
6632
6633 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
6634 general form is actually an atomic exchange, and some targets only
6635 support a reduced form with the second argument being a constant 1.
6636 EXP is the CALL_EXPR; TARGET is an optional place for us to store
6637 the results. */
6638
6639 static rtx
6640 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
6641 rtx target)
6642 {
6643 rtx val, mem;
6644
6645 /* Expand the operands. */
6646 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6647 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6648
6649 return expand_sync_lock_test_and_set (target, mem, val);
6650 }
6651
6652 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
6653
6654 static void
6655 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
6656 {
6657 rtx mem;
6658
6659 /* Expand the operands. */
6660 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6661
6662 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
6663 }
6664
6665 /* Given an integer representing an ``enum memmodel'', verify its
6666 correctness and return the memory model enum. */
6667
6668 static enum memmodel
6669 get_memmodel (tree exp)
6670 {
6671 rtx op;
6672 unsigned HOST_WIDE_INT val;
6673 location_t loc
6674 = expansion_point_location_if_in_system_header (input_location);
6675
6676 /* If the parameter is not a constant, it's a run time value so we'll just
6677 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
6678 if (TREE_CODE (exp) != INTEGER_CST)
6679 return MEMMODEL_SEQ_CST;
6680
6681 op = expand_normal (exp);
6682
6683 val = INTVAL (op);
6684 if (targetm.memmodel_check)
6685 val = targetm.memmodel_check (val);
6686 else if (val & ~MEMMODEL_MASK)
6687 {
6688 warning_at (loc, OPT_Winvalid_memory_model,
6689 "unknown architecture specifier in memory model to builtin");
6690 return MEMMODEL_SEQ_CST;
6691 }
6692
6693 /* Should never see an explicit user SYNC memory model, so >= LAST works. */
6694 if (memmodel_base (val) >= MEMMODEL_LAST)
6695 {
6696 warning_at (loc, OPT_Winvalid_memory_model,
6697 "invalid memory model argument to builtin");
6698 return MEMMODEL_SEQ_CST;
6699 }
6700
6701 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
6702 be conservative and promote consume to acquire. */
6703 if (val == MEMMODEL_CONSUME)
6704 val = MEMMODEL_ACQUIRE;
6705
6706 return (enum memmodel) val;
6707 }
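
/* For example (illustrative; the numeric value follows the current
   enum memmodel layout, which mirrors the __ATOMIC_* constants):

     __atomic_store_n (&x, 1, __ATOMIC_RELEASE);

   passes the INTEGER_CST 3 (MEMMODEL_RELEASE) here and gets it back
   unchanged, while a non-constant or out-of-range model argument is
   conservatively treated as MEMMODEL_SEQ_CST.  */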
6708
6709 /* Expand the __atomic_exchange intrinsic:
6710 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
6711 EXP is the CALL_EXPR.
6712 TARGET is an optional place for us to store the results. */
6713
6714 static rtx
6715 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
6716 {
6717 rtx val, mem;
6718 enum memmodel model;
6719
6720 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6721
6722 if (!flag_inline_atomics)
6723 return NULL_RTX;
6724
6725 /* Expand the operands. */
6726 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6727 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6728
6729 return expand_atomic_exchange (target, mem, val, model);
6730 }
6731
6732 /* Expand the __atomic_compare_exchange intrinsic:
6733 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
6734 TYPE desired, BOOL weak,
6735 enum memmodel success,
6736 enum memmodel failure)
6737 EXP is the CALL_EXPR.
6738 TARGET is an optional place for us to store the results. */
6739
6740 static rtx
6741 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
6742 rtx target)
6743 {
6744 rtx expect, desired, mem, oldval;
6745 rtx_code_label *label;
6746 enum memmodel success, failure;
6747 tree weak;
6748 bool is_weak;
6749 location_t loc
6750 = expansion_point_location_if_in_system_header (input_location);
6751
6752 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
6753 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
6754
6755 if (failure > success)
6756 {
6757 warning_at (loc, OPT_Winvalid_memory_model,
6758 "failure memory model cannot be stronger than success "
6759 "memory model for %<__atomic_compare_exchange%>");
6760 success = MEMMODEL_SEQ_CST;
6761 }
6762
6763 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6764 {
6765 warning_at (loc, OPT_Winvalid_memory_model,
6766 "invalid failure memory model for "
6767 "%<__atomic_compare_exchange%>");
6768 failure = MEMMODEL_SEQ_CST;
6769 success = MEMMODEL_SEQ_CST;
6770 }
6771
6772
6773 if (!flag_inline_atomics)
6774 return NULL_RTX;
6775
6776 /* Expand the operands. */
6777 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6778
6779 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
6780 expect = convert_memory_address (Pmode, expect);
6781 expect = gen_rtx_MEM (mode, expect);
6782 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6783
6784 weak = CALL_EXPR_ARG (exp, 3);
6785 is_weak = false;
6786 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
6787 is_weak = true;
6788
6789 if (target == const0_rtx)
6790 target = NULL;
6791
6792 /* Lest the rtl backend create a race condition with an improper store
6793 to memory, always create a new pseudo for OLDVAL. */
6794 oldval = NULL;
6795
6796 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
6797 is_weak, success, failure))
6798 return NULL_RTX;
6799
6800 /* Conditionally store back to EXPECT, lest we create a race condition
6801 with an improper store to memory. */
6802 /* ??? With a rearrangement of atomics at the gimple level, we can handle
6803 the normal case where EXPECT is totally private, i.e. a register. At
6804 which point the store can be unconditional. */
6805 label = gen_label_rtx ();
6806 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
6807 GET_MODE (target), 1, label);
6808 emit_move_insn (expect, oldval);
6809 emit_label (label);
6810
6811 return target;
6812 }
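
/* Conceptually the emitted sequence is (illustrative):

     tmp = CAS (object, *expect, desired)
     if (!success)
       *expect = tmp;        // write the observed value back only on failure

   matching the C11/C++11 requirement that a failed compare-exchange updates
   the "expected" object.  */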
6813
6814 /* Helper function for expand_ifn_atomic_compare_exchange - expand
6815 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
6816 call. The weak parameter must be dropped to match the expected parameter
6817 list and the expected argument changed from value to pointer to memory
6818 slot. */
6819
6820 static void
6821 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
6822 {
6823 unsigned int z;
6824 vec<tree, va_gc> *vec;
6825
6826 vec_alloc (vec, 5);
6827 vec->quick_push (gimple_call_arg (call, 0));
6828 tree expected = gimple_call_arg (call, 1);
6829 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
6830 TREE_TYPE (expected));
6831 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
6832 if (expd != x)
6833 emit_move_insn (x, expd);
6834 tree v = make_tree (TREE_TYPE (expected), x);
6835 vec->quick_push (build1 (ADDR_EXPR,
6836 build_pointer_type (TREE_TYPE (expected)), v));
6837 vec->quick_push (gimple_call_arg (call, 2));
6838 /* Skip the boolean weak parameter. */
6839 for (z = 4; z < 6; z++)
6840 vec->quick_push (gimple_call_arg (call, z));
6841 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
6842 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
6843 gcc_assert (bytes_log2 < 5);
6844 built_in_function fncode
6845 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
6846 + bytes_log2);
6847 tree fndecl = builtin_decl_explicit (fncode);
6848 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
6849 fndecl);
6850 tree exp = build_call_vec (boolean_type_node, fn, vec);
6851 tree lhs = gimple_call_lhs (call);
6852 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
6853 if (lhs)
6854 {
6855 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6856 if (GET_MODE (boolret) != mode)
6857 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6858 x = force_reg (mode, x);
6859 write_complex_part (target, boolret, true);
6860 write_complex_part (target, x, false);
6861 }
6862 }
6863
6864 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
6865
6866 void
6867 expand_ifn_atomic_compare_exchange (gcall *call)
6868 {
6869 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
6870 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
6871 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
6872 rtx expect, desired, mem, oldval, boolret;
6873 enum memmodel success, failure;
6874 tree lhs;
6875 bool is_weak;
6876 location_t loc
6877 = expansion_point_location_if_in_system_header (gimple_location (call));
6878
6879 success = get_memmodel (gimple_call_arg (call, 4));
6880 failure = get_memmodel (gimple_call_arg (call, 5));
6881
6882 if (failure > success)
6883 {
6884 warning_at (loc, OPT_Winvalid_memory_model,
6885 "failure memory model cannot be stronger than success "
6886 "memory model for %<__atomic_compare_exchange%>");
6887 success = MEMMODEL_SEQ_CST;
6888 }
6889
6890 if (is_mm_release (failure) || is_mm_acq_rel (failure))
6891 {
6892 warning_at (loc, OPT_Winvalid_memory_model,
6893 "invalid failure memory model for "
6894 "%<__atomic_compare_exchange%>");
6895 failure = MEMMODEL_SEQ_CST;
6896 success = MEMMODEL_SEQ_CST;
6897 }
6898
6899 if (!flag_inline_atomics)
6900 {
6901 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6902 return;
6903 }
6904
6905 /* Expand the operands. */
6906 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
6907
6908 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
6909 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
6910
6911 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6912
6913 boolret = NULL;
6914 oldval = NULL;
6915
6916 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6917 is_weak, success, failure))
6918 {
6919 expand_ifn_atomic_compare_exchange_into_call (call, mode);
6920 return;
6921 }
6922
6923 lhs = gimple_call_lhs (call);
6924 if (lhs)
6925 {
6926 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6927 if (GET_MODE (boolret) != mode)
6928 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6929 write_complex_part (target, boolret, true);
6930 write_complex_part (target, oldval, false);
6931 }
6932 }
6933
6934 /* Expand the __atomic_load intrinsic:
6935 TYPE __atomic_load (TYPE *object, enum memmodel)
6936 EXP is the CALL_EXPR.
6937 TARGET is an optional place for us to store the results. */
6938
6939 static rtx
6940 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6941 {
6942 rtx mem;
6943 enum memmodel model;
6944
6945 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6946 if (is_mm_release (model) || is_mm_acq_rel (model))
6947 {
6948 location_t loc
6949 = expansion_point_location_if_in_system_header (input_location);
6950 warning_at (loc, OPT_Winvalid_memory_model,
6951 "invalid memory model for %<__atomic_load%>");
6952 model = MEMMODEL_SEQ_CST;
6953 }
6954
6955 if (!flag_inline_atomics)
6956 return NULL_RTX;
6957
6958 /* Expand the operand. */
6959 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6960
6961 return expand_atomic_load (target, mem, model);
6962 }
6963
6964
6965 /* Expand the __atomic_store intrinsic:
6966 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6967 EXP is the CALL_EXPR.
6968 TARGET is an optional place for us to store the results. */
6969
6970 static rtx
6971 expand_builtin_atomic_store (machine_mode mode, tree exp)
6972 {
6973 rtx mem, val;
6974 enum memmodel model;
6975
6976 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6977 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6978 || is_mm_release (model)))
6979 {
6980 location_t loc
6981 = expansion_point_location_if_in_system_header (input_location);
6982 warning_at (loc, OPT_Winvalid_memory_model,
6983 "invalid memory model for %<__atomic_store%>");
6984 model = MEMMODEL_SEQ_CST;
6985 }
6986
6987 if (!flag_inline_atomics)
6988 return NULL_RTX;
6989
6990 /* Expand the operands. */
6991 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6992 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6993
6994 return expand_atomic_store (mem, val, model, false);
6995 }
6996
6997 /* Expand the __atomic_fetch_XXX intrinsic:
6998 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6999 EXP is the CALL_EXPR.
7000 TARGET is an optional place for us to store the results.
7001 CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (meaning NAND).
7002 FETCH_AFTER is true if returning the result of the operation.
7003 FETCH_AFTER is false if returning the value before the operation.
7004 IGNORE is true if the result is not used.
7005 EXT_CALL is the correct builtin for an external call if this cannot be
7006 resolved to an instruction sequence. */
7007
7008 static rtx
7009 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
7010 enum rtx_code code, bool fetch_after,
7011 bool ignore, enum built_in_function ext_call)
7012 {
7013 rtx val, mem, ret;
7014 enum memmodel model;
7015 tree fndecl;
7016 tree addr;
7017
7018 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
7019
7020 /* Expand the operands. */
7021 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7022 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
7023
7024 /* Only try generating instructions if inlining is turned on. */
7025 if (flag_inline_atomics)
7026 {
7027 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
7028 if (ret)
7029 return ret;
7030 }
7031
7032 /* Return if a different routine isn't needed for the library call. */
7033 if (ext_call == BUILT_IN_NONE)
7034 return NULL_RTX;
7035
7036 /* Change the call to the specified function. */
7037 fndecl = get_callee_fndecl (exp);
7038 addr = CALL_EXPR_FN (exp);
7039 STRIP_NOPS (addr);
7040
7041 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
7042 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
7043
7044 /* If we will emit code after the call, the call cannot be a tail call.
7045 If it is emitted as a tail call, a barrier is emitted after it, and
7046 then all trailing code is removed. */
7047 if (!ignore)
7048 CALL_EXPR_TAILCALL (exp) = 0;
7049
7050 /* Expand the call here so we can emit trailing code. */
7051 ret = expand_call (exp, target, ignore);
7052
7053 /* Replace the original function just in case it matters. */
7054 TREE_OPERAND (addr, 0) = fndecl;
7055
7056 /* Then issue the arithmetic correction to return the right result. */
7057 if (!ignore)
7058 {
7059 if (code == NOT)
7060 {
7061 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
7062 OPTAB_LIB_WIDEN);
7063 ret = expand_simple_unop (mode, NOT, ret, target, true);
7064 }
7065 else
7066 ret = expand_simple_binop (mode, code, ret, val, target, true,
7067 OPTAB_LIB_WIDEN);
7068 }
7069 return ret;
7070 }
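
/* Illustrative example of the library fallback with correction: if
   __atomic_add_fetch_4 cannot be expanded inline, the call is redirected to
   the external __atomic_fetch_add_4 (EXT_CALL), which returns the old value,
   and the new value is then recomputed:

     ret = __atomic_fetch_add_4 (p, val, model);
     ret = ret + val;               // generic CODE case above

   For CODE == NOT (i.e. NAND) the correction is ret = ~(ret & val).  */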
7071
7072 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
7073
7074 void
7075 expand_ifn_atomic_bit_test_and (gcall *call)
7076 {
7077 tree ptr = gimple_call_arg (call, 0);
7078 tree bit = gimple_call_arg (call, 1);
7079 tree flag = gimple_call_arg (call, 2);
7080 tree lhs = gimple_call_lhs (call);
7081 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
7082 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
7083 enum rtx_code code;
7084 optab optab;
7085 class expand_operand ops[5];
7086
7087 gcc_assert (flag_inline_atomics);
7088
7089 if (gimple_call_num_args (call) == 4)
7090 model = get_memmodel (gimple_call_arg (call, 3));
7091
7092 rtx mem = get_builtin_sync_mem (ptr, mode);
7093 rtx val = expand_expr_force_mode (bit, mode);
7094
7095 switch (gimple_call_internal_fn (call))
7096 {
7097 case IFN_ATOMIC_BIT_TEST_AND_SET:
7098 code = IOR;
7099 optab = atomic_bit_test_and_set_optab;
7100 break;
7101 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
7102 code = XOR;
7103 optab = atomic_bit_test_and_complement_optab;
7104 break;
7105 case IFN_ATOMIC_BIT_TEST_AND_RESET:
7106 code = AND;
7107 optab = atomic_bit_test_and_reset_optab;
7108 break;
7109 default:
7110 gcc_unreachable ();
7111 }
7112
7113 if (lhs == NULL_TREE)
7114 {
7115 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
7116 val, NULL_RTX, true, OPTAB_DIRECT);
7117 if (code == AND)
7118 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
7119 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
7120 return;
7121 }
7122
7123 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
7124 enum insn_code icode = direct_optab_handler (optab, mode);
7125 gcc_assert (icode != CODE_FOR_nothing);
7126 create_output_operand (&ops[0], target, mode);
7127 create_fixed_operand (&ops[1], mem);
7128 create_convert_operand_to (&ops[2], val, mode, true);
7129 create_integer_operand (&ops[3], model);
7130 create_integer_operand (&ops[4], integer_onep (flag));
7131 if (maybe_expand_insn (icode, 5, ops))
7132 return;
7133
7134 rtx bitval = val;
7135 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
7136 val, NULL_RTX, true, OPTAB_DIRECT);
7137 rtx maskval = val;
7138 if (code == AND)
7139 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
7140 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
7141 code, model, false);
7142 if (integer_onep (flag))
7143 {
7144 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
7145 NULL_RTX, true, OPTAB_DIRECT);
7146 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
7147 true, OPTAB_DIRECT);
7148 }
7149 else
7150 result = expand_simple_binop (mode, AND, result, maskval, target, true,
7151 OPTAB_DIRECT);
7152 if (result != target)
7153 emit_move_insn (target, result);
7154 }
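
/* These internal functions are typically created by the gimple optimizers
   from patterns such as (illustrative):

     unsigned int mask = 1U << bit;
     bool was_set
       = (__atomic_fetch_or (&word, mask, __ATOMIC_SEQ_CST) & mask) != 0;

   If the target provides an atomic_bit_test_and_set pattern this becomes a
   single instruction (e.g. lock bts on x86); otherwise the fallback above
   performs the fetch-op and recovers the tested bit by shifting and
   masking.  */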
7155
7156 /* Expand an atomic clear operation.
7157 void __atomic_clear (BOOL *obj, enum memmodel)
7158 EXP is the call expression. */
7159
7160 static rtx
7161 expand_builtin_atomic_clear (tree exp)
7162 {
7163 machine_mode mode;
7164 rtx mem, ret;
7165 enum memmodel model;
7166
7167 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
7168 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7169 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
7170
7171 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
7172 {
7173 location_t loc
7174 = expansion_point_location_if_in_system_header (input_location);
7175 warning_at (loc, OPT_Winvalid_memory_model,
7176 "invalid memory model for %<__atomic_store%>");
7177 model = MEMMODEL_SEQ_CST;
7178 }
7179
7180 /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
7181 Failing that, a store is issued by __atomic_store. The only way this can
7182 fail is if the bool type is larger than a word size. Unlikely, but
7183 handle it anyway for completeness. Assume a single threaded model since
7184 there is no atomic support in this case, and no barriers are required. */
7185 ret = expand_atomic_store (mem, const0_rtx, model, true);
7186 if (!ret)
7187 emit_move_insn (mem, const0_rtx);
7188 return const0_rtx;
7189 }
7190
7191 /* Expand an atomic test_and_set operation.
7192 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
7193 EXP is the call expression. */
7194
7195 static rtx
7196 expand_builtin_atomic_test_and_set (tree exp, rtx target)
7197 {
7198 rtx mem;
7199 enum memmodel model;
7200 machine_mode mode;
7201
7202 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
7203 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
7204 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
7205
7206 return expand_atomic_test_and_set (target, mem, model);
7207 }
7208
7209
7210 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
7211 this architecture. If ARG1 is NULL, use typical alignment for size ARG0. */
7212
7213 static tree
7214 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
7215 {
7216 int size;
7217 machine_mode mode;
7218 unsigned int mode_align, type_align;
7219
7220 if (TREE_CODE (arg0) != INTEGER_CST)
7221 return NULL_TREE;
7222
7223 /* We need a corresponding integer mode for the access to be lock-free. */
7224 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
7225 if (!int_mode_for_size (size, 0).exists (&mode))
7226 return boolean_false_node;
7227
7228 mode_align = GET_MODE_ALIGNMENT (mode);
7229
7230 if (TREE_CODE (arg1) == INTEGER_CST)
7231 {
7232 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
7233
7234 /* Either this argument is null, or it's a fake pointer encoding
7235 the alignment of the object. */
7236 val = least_bit_hwi (val);
7237 val *= BITS_PER_UNIT;
7238
7239 if (val == 0 || mode_align < val)
7240 type_align = mode_align;
7241 else
7242 type_align = val;
7243 }
7244 else
7245 {
7246 tree ttype = TREE_TYPE (arg1);
7247
7248 /* This function is usually invoked and folded immediately by the front
7249 end before anything else has a chance to look at it. The pointer
7250 parameter at this point is usually cast to a void *, so check for that
7251 and look past the cast. */
7252 if (CONVERT_EXPR_P (arg1)
7253 && POINTER_TYPE_P (ttype)
7254 && VOID_TYPE_P (TREE_TYPE (ttype))
7255 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
7256 arg1 = TREE_OPERAND (arg1, 0);
7257
7258 ttype = TREE_TYPE (arg1);
7259 gcc_assert (POINTER_TYPE_P (ttype));
7260
7261 /* Get the underlying type of the object. */
7262 ttype = TREE_TYPE (ttype);
7263 type_align = TYPE_ALIGN (ttype);
7264 }
7265
7266 /* If the object has smaller alignment, the lock free routines cannot
7267 be used. */
7268 if (type_align < mode_align)
7269 return boolean_false_node;
7270
7271 /* Check if a compare_and_swap pattern exists for the mode which represents
7272 the required size. The pattern is not allowed to fail, so the existence
7273 of the pattern indicates support is present. Also require that an
7274 atomic load exists for the required size. */
7275 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
7276 return boolean_true_node;
7277 else
7278 return boolean_false_node;
7279 }
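
/* Illustrative examples on a typical 64-bit target:

     __atomic_always_lock_free (4, 0)    // true: SImode CAS and load exist
     __atomic_always_lock_free (4, &c)   // false if c is a char with 1-byte
                                         // alignment, below SImode alignment

   A constant second argument is treated as an alignment encoding rather
   than as a real pointer.  */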
7280
7281 /* Return true if the parameters to call EXP represent an object which will
7282 always generate lock free instructions. The first argument represents the
7283 size of the object, and the second parameter is a pointer to the object
7284 itself. If NULL is passed for the object, then the result is based on
7285 typical alignment for an object of the specified size. Otherwise return
7286 false. */
7287
7288 static rtx
7289 expand_builtin_atomic_always_lock_free (tree exp)
7290 {
7291 tree size;
7292 tree arg0 = CALL_EXPR_ARG (exp, 0);
7293 tree arg1 = CALL_EXPR_ARG (exp, 1);
7294
7295 if (TREE_CODE (arg0) != INTEGER_CST)
7296 {
7297 error ("non-constant argument 1 to %qs", "__atomic_always_lock_free");
7298 return const0_rtx;
7299 }
7300
7301 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
7302 if (size == boolean_true_node)
7303 return const1_rtx;
7304 return const0_rtx;
7305 }
7306
7307 /* Return true if it can be determined that the object ARG1 of size ARG0
7308 is lock free on this architecture. */
7309
7310 static tree
7311 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
7312 {
7313 if (!flag_inline_atomics)
7314 return NULL_TREE;
7315
7316 /* The answer is known only if the object is always lock free. */
7317 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
7318 return boolean_true_node;
7319
7320 return NULL_TREE;
7321 }
7322
7323 /* Return true if the parameters to call EXP represent an object which will
7324 always generate lock free instructions. The first argument represents the
7325 size of the object, and the second parameter is a pointer to the object
7326 itself. If NULL is passed for the object, then the result is based on
7327 typical alignment for an object of the specified size. Otherwise return
7328 NULL_RTX. */
7329
7330 static rtx
7331 expand_builtin_atomic_is_lock_free (tree exp)
7332 {
7333 tree size;
7334 tree arg0 = CALL_EXPR_ARG (exp, 0);
7335 tree arg1 = CALL_EXPR_ARG (exp, 1);
7336
7337 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
7338 {
7339 error ("non-integer argument 1 to %qs", "__atomic_is_lock_free");
7340 return NULL_RTX;
7341 }
7342
7343 if (!flag_inline_atomics)
7344 return NULL_RTX;
7345
7346 /* If the value is known at compile time, return the RTX for it. */
7347 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
7348 if (size == boolean_true_node)
7349 return const1_rtx;
7350
7351 return NULL_RTX;
7352 }
7353
7354 /* Expand the __atomic_thread_fence intrinsic:
7355 void __atomic_thread_fence (enum memmodel)
7356 EXP is the CALL_EXPR. */
7357
7358 static void
7359 expand_builtin_atomic_thread_fence (tree exp)
7360 {
7361 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
7362 expand_mem_thread_fence (model);
7363 }
7364
7365 /* Expand the __atomic_signal_fence intrinsic:
7366 void __atomic_signal_fence (enum memmodel)
7367 EXP is the CALL_EXPR. */
7368
7369 static void
7370 expand_builtin_atomic_signal_fence (tree exp)
7371 {
7372 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
7373 expand_mem_signal_fence (model);
7374 }
7375
7376 /* Expand the __sync_synchronize intrinsic. */
7377
7378 static void
7379 expand_builtin_sync_synchronize (void)
7380 {
7381 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
7382 }
7383
7384 static rtx
7385 expand_builtin_thread_pointer (tree exp, rtx target)
7386 {
7387 enum insn_code icode;
7388 if (!validate_arglist (exp, VOID_TYPE))
7389 return const0_rtx;
7390 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
7391 if (icode != CODE_FOR_nothing)
7392 {
7393 class expand_operand op;
7394 /* If the target is not suitable then create a new target. */
7395 if (target == NULL_RTX
7396 || !REG_P (target)
7397 || GET_MODE (target) != Pmode)
7398 target = gen_reg_rtx (Pmode);
7399 create_output_operand (&op, target, Pmode);
7400 expand_insn (icode, 1, &op);
7401 return target;
7402 }
7403 error ("%<__builtin_thread_pointer%> is not supported on this target");
7404 return const0_rtx;
7405 }
7406
7407 static void
7408 expand_builtin_set_thread_pointer (tree exp)
7409 {
7410 enum insn_code icode;
7411 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7412 return;
7413 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
7414 if (icode != CODE_FOR_nothing)
7415 {
7416 class expand_operand op;
7417 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
7418 Pmode, EXPAND_NORMAL);
7419 create_input_operand (&op, val, Pmode);
7420 expand_insn (icode, 1, &op);
7421 return;
7422 }
7423 error ("%<__builtin_set_thread_pointer%> is not supported on this target");
7424 }
7425
7426
7427 /* Emit code to restore the current value of stack. */
7428
7429 static void
7430 expand_stack_restore (tree var)
7431 {
7432 rtx_insn *prev;
7433 rtx sa = expand_normal (var);
7434
7435 sa = convert_memory_address (Pmode, sa);
7436
7437 prev = get_last_insn ();
7438 emit_stack_restore (SAVE_BLOCK, sa);
7439
7440 record_new_stack_level ();
7441
7442 fixup_args_size_notes (prev, get_last_insn (), 0);
7443 }
7444
7445 /* Emit code to save the current value of stack. */
7446
7447 static rtx
7448 expand_stack_save (void)
7449 {
7450 rtx ret = NULL_RTX;
7451
7452 emit_stack_save (SAVE_BLOCK, &ret);
7453 return ret;
7454 }
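
/* The gimplifier pairs these two builtins around variable-length-array
   scopes, roughly (illustrative):

     void *sp = __builtin_stack_save ();
     { char vla[n]; use (vla); }
     __builtin_stack_restore (sp);

   so that VLA storage is released when the block exits.  */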
7455
7456 /* Emit code to get the openacc gang, worker or vector id or size. */
7457
7458 static rtx
7459 expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
7460 {
7461 const char *name;
7462 rtx fallback_retval;
7463 rtx_insn *(*gen_fn) (rtx, rtx);
7464 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
7465 {
7466 case BUILT_IN_GOACC_PARLEVEL_ID:
7467 name = "__builtin_goacc_parlevel_id";
7468 fallback_retval = const0_rtx;
7469 gen_fn = targetm.gen_oacc_dim_pos;
7470 break;
7471 case BUILT_IN_GOACC_PARLEVEL_SIZE:
7472 name = "__builtin_goacc_parlevel_size";
7473 fallback_retval = const1_rtx;
7474 gen_fn = targetm.gen_oacc_dim_size;
7475 break;
7476 default:
7477 gcc_unreachable ();
7478 }
7479
7480 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
7481 {
7482 error ("%qs only supported in OpenACC code", name);
7483 return const0_rtx;
7484 }
7485
7486 tree arg = CALL_EXPR_ARG (exp, 0);
7487 if (TREE_CODE (arg) != INTEGER_CST)
7488 {
7489 error ("non-constant argument 0 to %qs", name);
7490 return const0_rtx;
7491 }
7492
7493 int dim = TREE_INT_CST_LOW (arg);
7494 switch (dim)
7495 {
7496 case GOMP_DIM_GANG:
7497 case GOMP_DIM_WORKER:
7498 case GOMP_DIM_VECTOR:
7499 break;
7500 default:
7501 error ("illegal argument 0 to %qs", name);
7502 return const0_rtx;
7503 }
7504
7505 if (ignore)
7506 return target;
7507
7508 if (target == NULL_RTX)
7509 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7510
7511 if (!targetm.have_oacc_dim_size ())
7512 {
7513 emit_move_insn (target, fallback_retval);
7514 return target;
7515 }
7516
7517 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
7518 emit_insn (gen_fn (reg, GEN_INT (dim)));
7519 if (reg != target)
7520 emit_move_insn (target, reg);
7521
7522 return target;
7523 }
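
/* For example (illustrative), inside an OpenACC offloaded function

     int g = __builtin_goacc_parlevel_id (GOMP_DIM_GANG);

   expands via the target's oacc_dim_pos pattern; a target without
   oacc_dim_pos/oacc_dim_size patterns gets the fallback constants above
   (0 for the id, 1 for the size).  */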
7524
7525 /* Expand a string compare operation using a sequence of char comparisons
7526 to get rid of the calling overhead, with result going to TARGET if
7527 that's convenient.
7528
7529 VAR_STR is the variable string source;
7530 CONST_STR is the constant string source;
7531 LENGTH is the number of chars to compare;
7532 CONST_STR_N indicates which source string is the constant string;
7533 IS_MEMCMP indicates whether it's a memcmp or strcmp.
7534
7535 The call is expanded to (assume const_str_n is 2, i.e., arg2 is a constant string):
7536
7537 target = (int) (unsigned char) var_str[0]
7538 - (int) (unsigned char) const_str[0];
7539 if (target != 0)
7540 goto ne_label;
7541 ...
7542 target = (int) (unsigned char) var_str[length - 2]
7543 - (int) (unsigned char) const_str[length - 2];
7544 if (target != 0)
7545 goto ne_label;
7546 target = (int) (unsigned char) var_str[length - 1]
7547 - (int) (unsigned char) const_str[length - 1];
7548 ne_label:
7549 */
7550
7551 static rtx
7552 inline_string_cmp (rtx target, tree var_str, const char *const_str,
7553 unsigned HOST_WIDE_INT length,
7554 int const_str_n, machine_mode mode)
7555 {
7556 HOST_WIDE_INT offset = 0;
7557 rtx var_rtx_array
7558 = get_memory_rtx (var_str, build_int_cst (unsigned_type_node,length));
7559 rtx var_rtx = NULL_RTX;
7560 rtx const_rtx = NULL_RTX;
7561 rtx result = target ? target : gen_reg_rtx (mode);
7562 rtx_code_label *ne_label = gen_label_rtx ();
7563 tree unit_type_node = unsigned_char_type_node;
7564 scalar_int_mode unit_mode
7565 = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);
7566
7567 start_sequence ();
7568
7569 for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
7570 {
7571 var_rtx
7572 = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
7573 const_rtx = c_readstr (const_str + offset, unit_mode);
7574 rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
7575 rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
7576
7577 op0 = convert_modes (mode, unit_mode, op0, 1);
7578 op1 = convert_modes (mode, unit_mode, op1, 1);
7579 result = expand_simple_binop (mode, MINUS, op0, op1,
7580 result, 1, OPTAB_WIDEN);
7581 if (i < length - 1)
7582 emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
7583 mode, true, ne_label);
7584 offset += GET_MODE_SIZE (unit_mode);
7585 }
7586
7587 emit_label (ne_label);
7588 rtx_insn *insns = get_insns ();
7589 end_sequence ();
7590 emit_insn (insns);
7591
7592 return result;
7593 }
7594
7595 /* Inline expansion of a call to str(n)cmp and memcmp, with result going
7596 to TARGET if that's convenient.
7597 If the call is not inlined, return NULL_RTX. */
7598
7599 static rtx
7600 inline_expand_builtin_bytecmp (tree exp, rtx target)
7601 {
7602 tree fndecl = get_callee_fndecl (exp);
7603 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7604 bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
7605
7606 /* Do NOT apply this inlining expansion when optimizing for size or
7607 optimization level below 2. */
7608 if (optimize < 2 || optimize_insn_for_size_p ())
7609 return NULL_RTX;
7610
7611 gcc_checking_assert (fcode == BUILT_IN_STRCMP
7612 || fcode == BUILT_IN_STRNCMP
7613 || fcode == BUILT_IN_MEMCMP);
7614
7615 /* On a target where the type of the call (int) has the same or narrower
7616 precision than unsigned char, give up the inlining expansion. */
7617 if (TYPE_PRECISION (unsigned_char_type_node)
7618 >= TYPE_PRECISION (TREE_TYPE (exp)))
7619 return NULL_RTX;
7620
7621 tree arg1 = CALL_EXPR_ARG (exp, 0);
7622 tree arg2 = CALL_EXPR_ARG (exp, 1);
7623 tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
7624
7625 unsigned HOST_WIDE_INT len1 = 0;
7626 unsigned HOST_WIDE_INT len2 = 0;
7627 unsigned HOST_WIDE_INT len3 = 0;
7628
7629 /* Get the object representation of the initializers of ARG1 and ARG2
7630 as strings, provided they refer to constant objects, with their byte
7631 sizes in LEN1 and LEN2, respectively. */
7632 const char *bytes1 = c_getstr (arg1, &len1);
7633 const char *bytes2 = c_getstr (arg2, &len2);
7634
7635 /* Fail if neither argument refers to an initialized constant. */
7636 if (!bytes1 && !bytes2)
7637 return NULL_RTX;
7638
7639 if (is_ncmp)
7640 {
7641 /* Fail if the memcmp/strncmp bound is not a constant. */
7642 if (!tree_fits_uhwi_p (len3_tree))
7643 return NULL_RTX;
7644
7645 len3 = tree_to_uhwi (len3_tree);
7646
7647 if (fcode == BUILT_IN_MEMCMP)
7648 {
7649 /* Fail if the memcmp bound is greater than the size of either
7650 of the two constant objects. */
7651 if ((bytes1 && len1 < len3)
7652 || (bytes2 && len2 < len3))
7653 return NULL_RTX;
7654 }
7655 }
7656
7657 if (fcode != BUILT_IN_MEMCMP)
7658 {
7659 /* For string functions (i.e., strcmp and strncmp) reduce LEN1
7660 and LEN2 to the length of the nul-terminated string stored
7661 in each. */
7662 if (bytes1 != NULL)
7663 len1 = strnlen (bytes1, len1) + 1;
7664 if (bytes2 != NULL)
7665 len2 = strnlen (bytes2, len2) + 1;
7666 }
7667
7668 /* See inline_string_cmp. */
7669 int const_str_n;
7670 if (!len1)
7671 const_str_n = 2;
7672 else if (!len2)
7673 const_str_n = 1;
7674 else if (len2 > len1)
7675 const_str_n = 1;
7676 else
7677 const_str_n = 2;
7678
7679 /* For strncmp only, compute the new bound as the smallest of
7680 the lengths of the two strings (plus 1) and the bound provided
7681 to the function. */
7682 unsigned HOST_WIDE_INT bound = (const_str_n == 1) ? len1 : len2;
7683 if (is_ncmp && len3 < bound)
7684 bound = len3;
7685
7686 /* If the bound of the comparison is larger than the threshold,
7687 do nothing. */
7688 if (bound > (unsigned HOST_WIDE_INT) param_builtin_string_cmp_inline_length)
7689 return NULL_RTX;
7690
7691 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
7692
7693 /* Now, start inline expansion of the call. */
7694 return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
7695 (const_str_n == 1) ? bytes1 : bytes2, bound,
7696 const_str_n, mode);
7697 }
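
/* Illustrative example: with the default value of
   --param builtin-string-cmp-inline-length (assumed here to be >= 3),

     strcmp (s, "hi")

   has one constant argument of length 2, so BOUND is 3 (including the
   terminating nul) and the call is expanded by inline_string_cmp into three
   byte comparisons instead of a library call.  */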
7698
7699 /* Expand a call to __builtin_speculation_safe_value_<N>. MODE
7700 represents the size of the first argument to that call, or VOIDmode
7701 if the argument is a pointer. IGNORE will be true if the result
7702 isn't used. */
7703 static rtx
7704 expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
7705 bool ignore)
7706 {
7707 rtx val, failsafe;
7708 unsigned nargs = call_expr_nargs (exp);
7709
7710 tree arg0 = CALL_EXPR_ARG (exp, 0);
7711
7712 if (mode == VOIDmode)
7713 {
7714 mode = TYPE_MODE (TREE_TYPE (arg0));
7715 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
7716 }
7717
7718 val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
7719
7720 /* An optional second argument can be used as a failsafe value on
7721 some machines. If it isn't present, then the failsafe value is
7722 assumed to be 0. */
7723 if (nargs > 1)
7724 {
7725 tree arg1 = CALL_EXPR_ARG (exp, 1);
7726 failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
7727 }
7728 else
7729 failsafe = const0_rtx;
7730
7731 /* If the result isn't used, the behavior is undefined. It would be
7732 nice to emit a warning here, but path splitting means this might
7733 happen with legitimate code. So simply drop the builtin
7734 expansion in that case; we've handled any side-effects above. */
7735 if (ignore)
7736 return const0_rtx;
7737
7738 /* If we don't have a suitable target, create one to hold the result. */
7739 if (target == NULL || GET_MODE (target) != mode)
7740 target = gen_reg_rtx (mode);
7741
7742 if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
7743 val = convert_modes (mode, VOIDmode, val, false);
7744
7745 return targetm.speculation_safe_value (mode, target, val, failsafe);
7746 }
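
/* A sketch of the intended source-level use (illustrative):

     if (idx < len)
       val = array[__builtin_speculation_safe_value (idx, 0)];

   Targets that implement the speculation_safe_value hook force the result
   to the failsafe value (0 here) on mis-speculated paths; the default hook
   simply returns VAL and may warn that no barrier is available.  */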
7747
7748 /* Expand an expression EXP that calls a built-in function,
7749 with result going to TARGET if that's convenient
7750 (and in mode MODE if that's convenient).
7751 SUBTARGET may be used as the target for computing one of EXP's operands.
7752 IGNORE is nonzero if the value is to be ignored. */
7753
7754 rtx
7755 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
7756 int ignore)
7757 {
7758 tree fndecl = get_callee_fndecl (exp);
7759 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
7760 int flags;
7761
7762 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7763 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7764
7765 /* When ASan is enabled, we don't want to expand some memory/string
7766 builtins and rely on libsanitizer's hooks. This allows us to avoid
7767 redundant checks and be sure, that possible overflow will be detected
7768 by ASan. */
7769
7770 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7771 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
7772 return expand_call (exp, target, ignore);
7773
7774 /* When not optimizing, generate calls to library functions for a certain
7775 set of builtins. */
7776 if (!optimize
7777 && !called_as_built_in (fndecl)
7778 && fcode != BUILT_IN_FORK
7779 && fcode != BUILT_IN_EXECL
7780 && fcode != BUILT_IN_EXECV
7781 && fcode != BUILT_IN_EXECLP
7782 && fcode != BUILT_IN_EXECLE
7783 && fcode != BUILT_IN_EXECVP
7784 && fcode != BUILT_IN_EXECVE
7785 && !ALLOCA_FUNCTION_CODE_P (fcode)
7786 && fcode != BUILT_IN_FREE)
7787 return expand_call (exp, target, ignore);
7788
7789 /* The built-in function expanders test for target == const0_rtx
7790 to determine whether the function's result will be ignored. */
7791 if (ignore)
7792 target = const0_rtx;
7793
7794 /* If the result of a pure or const built-in function is ignored, and
7795 none of its arguments are volatile, we can avoid expanding the
7796 built-in call and just evaluate the arguments for side-effects. */
7797 if (target == const0_rtx
7798 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
7799 && !(flags & ECF_LOOPING_CONST_OR_PURE))
7800 {
7801 bool volatilep = false;
7802 tree arg;
7803 call_expr_arg_iterator iter;
7804
7805 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7806 if (TREE_THIS_VOLATILE (arg))
7807 {
7808 volatilep = true;
7809 break;
7810 }
7811
7812 if (! volatilep)
7813 {
7814 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7815 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
7816 return const0_rtx;
7817 }
7818 }
7819
7820 switch (fcode)
7821 {
7822 CASE_FLT_FN (BUILT_IN_FABS):
7823 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
7824 case BUILT_IN_FABSD32:
7825 case BUILT_IN_FABSD64:
7826 case BUILT_IN_FABSD128:
7827 target = expand_builtin_fabs (exp, target, subtarget);
7828 if (target)
7829 return target;
7830 break;
7831
7832 CASE_FLT_FN (BUILT_IN_COPYSIGN):
7833 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
7834 target = expand_builtin_copysign (exp, target, subtarget);
7835 if (target)
7836 return target;
7837 break;
7838
7839 /* Just do a normal library call if we were unable to fold
7840 the values. */
7841 CASE_FLT_FN (BUILT_IN_CABS):
7842 break;
7843
7844 CASE_FLT_FN (BUILT_IN_FMA):
7845 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
7846 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
7847 if (target)
7848 return target;
7849 break;
7850
7851 CASE_FLT_FN (BUILT_IN_ILOGB):
7852 if (! flag_unsafe_math_optimizations)
7853 break;
7854 gcc_fallthrough ();
7855 CASE_FLT_FN (BUILT_IN_ISINF):
7856 CASE_FLT_FN (BUILT_IN_FINITE):
7857 case BUILT_IN_ISFINITE:
7858 case BUILT_IN_ISNORMAL:
7859 target = expand_builtin_interclass_mathfn (exp, target);
7860 if (target)
7861 return target;
7862 break;
7863
7864 CASE_FLT_FN (BUILT_IN_ICEIL):
7865 CASE_FLT_FN (BUILT_IN_LCEIL):
7866 CASE_FLT_FN (BUILT_IN_LLCEIL):
7867 CASE_FLT_FN (BUILT_IN_LFLOOR):
7868 CASE_FLT_FN (BUILT_IN_IFLOOR):
7869 CASE_FLT_FN (BUILT_IN_LLFLOOR):
7870 target = expand_builtin_int_roundingfn (exp, target);
7871 if (target)
7872 return target;
7873 break;
7874
7875 CASE_FLT_FN (BUILT_IN_IRINT):
7876 CASE_FLT_FN (BUILT_IN_LRINT):
7877 CASE_FLT_FN (BUILT_IN_LLRINT):
7878 CASE_FLT_FN (BUILT_IN_IROUND):
7879 CASE_FLT_FN (BUILT_IN_LROUND):
7880 CASE_FLT_FN (BUILT_IN_LLROUND):
7881 target = expand_builtin_int_roundingfn_2 (exp, target);
7882 if (target)
7883 return target;
7884 break;
7885
7886 CASE_FLT_FN (BUILT_IN_POWI):
7887 target = expand_builtin_powi (exp, target);
7888 if (target)
7889 return target;
7890 break;
7891
7892 CASE_FLT_FN (BUILT_IN_CEXPI):
7893 target = expand_builtin_cexpi (exp, target);
7894 gcc_assert (target);
7895 return target;
7896
7897 CASE_FLT_FN (BUILT_IN_SIN):
7898 CASE_FLT_FN (BUILT_IN_COS):
7899 if (! flag_unsafe_math_optimizations)
7900 break;
7901 target = expand_builtin_mathfn_3 (exp, target, subtarget);
7902 if (target)
7903 return target;
7904 break;
7905
7906 CASE_FLT_FN (BUILT_IN_SINCOS):
7907 if (! flag_unsafe_math_optimizations)
7908 break;
7909 target = expand_builtin_sincos (exp);
7910 if (target)
7911 return target;
7912 break;
7913
7914 case BUILT_IN_APPLY_ARGS:
7915 return expand_builtin_apply_args ();
7916
7917 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7918 FUNCTION with a copy of the parameters described by
7919 ARGUMENTS, and ARGSIZE. It returns a block of memory
7920 allocated on the stack into which is stored all the registers
7921 that might possibly be used for returning the result of a
7922 function. ARGUMENTS is the value returned by
7923 __builtin_apply_args. ARGSIZE is the number of bytes of
7924 arguments that must be copied. ??? How should this value be
7925 computed? We'll also need a safe worst case value for varargs
7926 functions. */
7927 case BUILT_IN_APPLY:
7928 if (!validate_arglist (exp, POINTER_TYPE,
7929 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
7930 && !validate_arglist (exp, REFERENCE_TYPE,
7931 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7932 return const0_rtx;
7933 else
7934 {
7935 rtx ops[3];
7936
7937 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
7938 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
7939 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
7940
7941 return expand_builtin_apply (ops[0], ops[1], ops[2]);
7942 }
7943
7944 /* __builtin_return (RESULT) causes the function to return the
7945 value described by RESULT. RESULT is address of the block of
7946 memory returned by __builtin_apply. */
7947 case BUILT_IN_RETURN:
7948 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7949 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
7950 return const0_rtx;
7951
7952 case BUILT_IN_SAVEREGS:
7953 return expand_builtin_saveregs ();
7954
7955 case BUILT_IN_VA_ARG_PACK:
7956 /* All valid uses of __builtin_va_arg_pack () are removed during
7957 inlining. */
7958 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
7959 return const0_rtx;
7960
7961 case BUILT_IN_VA_ARG_PACK_LEN:
7962 /* All valid uses of __builtin_va_arg_pack_len () are removed during
7963 inlining. */
7964 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
7965 return const0_rtx;
7966
7967 /* Return the address of the first anonymous stack arg. */
7968 case BUILT_IN_NEXT_ARG:
7969 if (fold_builtin_next_arg (exp, false))
7970 return const0_rtx;
7971 return expand_builtin_next_arg ();
7972
7973 case BUILT_IN_CLEAR_CACHE:
7974 target = expand_builtin___clear_cache (exp);
7975 if (target)
7976 return target;
7977 break;
7978
7979 case BUILT_IN_CLASSIFY_TYPE:
7980 return expand_builtin_classify_type (exp);
7981
7982 case BUILT_IN_CONSTANT_P:
7983 return const0_rtx;
7984
7985 case BUILT_IN_FRAME_ADDRESS:
7986 case BUILT_IN_RETURN_ADDRESS:
7987 return expand_builtin_frame_address (fndecl, exp);
7988
7989 /* Returns the address of the area where the structure is returned.
7990 0 otherwise. */
7991 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
7992 if (call_expr_nargs (exp) != 0
7993 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
7994 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
7995 return const0_rtx;
7996 else
7997 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
7998
7999 CASE_BUILT_IN_ALLOCA:
8000 target = expand_builtin_alloca (exp);
8001 if (target)
8002 return target;
8003 break;
8004
8005 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
8006 return expand_asan_emit_allocas_unpoison (exp);
8007
8008 case BUILT_IN_STACK_SAVE:
8009 return expand_stack_save ();
8010
8011 case BUILT_IN_STACK_RESTORE:
8012 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
8013 return const0_rtx;
8014
8015 case BUILT_IN_BSWAP16:
8016 case BUILT_IN_BSWAP32:
8017 case BUILT_IN_BSWAP64:
8018 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
8019 if (target)
8020 return target;
8021 break;
8022
8023 CASE_INT_FN (BUILT_IN_FFS):
8024 target = expand_builtin_unop (target_mode, exp, target,
8025 subtarget, ffs_optab);
8026 if (target)
8027 return target;
8028 break;
8029
8030 CASE_INT_FN (BUILT_IN_CLZ):
8031 target = expand_builtin_unop (target_mode, exp, target,
8032 subtarget, clz_optab);
8033 if (target)
8034 return target;
8035 break;
8036
8037 CASE_INT_FN (BUILT_IN_CTZ):
8038 target = expand_builtin_unop (target_mode, exp, target,
8039 subtarget, ctz_optab);
8040 if (target)
8041 return target;
8042 break;
8043
8044 CASE_INT_FN (BUILT_IN_CLRSB):
8045 target = expand_builtin_unop (target_mode, exp, target,
8046 subtarget, clrsb_optab);
8047 if (target)
8048 return target;
8049 break;
8050
8051 CASE_INT_FN (BUILT_IN_POPCOUNT):
8052 target = expand_builtin_unop (target_mode, exp, target,
8053 subtarget, popcount_optab);
8054 if (target)
8055 return target;
8056 break;
8057
8058 CASE_INT_FN (BUILT_IN_PARITY):
8059 target = expand_builtin_unop (target_mode, exp, target,
8060 subtarget, parity_optab);
8061 if (target)
8062 return target;
8063 break;
8064
8065 case BUILT_IN_STRLEN:
8066 target = expand_builtin_strlen (exp, target, target_mode);
8067 if (target)
8068 return target;
8069 break;
8070
8071 case BUILT_IN_STRNLEN:
8072 target = expand_builtin_strnlen (exp, target, target_mode);
8073 if (target)
8074 return target;
8075 break;
8076
8077 case BUILT_IN_STRCAT:
8078 target = expand_builtin_strcat (exp);
8079 if (target)
8080 return target;
8081 break;
8082
8083 case BUILT_IN_GETTEXT:
8084 case BUILT_IN_PUTS:
8085 case BUILT_IN_PUTS_UNLOCKED:
8086 case BUILT_IN_STRDUP:
8087 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
8088 check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 0));
8089 break;
8090
8091 case BUILT_IN_INDEX:
8092 case BUILT_IN_RINDEX:
8093 case BUILT_IN_STRCHR:
8094 case BUILT_IN_STRRCHR:
8095 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
8096 check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 0));
8097 break;
8098
8099 case BUILT_IN_FPUTS:
8100 case BUILT_IN_FPUTS_UNLOCKED:
8101 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
8102 check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 0));
8103 break;
8104
8105 case BUILT_IN_STRNDUP:
8106 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
8107 check_nul_terminated_array (exp,
8108 CALL_EXPR_ARG (exp, 0),
8109 CALL_EXPR_ARG (exp, 1));
8110 break;
8111
8112 case BUILT_IN_STRCASECMP:
8113 case BUILT_IN_STRSTR:
8114 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
8115 {
8116 check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 0));
8117 check_nul_terminated_array (exp, CALL_EXPR_ARG (exp, 1));
8118 }
8119 break;
8120
8121 case BUILT_IN_STRCPY:
8122 target = expand_builtin_strcpy (exp, target);
8123 if (target)
8124 return target;
8125 break;
8126
8127 case BUILT_IN_STRNCAT:
8128 target = expand_builtin_strncat (exp, target);
8129 if (target)
8130 return target;
8131 break;
8132
8133 case BUILT_IN_STRNCPY:
8134 target = expand_builtin_strncpy (exp, target);
8135 if (target)
8136 return target;
8137 break;
8138
8139 case BUILT_IN_STPCPY:
8140 target = expand_builtin_stpcpy (exp, target, mode);
8141 if (target)
8142 return target;
8143 break;
8144
8145 case BUILT_IN_STPNCPY:
8146 target = expand_builtin_stpncpy (exp, target);
8147 if (target)
8148 return target;
8149 break;
8150
8151 case BUILT_IN_MEMCHR:
8152 target = expand_builtin_memchr (exp, target);
8153 if (target)
8154 return target;
8155 break;
8156
8157 case BUILT_IN_MEMCPY:
8158 target = expand_builtin_memcpy (exp, target);
8159 if (target)
8160 return target;
8161 break;
8162
8163 case BUILT_IN_MEMMOVE:
8164 target = expand_builtin_memmove (exp, target);
8165 if (target)
8166 return target;
8167 break;
8168
8169 case BUILT_IN_MEMPCPY:
8170 target = expand_builtin_mempcpy (exp, target);
8171 if (target)
8172 return target;
8173 break;
8174
8175 case BUILT_IN_MEMSET:
8176 target = expand_builtin_memset (exp, target, mode);
8177 if (target)
8178 return target;
8179 break;
8180
8181 case BUILT_IN_BZERO:
8182 target = expand_builtin_bzero (exp);
8183 if (target)
8184 return target;
8185 break;
8186
8187 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
8188 back to a BUILT_IN_STRCMP. Remember to delete the 3rd parameter
8189 when changing it to a strcmp call. */
8190 case BUILT_IN_STRCMP_EQ:
8191 target = expand_builtin_memcmp (exp, target, true);
8192 if (target)
8193 return target;
8194
8195 /* Change this call back to a BUILT_IN_STRCMP. */
8196 TREE_OPERAND (exp, 1)
8197 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
8198
8199 /* Delete the last parameter. */
8200 unsigned int i;
8201 vec<tree, va_gc> *arg_vec;
8202 vec_alloc (arg_vec, 2);
8203 for (i = 0; i < 2; i++)
8204 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
8205 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
8206 /* FALLTHROUGH */
8207
8208 case BUILT_IN_STRCMP:
8209 target = expand_builtin_strcmp (exp, target);
8210 if (target)
8211 return target;
8212 break;
8213
8214 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
8215 back to a BUILT_IN_STRNCMP. */
8216 case BUILT_IN_STRNCMP_EQ:
8217 target = expand_builtin_memcmp (exp, target, true);
8218 if (target)
8219 return target;
8220
8221 /* Change it back to a BUILT_IN_STRNCMP. */
8222 TREE_OPERAND (exp, 1)
8223 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
8224 /* FALLTHROUGH */
8225
8226 case BUILT_IN_STRNCMP:
8227 target = expand_builtin_strncmp (exp, target, mode);
8228 if (target)
8229 return target;
8230 break;
8231
8232 case BUILT_IN_BCMP:
8233 case BUILT_IN_MEMCMP:
8234 case BUILT_IN_MEMCMP_EQ:
8235 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
8236 if (target)
8237 return target;
8238 if (fcode == BUILT_IN_MEMCMP_EQ)
8239 {
8240 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
8241 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
8242 }
8243 break;
8244
8245 case BUILT_IN_SETJMP:
8246 /* This should have been lowered to the builtins below. */
8247 gcc_unreachable ();
8248
8249 case BUILT_IN_SETJMP_SETUP:
8250 /* __builtin_setjmp_setup is passed a pointer to an array of five words
8251 and the receiver label. */
8252 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
8253 {
8254 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
8255 VOIDmode, EXPAND_NORMAL);
8256 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
8257 rtx_insn *label_r = label_rtx (label);
8258
8259 /* This is copied from the handling of non-local gotos. */
8260 expand_builtin_setjmp_setup (buf_addr, label_r);
8261 nonlocal_goto_handler_labels
8262 = gen_rtx_INSN_LIST (VOIDmode, label_r,
8263 nonlocal_goto_handler_labels);
8264 /* ??? Do not let expand_label treat us as such since we would
8265 not want to be both on the list of non-local labels and on
8266 the list of forced labels. */
8267 FORCED_LABEL (label) = 0;
8268 return const0_rtx;
8269 }
8270 break;
8271
8272 case BUILT_IN_SETJMP_RECEIVER:
8273 /* __builtin_setjmp_receiver is passed the receiver label. */
8274 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
8275 {
8276 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
8277 rtx_insn *label_r = label_rtx (label);
8278
8279 expand_builtin_setjmp_receiver (label_r);
8280 return const0_rtx;
8281 }
8282 break;
8283
8284 /* __builtin_longjmp is passed a pointer to an array of five words.
8285 It's similar to the C library longjmp function but works with
8286 __builtin_setjmp above. */
8287 case BUILT_IN_LONGJMP:
8288 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
8289 {
8290 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
8291 VOIDmode, EXPAND_NORMAL);
8292 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
8293
8294 if (value != const1_rtx)
8295 {
8296 error ("%<__builtin_longjmp%> second argument must be 1");
8297 return const0_rtx;
8298 }
8299
8300 expand_builtin_longjmp (buf_addr, value);
8301 return const0_rtx;
8302 }
8303 break;
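  /* Illustrative sketch (not part of the original source): the only
     supported pairing looks roughly like

	 void *buf[5];
	 if (__builtin_setjmp (buf) == 0)
	   do_work ();		    // normal path
	 else
	   handle_return ();	    // reached via __builtin_longjmp (buf, 1)

     where do_work and handle_return are hypothetical; the second argument
     of __builtin_longjmp must be the literal 1, as the check above
     enforces.  */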
8304
8305 case BUILT_IN_NONLOCAL_GOTO:
8306 target = expand_builtin_nonlocal_goto (exp);
8307 if (target)
8308 return target;
8309 break;
8310
8311 /* This updates the setjmp buffer that is its argument with the value
8312 of the current stack pointer. */
8313 case BUILT_IN_UPDATE_SETJMP_BUF:
8314 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
8315 {
8316 rtx buf_addr
8317 = expand_normal (CALL_EXPR_ARG (exp, 0));
8318
8319 expand_builtin_update_setjmp_buf (buf_addr);
8320 return const0_rtx;
8321 }
8322 break;
8323
8324 case BUILT_IN_TRAP:
8325 expand_builtin_trap ();
8326 return const0_rtx;
8327
8328 case BUILT_IN_UNREACHABLE:
8329 expand_builtin_unreachable ();
8330 return const0_rtx;
8331
8332 CASE_FLT_FN (BUILT_IN_SIGNBIT):
8333 case BUILT_IN_SIGNBITD32:
8334 case BUILT_IN_SIGNBITD64:
8335 case BUILT_IN_SIGNBITD128:
8336 target = expand_builtin_signbit (exp, target);
8337 if (target)
8338 return target;
8339 break;
8340
8341 /* Various hooks for the DWARF 2 __throw routine. */
8342 case BUILT_IN_UNWIND_INIT:
8343 expand_builtin_unwind_init ();
8344 return const0_rtx;
8345 case BUILT_IN_DWARF_CFA:
8346 return virtual_cfa_rtx;
8347 #ifdef DWARF2_UNWIND_INFO
8348 case BUILT_IN_DWARF_SP_COLUMN:
8349 return expand_builtin_dwarf_sp_column ();
8350 case BUILT_IN_INIT_DWARF_REG_SIZES:
8351 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
8352 return const0_rtx;
8353 #endif
8354 case BUILT_IN_FROB_RETURN_ADDR:
8355 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
8356 case BUILT_IN_EXTRACT_RETURN_ADDR:
8357 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
8358 case BUILT_IN_EH_RETURN:
8359 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
8360 CALL_EXPR_ARG (exp, 1));
8361 return const0_rtx;
8362 case BUILT_IN_EH_RETURN_DATA_REGNO:
8363 return expand_builtin_eh_return_data_regno (exp);
8364 case BUILT_IN_EXTEND_POINTER:
8365 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
8366 case BUILT_IN_EH_POINTER:
8367 return expand_builtin_eh_pointer (exp);
8368 case BUILT_IN_EH_FILTER:
8369 return expand_builtin_eh_filter (exp);
8370 case BUILT_IN_EH_COPY_VALUES:
8371 return expand_builtin_eh_copy_values (exp);
8372
8373 case BUILT_IN_VA_START:
8374 return expand_builtin_va_start (exp);
8375 case BUILT_IN_VA_END:
8376 return expand_builtin_va_end (exp);
8377 case BUILT_IN_VA_COPY:
8378 return expand_builtin_va_copy (exp);
8379 case BUILT_IN_EXPECT:
8380 return expand_builtin_expect (exp, target);
8381 case BUILT_IN_EXPECT_WITH_PROBABILITY:
8382 return expand_builtin_expect_with_probability (exp, target);
8383 case BUILT_IN_ASSUME_ALIGNED:
8384 return expand_builtin_assume_aligned (exp, target);
8385 case BUILT_IN_PREFETCH:
8386 expand_builtin_prefetch (exp);
8387 return const0_rtx;
8388
8389 case BUILT_IN_INIT_TRAMPOLINE:
8390 return expand_builtin_init_trampoline (exp, true);
8391 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
8392 return expand_builtin_init_trampoline (exp, false);
8393 case BUILT_IN_ADJUST_TRAMPOLINE:
8394 return expand_builtin_adjust_trampoline (exp);
8395
8396 case BUILT_IN_INIT_DESCRIPTOR:
8397 return expand_builtin_init_descriptor (exp);
8398 case BUILT_IN_ADJUST_DESCRIPTOR:
8399 return expand_builtin_adjust_descriptor (exp);
8400
8401 case BUILT_IN_FORK:
8402 case BUILT_IN_EXECL:
8403 case BUILT_IN_EXECV:
8404 case BUILT_IN_EXECLP:
8405 case BUILT_IN_EXECLE:
8406 case BUILT_IN_EXECVP:
8407 case BUILT_IN_EXECVE:
8408 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
8409 if (target)
8410 return target;
8411 break;
8412
8413 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
8414 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
8415 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
8416 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
8417 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
8418 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
8419 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
8420 if (target)
8421 return target;
8422 break;
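  /* Illustrative note (not part of the original source): for this and the
     following __sync_*/__atomic_* groups, the operand size is recovered by
     subtracting the _1 enumerator, e.g.

	 BUILT_IN_SYNC_FETCH_AND_ADD_4 - BUILT_IN_SYNC_FETCH_AND_ADD_1 == 2

     and get_builtin_sync_mode (2) then yields the 4-byte integer mode,
     assuming the _1/_2/_4/_8/_16 enumerators are declared consecutively,
     which the subtraction requires.  */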
8423
8424 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
8425 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
8426 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
8427 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
8428 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
8429 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
8430 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
8431 if (target)
8432 return target;
8433 break;
8434
8435 case BUILT_IN_SYNC_FETCH_AND_OR_1:
8436 case BUILT_IN_SYNC_FETCH_AND_OR_2:
8437 case BUILT_IN_SYNC_FETCH_AND_OR_4:
8438 case BUILT_IN_SYNC_FETCH_AND_OR_8:
8439 case BUILT_IN_SYNC_FETCH_AND_OR_16:
8440 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
8441 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
8442 if (target)
8443 return target;
8444 break;
8445
8446 case BUILT_IN_SYNC_FETCH_AND_AND_1:
8447 case BUILT_IN_SYNC_FETCH_AND_AND_2:
8448 case BUILT_IN_SYNC_FETCH_AND_AND_4:
8449 case BUILT_IN_SYNC_FETCH_AND_AND_8:
8450 case BUILT_IN_SYNC_FETCH_AND_AND_16:
8451 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
8452 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
8453 if (target)
8454 return target;
8455 break;
8456
8457 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
8458 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
8459 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
8460 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
8461 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
8462 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
8463 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
8464 if (target)
8465 return target;
8466 break;
8467
8468 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
8469 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
8470 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
8471 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
8472 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
8473 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
8474 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
8475 if (target)
8476 return target;
8477 break;
8478
8479 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
8480 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
8481 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
8482 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
8483 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
8484 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
8485 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
8486 if (target)
8487 return target;
8488 break;
8489
8490 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
8491 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
8492 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
8493 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
8494 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
8495 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
8496 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
8497 if (target)
8498 return target;
8499 break;
8500
8501 case BUILT_IN_SYNC_OR_AND_FETCH_1:
8502 case BUILT_IN_SYNC_OR_AND_FETCH_2:
8503 case BUILT_IN_SYNC_OR_AND_FETCH_4:
8504 case BUILT_IN_SYNC_OR_AND_FETCH_8:
8505 case BUILT_IN_SYNC_OR_AND_FETCH_16:
8506 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
8507 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
8508 if (target)
8509 return target;
8510 break;
8511
8512 case BUILT_IN_SYNC_AND_AND_FETCH_1:
8513 case BUILT_IN_SYNC_AND_AND_FETCH_2:
8514 case BUILT_IN_SYNC_AND_AND_FETCH_4:
8515 case BUILT_IN_SYNC_AND_AND_FETCH_8:
8516 case BUILT_IN_SYNC_AND_AND_FETCH_16:
8517 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
8518 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
8519 if (target)
8520 return target;
8521 break;
8522
8523 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
8524 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
8525 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
8526 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
8527 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
8528 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
8529 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
8530 if (target)
8531 return target;
8532 break;
8533
8534 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
8535 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
8536 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
8537 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
8538 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
8539 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
8540 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
8541 if (target)
8542 return target;
8543 break;
8544
8545 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
8546 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
8547 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
8548 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
8549 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
8550 if (mode == VOIDmode)
8551 mode = TYPE_MODE (boolean_type_node);
8552 if (!target || !register_operand (target, mode))
8553 target = gen_reg_rtx (mode);
8554
8555 mode = get_builtin_sync_mode
8556 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
8557 target = expand_builtin_compare_and_swap (mode, exp, true, target);
8558 if (target)
8559 return target;
8560 break;
8561
8562 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
8563 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
8564 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
8565 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
8566 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
8567 mode = get_builtin_sync_mode
8568 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
8569 target = expand_builtin_compare_and_swap (mode, exp, false, target);
8570 if (target)
8571 return target;
8572 break;
8573
8574 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
8575 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
8576 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
8577 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
8578 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
8579 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
8580 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
8581 if (target)
8582 return target;
8583 break;
8584
8585 case BUILT_IN_SYNC_LOCK_RELEASE_1:
8586 case BUILT_IN_SYNC_LOCK_RELEASE_2:
8587 case BUILT_IN_SYNC_LOCK_RELEASE_4:
8588 case BUILT_IN_SYNC_LOCK_RELEASE_8:
8589 case BUILT_IN_SYNC_LOCK_RELEASE_16:
8590 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
8591 expand_builtin_sync_lock_release (mode, exp);
8592 return const0_rtx;
8593
8594 case BUILT_IN_SYNC_SYNCHRONIZE:
8595 expand_builtin_sync_synchronize ();
8596 return const0_rtx;
8597
8598 case BUILT_IN_ATOMIC_EXCHANGE_1:
8599 case BUILT_IN_ATOMIC_EXCHANGE_2:
8600 case BUILT_IN_ATOMIC_EXCHANGE_4:
8601 case BUILT_IN_ATOMIC_EXCHANGE_8:
8602 case BUILT_IN_ATOMIC_EXCHANGE_16:
8603 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
8604 target = expand_builtin_atomic_exchange (mode, exp, target);
8605 if (target)
8606 return target;
8607 break;
8608
8609 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
8610 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
8611 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
8612 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
8613 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
8614 {
8615 unsigned int nargs, z;
8616 vec<tree, va_gc> *vec;
8617
8618 mode =
8619 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
8620 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
8621 if (target)
8622 return target;
8623
8624 /* If this is turned into an external library call, the weak parameter
8625 must be dropped to match the expected parameter list. */
8626 nargs = call_expr_nargs (exp);
8627 vec_alloc (vec, nargs - 1);
8628 for (z = 0; z < 3; z++)
8629 vec->quick_push (CALL_EXPR_ARG (exp, z));
8630 /* Skip the boolean weak parameter. */
8631 for (z = 4; z < 6; z++)
8632 vec->quick_push (CALL_EXPR_ARG (exp, z));
8633 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
8634 break;
8635 }
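      /* Illustrative sketch (not part of the original source): the builtin
	 takes six arguments while the library routine takes five, so e.g.

	     __atomic_compare_exchange_4 (p, expected, desired,
					  weak, success_order, failure_order)

	 is rebuilt without the boolean WEAK argument (index 3) before the
	 fall-through to the generic call expansion emits it as an external
	 call.  */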
8636
8637 case BUILT_IN_ATOMIC_LOAD_1:
8638 case BUILT_IN_ATOMIC_LOAD_2:
8639 case BUILT_IN_ATOMIC_LOAD_4:
8640 case BUILT_IN_ATOMIC_LOAD_8:
8641 case BUILT_IN_ATOMIC_LOAD_16:
8642 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
8643 target = expand_builtin_atomic_load (mode, exp, target);
8644 if (target)
8645 return target;
8646 break;
8647
8648 case BUILT_IN_ATOMIC_STORE_1:
8649 case BUILT_IN_ATOMIC_STORE_2:
8650 case BUILT_IN_ATOMIC_STORE_4:
8651 case BUILT_IN_ATOMIC_STORE_8:
8652 case BUILT_IN_ATOMIC_STORE_16:
8653 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
8654 target = expand_builtin_atomic_store (mode, exp);
8655 if (target)
8656 return const0_rtx;
8657 break;
8658
8659 case BUILT_IN_ATOMIC_ADD_FETCH_1:
8660 case BUILT_IN_ATOMIC_ADD_FETCH_2:
8661 case BUILT_IN_ATOMIC_ADD_FETCH_4:
8662 case BUILT_IN_ATOMIC_ADD_FETCH_8:
8663 case BUILT_IN_ATOMIC_ADD_FETCH_16:
8664 {
8665 enum built_in_function lib;
8666 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
8667 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
8668 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
8669 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
8670 ignore, lib);
8671 if (target)
8672 return target;
8673 break;
8674 }
8675 case BUILT_IN_ATOMIC_SUB_FETCH_1:
8676 case BUILT_IN_ATOMIC_SUB_FETCH_2:
8677 case BUILT_IN_ATOMIC_SUB_FETCH_4:
8678 case BUILT_IN_ATOMIC_SUB_FETCH_8:
8679 case BUILT_IN_ATOMIC_SUB_FETCH_16:
8680 {
8681 enum built_in_function lib;
8682 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
8683 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
8684 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
8685 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
8686 ignore, lib);
8687 if (target)
8688 return target;
8689 break;
8690 }
8691 case BUILT_IN_ATOMIC_AND_FETCH_1:
8692 case BUILT_IN_ATOMIC_AND_FETCH_2:
8693 case BUILT_IN_ATOMIC_AND_FETCH_4:
8694 case BUILT_IN_ATOMIC_AND_FETCH_8:
8695 case BUILT_IN_ATOMIC_AND_FETCH_16:
8696 {
8697 enum built_in_function lib;
8698 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
8699 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
8700 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
8701 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
8702 ignore, lib);
8703 if (target)
8704 return target;
8705 break;
8706 }
8707 case BUILT_IN_ATOMIC_NAND_FETCH_1:
8708 case BUILT_IN_ATOMIC_NAND_FETCH_2:
8709 case BUILT_IN_ATOMIC_NAND_FETCH_4:
8710 case BUILT_IN_ATOMIC_NAND_FETCH_8:
8711 case BUILT_IN_ATOMIC_NAND_FETCH_16:
8712 {
8713 enum built_in_function lib;
8714 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
8715 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
8716 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
8717 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
8718 ignore, lib);
8719 if (target)
8720 return target;
8721 break;
8722 }
8723 case BUILT_IN_ATOMIC_XOR_FETCH_1:
8724 case BUILT_IN_ATOMIC_XOR_FETCH_2:
8725 case BUILT_IN_ATOMIC_XOR_FETCH_4:
8726 case BUILT_IN_ATOMIC_XOR_FETCH_8:
8727 case BUILT_IN_ATOMIC_XOR_FETCH_16:
8728 {
8729 enum built_in_function lib;
8730 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
8731 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
8732 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
8733 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
8734 ignore, lib);
8735 if (target)
8736 return target;
8737 break;
8738 }
8739 case BUILT_IN_ATOMIC_OR_FETCH_1:
8740 case BUILT_IN_ATOMIC_OR_FETCH_2:
8741 case BUILT_IN_ATOMIC_OR_FETCH_4:
8742 case BUILT_IN_ATOMIC_OR_FETCH_8:
8743 case BUILT_IN_ATOMIC_OR_FETCH_16:
8744 {
8745 enum built_in_function lib;
8746 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
8747 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
8748 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
8749 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
8750 ignore, lib);
8751 if (target)
8752 return target;
8753 break;
8754 }
8755 case BUILT_IN_ATOMIC_FETCH_ADD_1:
8756 case BUILT_IN_ATOMIC_FETCH_ADD_2:
8757 case BUILT_IN_ATOMIC_FETCH_ADD_4:
8758 case BUILT_IN_ATOMIC_FETCH_ADD_8:
8759 case BUILT_IN_ATOMIC_FETCH_ADD_16:
8760 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
8761 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
8762 ignore, BUILT_IN_NONE);
8763 if (target)
8764 return target;
8765 break;
8766
8767 case BUILT_IN_ATOMIC_FETCH_SUB_1:
8768 case BUILT_IN_ATOMIC_FETCH_SUB_2:
8769 case BUILT_IN_ATOMIC_FETCH_SUB_4:
8770 case BUILT_IN_ATOMIC_FETCH_SUB_8:
8771 case BUILT_IN_ATOMIC_FETCH_SUB_16:
8772 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
8773 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
8774 ignore, BUILT_IN_NONE);
8775 if (target)
8776 return target;
8777 break;
8778
8779 case BUILT_IN_ATOMIC_FETCH_AND_1:
8780 case BUILT_IN_ATOMIC_FETCH_AND_2:
8781 case BUILT_IN_ATOMIC_FETCH_AND_4:
8782 case BUILT_IN_ATOMIC_FETCH_AND_8:
8783 case BUILT_IN_ATOMIC_FETCH_AND_16:
8784 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
8785 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
8786 ignore, BUILT_IN_NONE);
8787 if (target)
8788 return target;
8789 break;
8790
8791 case BUILT_IN_ATOMIC_FETCH_NAND_1:
8792 case BUILT_IN_ATOMIC_FETCH_NAND_2:
8793 case BUILT_IN_ATOMIC_FETCH_NAND_4:
8794 case BUILT_IN_ATOMIC_FETCH_NAND_8:
8795 case BUILT_IN_ATOMIC_FETCH_NAND_16:
8796 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
8797 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
8798 ignore, BUILT_IN_NONE);
8799 if (target)
8800 return target;
8801 break;
8802
8803 case BUILT_IN_ATOMIC_FETCH_XOR_1:
8804 case BUILT_IN_ATOMIC_FETCH_XOR_2:
8805 case BUILT_IN_ATOMIC_FETCH_XOR_4:
8806 case BUILT_IN_ATOMIC_FETCH_XOR_8:
8807 case BUILT_IN_ATOMIC_FETCH_XOR_16:
8808 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
8809 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
8810 ignore, BUILT_IN_NONE);
8811 if (target)
8812 return target;
8813 break;
8814
8815 case BUILT_IN_ATOMIC_FETCH_OR_1:
8816 case BUILT_IN_ATOMIC_FETCH_OR_2:
8817 case BUILT_IN_ATOMIC_FETCH_OR_4:
8818 case BUILT_IN_ATOMIC_FETCH_OR_8:
8819 case BUILT_IN_ATOMIC_FETCH_OR_16:
8820 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
8821 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
8822 ignore, BUILT_IN_NONE);
8823 if (target)
8824 return target;
8825 break;
8826
8827 case BUILT_IN_ATOMIC_TEST_AND_SET:
8828 return expand_builtin_atomic_test_and_set (exp, target);
8829
8830 case BUILT_IN_ATOMIC_CLEAR:
8831 return expand_builtin_atomic_clear (exp);
8832
8833 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8834 return expand_builtin_atomic_always_lock_free (exp);
8835
8836 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8837 target = expand_builtin_atomic_is_lock_free (exp);
8838 if (target)
8839 return target;
8840 break;
8841
8842 case BUILT_IN_ATOMIC_THREAD_FENCE:
8843 expand_builtin_atomic_thread_fence (exp);
8844 return const0_rtx;
8845
8846 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
8847 expand_builtin_atomic_signal_fence (exp);
8848 return const0_rtx;
8849
8850 case BUILT_IN_OBJECT_SIZE:
8851 return expand_builtin_object_size (exp);
8852
8853 case BUILT_IN_MEMCPY_CHK:
8854 case BUILT_IN_MEMPCPY_CHK:
8855 case BUILT_IN_MEMMOVE_CHK:
8856 case BUILT_IN_MEMSET_CHK:
8857 target = expand_builtin_memory_chk (exp, target, mode, fcode);
8858 if (target)
8859 return target;
8860 break;
8861
8862 case BUILT_IN_STRCPY_CHK:
8863 case BUILT_IN_STPCPY_CHK:
8864 case BUILT_IN_STRNCPY_CHK:
8865 case BUILT_IN_STPNCPY_CHK:
8866 case BUILT_IN_STRCAT_CHK:
8867 case BUILT_IN_STRNCAT_CHK:
8868 case BUILT_IN_SNPRINTF_CHK:
8869 case BUILT_IN_VSNPRINTF_CHK:
8870 maybe_emit_chk_warning (exp, fcode);
8871 break;
8872
8873 case BUILT_IN_SPRINTF_CHK:
8874 case BUILT_IN_VSPRINTF_CHK:
8875 maybe_emit_sprintf_chk_warning (exp, fcode);
8876 break;
8877
8878 case BUILT_IN_FREE:
8879 if (warn_free_nonheap_object)
8880 maybe_emit_free_warning (exp);
8881 break;
8882
8883 case BUILT_IN_THREAD_POINTER:
8884 return expand_builtin_thread_pointer (exp, target);
8885
8886 case BUILT_IN_SET_THREAD_POINTER:
8887 expand_builtin_set_thread_pointer (exp);
8888 return const0_rtx;
8889
8890 case BUILT_IN_ACC_ON_DEVICE:
 8891 /* Do a library call if we failed to expand the builtin when
 8892 folding. */
8893 break;
8894
8895 case BUILT_IN_GOACC_PARLEVEL_ID:
8896 case BUILT_IN_GOACC_PARLEVEL_SIZE:
8897 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
8898
8899 case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
8900 return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
8901
8902 case BUILT_IN_SPECULATION_SAFE_VALUE_1:
8903 case BUILT_IN_SPECULATION_SAFE_VALUE_2:
8904 case BUILT_IN_SPECULATION_SAFE_VALUE_4:
8905 case BUILT_IN_SPECULATION_SAFE_VALUE_8:
8906 case BUILT_IN_SPECULATION_SAFE_VALUE_16:
8907 mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
8908 return expand_speculation_safe_value (mode, exp, target, ignore);
8909
8910 default: /* just do library call, if unknown builtin */
8911 break;
8912 }
8913
8914 /* The switch statement above can drop through to cause the function
8915 to be called normally. */
8916 return expand_call (exp, target, ignore);
8917 }
8918
8919 /* Determine whether a tree node represents a call to a built-in
8920 function. If the tree T is a call to a built-in function with
8921 the right number of arguments of the appropriate types, return
8922 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
8923 Otherwise the return value is END_BUILTINS. */
8924
8925 enum built_in_function
 8926 builtin_mathfn_code (const_tree t)
8927 {
8928 const_tree fndecl, arg, parmlist;
8929 const_tree argtype, parmtype;
8930 const_call_expr_arg_iterator iter;
8931
8932 if (TREE_CODE (t) != CALL_EXPR)
8933 return END_BUILTINS;
8934
8935 fndecl = get_callee_fndecl (t);
8936 if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
8937 return END_BUILTINS;
8938
8939 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
8940 init_const_call_expr_arg_iterator (t, &iter);
8941 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
8942 {
8943 /* If a function doesn't take a variable number of arguments,
8944 the last element in the list will have type `void'. */
8945 parmtype = TREE_VALUE (parmlist);
8946 if (VOID_TYPE_P (parmtype))
8947 {
8948 if (more_const_call_expr_args_p (&iter))
8949 return END_BUILTINS;
8950 return DECL_FUNCTION_CODE (fndecl);
8951 }
8952
8953 if (! more_const_call_expr_args_p (&iter))
8954 return END_BUILTINS;
8955
8956 arg = next_const_call_expr_arg (&iter);
8957 argtype = TREE_TYPE (arg);
8958
8959 if (SCALAR_FLOAT_TYPE_P (parmtype))
8960 {
8961 if (! SCALAR_FLOAT_TYPE_P (argtype))
8962 return END_BUILTINS;
8963 }
8964 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
8965 {
8966 if (! COMPLEX_FLOAT_TYPE_P (argtype))
8967 return END_BUILTINS;
8968 }
8969 else if (POINTER_TYPE_P (parmtype))
8970 {
8971 if (! POINTER_TYPE_P (argtype))
8972 return END_BUILTINS;
8973 }
8974 else if (INTEGRAL_TYPE_P (parmtype))
8975 {
8976 if (! INTEGRAL_TYPE_P (argtype))
8977 return END_BUILTINS;
8978 }
8979 else
8980 return END_BUILTINS;
8981 }
8982
8983 /* Variable-length argument list. */
8984 return DECL_FUNCTION_CODE (fndecl);
8985 }
8986
8987 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
8988 evaluate to a constant. */
8989
8990 static tree
 8991 fold_builtin_constant_p (tree arg)
8992 {
8993 /* We return 1 for a numeric type that's known to be a constant
8994 value at compile-time or for an aggregate type that's a
8995 literal constant. */
8996 STRIP_NOPS (arg);
8997
 8998 /* If we know this is a constant, return the constant one. */
8999 if (CONSTANT_CLASS_P (arg)
9000 || (TREE_CODE (arg) == CONSTRUCTOR
9001 && TREE_CONSTANT (arg)))
9002 return integer_one_node;
9003 if (TREE_CODE (arg) == ADDR_EXPR)
9004 {
9005 tree op = TREE_OPERAND (arg, 0);
9006 if (TREE_CODE (op) == STRING_CST
9007 || (TREE_CODE (op) == ARRAY_REF
9008 && integer_zerop (TREE_OPERAND (op, 1))
9009 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
9010 return integer_one_node;
9011 }
9012
9013 /* If this expression has side effects, show we don't know it to be a
 9014 constant. Likewise if it's a pointer or aggregate type, since in
 9015 those cases we only want literals, as those are only optimized
 9016 when generating RTL, not later.
9017 And finally, if we are compiling an initializer, not code, we
9018 need to return a definite result now; there's not going to be any
9019 more optimization done. */
9020 if (TREE_SIDE_EFFECTS (arg)
9021 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
9022 || POINTER_TYPE_P (TREE_TYPE (arg))
9023 || cfun == 0
9024 || folding_initializer
9025 || force_folding_builtin_constant_p)
9026 return integer_zero_node;
9027
9028 return NULL_TREE;
9029 }
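/* Illustrative sketch (not part of the original source), showing the three
   possible outcomes of the folder above:

       __builtin_constant_p (42)	 ->  folded to 1
       __builtin_constant_p ("abc")	 ->  folded to 1
       __builtin_constant_p (ptr)	 ->  folded to 0 (pointer type: only
					     literals are ever accepted)
       __builtin_constant_p (x + y)	 ->  NULL_TREE, i.e. deferred to later
					     passes or RTL expansion

   where ptr, x and y stand for arbitrary non-constant operands.  */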
9030
9031 /* Create builtin_expect or builtin_expect_with_probability
9032 with PRED and EXPECTED as its arguments and return it as a truthvalue.
 9033 The Fortran FE can also produce builtin_expect with PREDICTOR as its third
 9034 argument. builtin_expect_with_probability instead uses its third argument
 9035 as the PROBABILITY value. */
9036
9037 static tree
 9038 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
9039 tree predictor, tree probability)
9040 {
9041 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
9042
9043 fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
9044 : BUILT_IN_EXPECT_WITH_PROBABILITY);
9045 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
9046 ret_type = TREE_TYPE (TREE_TYPE (fn));
9047 pred_type = TREE_VALUE (arg_types);
9048 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
9049
9050 pred = fold_convert_loc (loc, pred_type, pred);
9051 expected = fold_convert_loc (loc, expected_type, expected);
9052
9053 if (probability)
9054 call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
9055 else
9056 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
9057 predictor);
9058
9059 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
9060 build_int_cst (ret_type, 0));
9061 }
9062
9063 /* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3. Return
9064 NULL_TREE if no simplification is possible. */
9065
9066 tree
 9067 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
9068 tree arg3)
9069 {
9070 tree inner, fndecl, inner_arg0;
9071 enum tree_code code;
9072
9073 /* Distribute the expected value over short-circuiting operators.
9074 See through the cast from truthvalue_type_node to long. */
9075 inner_arg0 = arg0;
9076 while (CONVERT_EXPR_P (inner_arg0)
9077 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
9078 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
9079 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
9080
 9081 /* If this is a builtin_expect within a builtin_expect, keep the
 9082 inner one. See through a comparison against a constant. It
 9083 might have been added to create a truthvalue. */
9084 inner = inner_arg0;
9085
9086 if (COMPARISON_CLASS_P (inner)
9087 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
9088 inner = TREE_OPERAND (inner, 0);
9089
9090 if (TREE_CODE (inner) == CALL_EXPR
9091 && (fndecl = get_callee_fndecl (inner))
9092 && (fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
9093 || fndecl_built_in_p (fndecl, BUILT_IN_EXPECT_WITH_PROBABILITY)))
9094 return arg0;
9095
9096 inner = inner_arg0;
9097 code = TREE_CODE (inner);
9098 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
9099 {
9100 tree op0 = TREE_OPERAND (inner, 0);
9101 tree op1 = TREE_OPERAND (inner, 1);
9102 arg1 = save_expr (arg1);
9103
9104 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
9105 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
9106 inner = build2 (code, TREE_TYPE (inner), op0, op1);
9107
9108 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
9109 }
9110
9111 /* If the argument isn't invariant then there's nothing else we can do. */
9112 if (!TREE_CONSTANT (inner_arg0))
9113 return NULL_TREE;
9114
9115 /* If we expect that a comparison against the argument will fold to
9116 a constant return the constant. In practice, this means a true
9117 constant or the address of a non-weak symbol. */
9118 inner = inner_arg0;
9119 STRIP_NOPS (inner);
9120 if (TREE_CODE (inner) == ADDR_EXPR)
9121 {
9122 do
9123 {
9124 inner = TREE_OPERAND (inner, 0);
9125 }
9126 while (TREE_CODE (inner) == COMPONENT_REF
9127 || TREE_CODE (inner) == ARRAY_REF);
9128 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
9129 return NULL_TREE;
9130 }
9131
9132 /* Otherwise, ARG0 already has the proper type for the return value. */
9133 return arg0;
9134 }
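/* Illustrative sketch (not part of the original source): with the
   short-circuit distribution above,

       __builtin_expect (a && b, 1)

   is rewritten roughly as

       (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0)

   so each sub-condition carries its own prediction; a and b stand for
   arbitrary truth values here.  */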
9135
9136 /* Fold a call to __builtin_classify_type with argument ARG. */
9137
9138 static tree
 9139 fold_builtin_classify_type (tree arg)
9140 {
9141 if (arg == 0)
9142 return build_int_cst (integer_type_node, no_type_class);
9143
9144 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
9145 }
9146
9147 /* Fold a call to __builtin_strlen with argument ARG. */
9148
9149 static tree
 9150 fold_builtin_strlen (location_t loc, tree type, tree arg)
9151 {
9152 if (!validate_arg (arg, POINTER_TYPE))
9153 return NULL_TREE;
9154 else
9155 {
9156 c_strlen_data lendata = { };
9157 tree len = c_strlen (arg, 0, &lendata);
9158
9159 if (len)
9160 return fold_convert_loc (loc, type, len);
9161
9162 if (!lendata.decl)
9163 c_strlen (arg, 1, &lendata);
9164
9165 if (lendata.decl)
9166 {
9167 if (EXPR_HAS_LOCATION (arg))
9168 loc = EXPR_LOCATION (arg);
9169 else if (loc == UNKNOWN_LOCATION)
9170 loc = input_location;
9171 warn_string_no_nul (loc, "strlen", arg, lendata.decl);
9172 }
9173
9174 return NULL_TREE;
9175 }
9176 }
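/* Illustrative sketch (not part of the original source): when the argument
   is a string literal, c_strlen computes the length directly, e.g.

       __builtin_strlen ("hello")   ->   (size_t) 5

   while a non-constant argument is left alone (NULL_TREE), possibly after
   warning about a missing terminating nul when LENDATA.DECL identifies the
   offending array.  */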
9177
9178 /* Fold a call to __builtin_inf or __builtin_huge_val. */
9179
9180 static tree
 9181 fold_builtin_inf (location_t loc, tree type, int warn)
9182 {
9183 REAL_VALUE_TYPE real;
9184
9185 /* __builtin_inff is intended to be usable to define INFINITY on all
9186 targets. If an infinity is not available, INFINITY expands "to a
9187 positive constant of type float that overflows at translation
9188 time", footnote "In this case, using INFINITY will violate the
9189 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
9190 Thus we pedwarn to ensure this constraint violation is
9191 diagnosed. */
9192 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
9193 pedwarn (loc, 0, "target format does not support infinity");
9194
9195 real_inf (&real);
9196 return build_real (type, real);
9197 }
9198
9199 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
9200 NULL_TREE if no simplification can be made. */
9201
9202 static tree
 9203 fold_builtin_sincos (location_t loc,
9204 tree arg0, tree arg1, tree arg2)
9205 {
9206 tree type;
9207 tree fndecl, call = NULL_TREE;
9208
9209 if (!validate_arg (arg0, REAL_TYPE)
9210 || !validate_arg (arg1, POINTER_TYPE)
9211 || !validate_arg (arg2, POINTER_TYPE))
9212 return NULL_TREE;
9213
9214 type = TREE_TYPE (arg0);
9215
9216 /* Calculate the result when the argument is a constant. */
9217 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
9218 if (fn == END_BUILTINS)
9219 return NULL_TREE;
9220
9221 /* Canonicalize sincos to cexpi. */
9222 if (TREE_CODE (arg0) == REAL_CST)
9223 {
9224 tree complex_type = build_complex_type (type);
9225 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
9226 }
9227 if (!call)
9228 {
9229 if (!targetm.libc_has_function (function_c99_math_complex)
9230 || !builtin_decl_implicit_p (fn))
9231 return NULL_TREE;
9232 fndecl = builtin_decl_explicit (fn);
9233 call = build_call_expr_loc (loc, fndecl, 1, arg0);
9234 call = builtin_save_expr (call);
9235 }
9236
9237 tree ptype = build_pointer_type (type);
9238 arg1 = fold_convert (ptype, arg1);
9239 arg2 = fold_convert (ptype, arg2);
9240 return build2 (COMPOUND_EXPR, void_type_node,
9241 build2 (MODIFY_EXPR, void_type_node,
9242 build_fold_indirect_ref_loc (loc, arg1),
9243 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
9244 build2 (MODIFY_EXPR, void_type_node,
9245 build_fold_indirect_ref_loc (loc, arg2),
9246 fold_build1_loc (loc, REALPART_EXPR, type, call)));
9247 }
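/* Illustrative sketch (not part of the original source): the
   canonicalization above turns

       sincos (x, &s, &c);

   into roughly

       __complex__ double t = cexpi (x);
       s = __imag__ t;
       c = __real__ t;

   (or a folded constant when X is a REAL_CST), assuming the target's libc
   provides the C99 complex math routines.  */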
9248
9249 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
9250 Return NULL_TREE if no simplification can be made. */
9251
9252 static tree
 9253 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
9254 {
9255 if (!validate_arg (arg1, POINTER_TYPE)
9256 || !validate_arg (arg2, POINTER_TYPE)
9257 || !validate_arg (len, INTEGER_TYPE))
9258 return NULL_TREE;
9259
9260 /* If the LEN parameter is zero, return zero. */
9261 if (integer_zerop (len))
9262 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
9263 arg1, arg2);
9264
9265 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
9266 if (operand_equal_p (arg1, arg2, 0))
9267 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
9268
9269 /* If len parameter is one, return an expression corresponding to
 9270 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
9271 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
9272 {
9273 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
9274 tree cst_uchar_ptr_node
9275 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
9276
9277 tree ind1
9278 = fold_convert_loc (loc, integer_type_node,
9279 build1 (INDIRECT_REF, cst_uchar_node,
9280 fold_convert_loc (loc,
9281 cst_uchar_ptr_node,
9282 arg1)));
9283 tree ind2
9284 = fold_convert_loc (loc, integer_type_node,
9285 build1 (INDIRECT_REF, cst_uchar_node,
9286 fold_convert_loc (loc,
9287 cst_uchar_ptr_node,
9288 arg2)));
9289 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
9290 }
9291
9292 return NULL_TREE;
9293 }
9294
9295 /* Fold a call to builtin isascii with argument ARG. */
9296
9297 static tree
 9298 fold_builtin_isascii (location_t loc, tree arg)
9299 {
9300 if (!validate_arg (arg, INTEGER_TYPE))
9301 return NULL_TREE;
9302 else
9303 {
9304 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
9305 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
9306 build_int_cst (integer_type_node,
9307 ~ (unsigned HOST_WIDE_INT) 0x7f));
9308 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
9309 arg, integer_zero_node);
9310 }
9311 }
9312
9313 /* Fold a call to builtin toascii with argument ARG. */
9314
9315 static tree
 9316 fold_builtin_toascii (location_t loc, tree arg)
9317 {
9318 if (!validate_arg (arg, INTEGER_TYPE))
9319 return NULL_TREE;
9320
9321 /* Transform toascii(c) -> (c & 0x7f). */
9322 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
9323 build_int_cst (integer_type_node, 0x7f));
9324 }
9325
9326 /* Fold a call to builtin isdigit with argument ARG. */
9327
9328 static tree
 9329 fold_builtin_isdigit (location_t loc, tree arg)
9330 {
9331 if (!validate_arg (arg, INTEGER_TYPE))
9332 return NULL_TREE;
9333 else
9334 {
9335 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
9336 /* According to the C standard, isdigit is unaffected by locale.
9337 However, it definitely is affected by the target character set. */
9338 unsigned HOST_WIDE_INT target_digit0
9339 = lang_hooks.to_target_charset ('0');
9340
9341 if (target_digit0 == 0)
9342 return NULL_TREE;
9343
9344 arg = fold_convert_loc (loc, unsigned_type_node, arg);
9345 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
9346 build_int_cst (unsigned_type_node, target_digit0));
9347 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
9348 build_int_cst (unsigned_type_node, 9));
9349 }
9350 }
9351
9352 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
9353
9354 static tree
 9355 fold_builtin_fabs (location_t loc, tree arg, tree type)
9356 {
9357 if (!validate_arg (arg, REAL_TYPE))
9358 return NULL_TREE;
9359
9360 arg = fold_convert_loc (loc, type, arg);
9361 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9362 }
9363
9364 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
9365
9366 static tree
 9367 fold_builtin_abs (location_t loc, tree arg, tree type)
9368 {
9369 if (!validate_arg (arg, INTEGER_TYPE))
9370 return NULL_TREE;
9371
9372 arg = fold_convert_loc (loc, type, arg);
9373 return fold_build1_loc (loc, ABS_EXPR, type, arg);
9374 }
9375
9376 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
9377
9378 static tree
 9379 fold_builtin_carg (location_t loc, tree arg, tree type)
9380 {
9381 if (validate_arg (arg, COMPLEX_TYPE)
9382 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
9383 {
9384 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
9385
9386 if (atan2_fn)
9387 {
9388 tree new_arg = builtin_save_expr (arg);
9389 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
9390 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
9391 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
9392 }
9393 }
9394
9395 return NULL_TREE;
9396 }
9397
9398 /* Fold a call to builtin frexp, we can assume the base is 2. */
9399
9400 static tree
 9401 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
9402 {
9403 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9404 return NULL_TREE;
9405
9406 STRIP_NOPS (arg0);
9407
9408 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9409 return NULL_TREE;
9410
9411 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9412
9413 /* Proceed if a valid pointer type was passed in. */
9414 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
9415 {
9416 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9417 tree frac, exp;
9418
9419 switch (value->cl)
9420 {
9421 case rvc_zero:
9422 /* For +-0, return (*exp = 0, +-0). */
9423 exp = integer_zero_node;
9424 frac = arg0;
9425 break;
9426 case rvc_nan:
9427 case rvc_inf:
9428 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
9429 return omit_one_operand_loc (loc, rettype, arg0, arg1);
9430 case rvc_normal:
9431 {
9432 /* Since the frexp function always expects base 2, and in
9433 GCC normalized significands are already in the range
9434 [0.5, 1.0), we have exactly what frexp wants. */
9435 REAL_VALUE_TYPE frac_rvt = *value;
9436 SET_REAL_EXP (&frac_rvt, 0);
9437 frac = build_real (rettype, frac_rvt);
9438 exp = build_int_cst (integer_type_node, REAL_EXP (value));
9439 }
9440 break;
9441 default:
9442 gcc_unreachable ();
9443 }
9444
 9445 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
9446 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
9447 TREE_SIDE_EFFECTS (arg1) = 1;
9448 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
9449 }
9450
9451 return NULL_TREE;
9452 }
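/* Illustrative sketch (not part of the original source): for a constant
   argument the folder above produces a COMPOUND_EXPR, e.g.

       frexp (4.0, &e)   ->   (*e = 3, 0.5)

   since 4.0 == 0.5 * 2^3 and GCC keeps normalized significands in
   [0.5, 1.0).  */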
9453
9454 /* Fold a call to builtin modf. */
9455
9456 static tree
 9457 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
9458 {
9459 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
9460 return NULL_TREE;
9461
9462 STRIP_NOPS (arg0);
9463
9464 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
9465 return NULL_TREE;
9466
9467 arg1 = build_fold_indirect_ref_loc (loc, arg1);
9468
9469 /* Proceed if a valid pointer type was passed in. */
9470 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
9471 {
9472 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
9473 REAL_VALUE_TYPE trunc, frac;
9474
9475 switch (value->cl)
9476 {
9477 case rvc_nan:
9478 case rvc_zero:
9479 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
9480 trunc = frac = *value;
9481 break;
9482 case rvc_inf:
9483 /* For +-Inf, return (*arg1 = arg0, +-0). */
9484 frac = dconst0;
9485 frac.sign = value->sign;
9486 trunc = *value;
9487 break;
9488 case rvc_normal:
9489 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
9490 real_trunc (&trunc, VOIDmode, value);
9491 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
9492 /* If the original number was negative and already
9493 integral, then the fractional part is -0.0. */
9494 if (value->sign && frac.cl == rvc_zero)
9495 frac.sign = value->sign;
9496 break;
9497 }
9498
9499 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
9500 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
9501 build_real (rettype, trunc));
9502 TREE_SIDE_EFFECTS (arg1) = 1;
9503 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
9504 build_real (rettype, frac));
9505 }
9506
9507 return NULL_TREE;
9508 }
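/* Illustrative sketch (not part of the original source): the constant
   folding above yields, for example,

       modf (2.5, &i)    ->   (*i = 2.0, 0.5)
       modf (-3.0, &i)   ->   (*i = -3.0, -0.0)

   the second line showing the negative-and-already-integral case, where the
   fractional part keeps the sign of the input.  */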
9509
9510 /* Given a location LOC, an interclass builtin function decl FNDECL
 9511 and its single argument ARG, return a folded expression computing
 9512 the same, or NULL_TREE if we either couldn't or didn't want to fold
 9513 (the latter happens if there's an RTL instruction available). */
9514
9515 static tree
 9516 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
9517 {
9518 machine_mode mode;
9519
9520 if (!validate_arg (arg, REAL_TYPE))
9521 return NULL_TREE;
9522
9523 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
9524 return NULL_TREE;
9525
9526 mode = TYPE_MODE (TREE_TYPE (arg));
9527
9528 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
9529
9530 /* If there is no optab, try generic code. */
9531 switch (DECL_FUNCTION_CODE (fndecl))
9532 {
9533 tree result;
9534
9535 CASE_FLT_FN (BUILT_IN_ISINF):
9536 {
9537 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
9538 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9539 tree type = TREE_TYPE (arg);
9540 REAL_VALUE_TYPE r;
9541 char buf[128];
9542
9543 if (is_ibm_extended)
9544 {
9545 /* NaN and Inf are encoded in the high-order double value
9546 only. The low-order value is not significant. */
9547 type = double_type_node;
9548 mode = DFmode;
9549 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9550 }
9551 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
9552 real_from_string (&r, buf);
9553 result = build_call_expr (isgr_fn, 2,
9554 fold_build1_loc (loc, ABS_EXPR, type, arg),
9555 build_real (type, r));
9556 return result;
9557 }
9558 CASE_FLT_FN (BUILT_IN_FINITE):
9559 case BUILT_IN_ISFINITE:
9560 {
9561 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
9562 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9563 tree type = TREE_TYPE (arg);
9564 REAL_VALUE_TYPE r;
9565 char buf[128];
9566
9567 if (is_ibm_extended)
9568 {
9569 /* NaN and Inf are encoded in the high-order double value
9570 only. The low-order value is not significant. */
9571 type = double_type_node;
9572 mode = DFmode;
9573 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9574 }
9575 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
9576 real_from_string (&r, buf);
9577 result = build_call_expr (isle_fn, 2,
9578 fold_build1_loc (loc, ABS_EXPR, type, arg),
9579 build_real (type, r));
9580 /*result = fold_build2_loc (loc, UNGT_EXPR,
9581 TREE_TYPE (TREE_TYPE (fndecl)),
9582 fold_build1_loc (loc, ABS_EXPR, type, arg),
9583 build_real (type, r));
9584 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9585 TREE_TYPE (TREE_TYPE (fndecl)),
9586 result);*/
9587 return result;
9588 }
9589 case BUILT_IN_ISNORMAL:
9590 {
9591 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9592 islessequal(fabs(x),DBL_MAX). */
9593 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9594 tree type = TREE_TYPE (arg);
9595 tree orig_arg, max_exp, min_exp;
9596 machine_mode orig_mode = mode;
9597 REAL_VALUE_TYPE rmax, rmin;
9598 char buf[128];
9599
9600 orig_arg = arg = builtin_save_expr (arg);
9601 if (is_ibm_extended)
9602 {
9603 /* Use double to test the normal range of IBM extended
9604 precision. Emin for IBM extended precision is
9605 different to emin for IEEE double, being 53 higher
9606 since the low double exponent is at least 53 lower
9607 than the high double exponent. */
9608 type = double_type_node;
9609 mode = DFmode;
9610 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9611 }
9612 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
9613
9614 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
9615 real_from_string (&rmax, buf);
9616 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
9617 real_from_string (&rmin, buf);
9618 max_exp = build_real (type, rmax);
9619 min_exp = build_real (type, rmin);
9620
9621 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
9622 if (is_ibm_extended)
9623 {
9624 /* Testing the high end of the range is done just using
9625 the high double, using the same test as isfinite().
9626 For the subnormal end of the range we first test the
9627 high double, then if its magnitude is equal to the
9628 limit of 0x1p-969, we test whether the low double is
9629 non-zero and opposite sign to the high double. */
9630 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
9631 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9632 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
9633 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
9634 arg, min_exp);
9635 tree as_complex = build1 (VIEW_CONVERT_EXPR,
9636 complex_double_type_node, orig_arg);
9637 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
9638 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
9639 tree zero = build_real (type, dconst0);
9640 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
9641 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
9642 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
9643 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
9644 fold_build3 (COND_EXPR,
9645 integer_type_node,
9646 hilt, logt, lolt));
9647 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
9648 eq_min, ok_lo);
9649 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
9650 gt_min, eq_min);
9651 }
9652 else
9653 {
9654 tree const isge_fn
9655 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9656 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
9657 }
9658 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
9659 max_exp, min_exp);
9660 return result;
9661 }
9662 default:
9663 break;
9664 }
9665
9666 return NULL_TREE;
9667 }
9668
9669 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
9670 ARG is the argument for the call. */
9671
9672 static tree
 9673 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9674 {
9675 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9676
9677 if (!validate_arg (arg, REAL_TYPE))
9678 return NULL_TREE;
9679
9680 switch (builtin_index)
9681 {
9682 case BUILT_IN_ISINF:
9683 if (!HONOR_INFINITIES (arg))
9684 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9685
9686 return NULL_TREE;
9687
9688 case BUILT_IN_ISINF_SIGN:
9689 {
9690 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9691 /* In a boolean context, GCC will fold the inner COND_EXPR to
9692 1. So e.g. "if (isinf_sign(x))" would be folded to just
9693 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9694 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
9695 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9696 tree tmp = NULL_TREE;
9697
9698 arg = builtin_save_expr (arg);
9699
9700 if (signbit_fn && isinf_fn)
9701 {
9702 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9703 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9704
9705 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9706 signbit_call, integer_zero_node);
9707 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9708 isinf_call, integer_zero_node);
9709
9710 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9711 integer_minus_one_node, integer_one_node);
9712 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9713 isinf_call, tmp,
9714 integer_zero_node);
9715 }
9716
9717 return tmp;
9718 }
9719
9720 case BUILT_IN_ISFINITE:
9721 if (!HONOR_NANS (arg)
9722 && !HONOR_INFINITIES (arg))
9723 return omit_one_operand_loc (loc, type, integer_one_node, arg);
9724
9725 return NULL_TREE;
9726
9727 case BUILT_IN_ISNAN:
9728 if (!HONOR_NANS (arg))
9729 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9730
9731 {
9732 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
9733 if (is_ibm_extended)
9734 {
9735 /* NaN and Inf are encoded in the high-order double value
9736 only. The low-order value is not significant. */
9737 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
9738 }
9739 }
9740 arg = builtin_save_expr (arg);
9741 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9742
9743 default:
9744 gcc_unreachable ();
9745 }
9746 }
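/* Illustrative sketch (not part of the original source): when NaNs are
   honored, the ISNAN case above reduces to a self-comparison,

       __builtin_isnan (x)   ->   x unord x   (UNORDERED_EXPR)

   which is true exactly when X is a NaN; when NaNs are not honored (e.g.
   under -ffinite-math-only) it folds straight to 0 instead.  */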
9747
9748 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9749 This builtin will generate code to return the appropriate floating
9750 point classification depending on the value of the floating point
9751 number passed in. The possible return values must be supplied as
9752 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
 9753 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
9754 one floating point argument which is "type generic". */
9755
9756 static tree
 9757 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
9758 {
9759 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9760 arg, type, res, tmp;
9761 machine_mode mode;
9762 REAL_VALUE_TYPE r;
9763 char buf[128];
9764
9765 /* Verify the required arguments in the original call. */
9766 if (nargs != 6
9767 || !validate_arg (args[0], INTEGER_TYPE)
9768 || !validate_arg (args[1], INTEGER_TYPE)
9769 || !validate_arg (args[2], INTEGER_TYPE)
9770 || !validate_arg (args[3], INTEGER_TYPE)
9771 || !validate_arg (args[4], INTEGER_TYPE)
9772 || !validate_arg (args[5], REAL_TYPE))
9773 return NULL_TREE;
9774
9775 fp_nan = args[0];
9776 fp_infinite = args[1];
9777 fp_normal = args[2];
9778 fp_subnormal = args[3];
9779 fp_zero = args[4];
9780 arg = args[5];
9781 type = TREE_TYPE (arg);
9782 mode = TYPE_MODE (type);
9783 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9784
9785 /* fpclassify(x) ->
9786 isnan(x) ? FP_NAN :
9787 (fabs(x) == Inf ? FP_INFINITE :
9788 (fabs(x) >= DBL_MIN ? FP_NORMAL :
9789 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
9790
9791 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9792 build_real (type, dconst0));
9793 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9794 tmp, fp_zero, fp_subnormal);
9795
9796 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9797 real_from_string (&r, buf);
9798 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9799 arg, build_real (type, r));
9800 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9801
9802 if (HONOR_INFINITIES (mode))
9803 {
9804 real_inf (&r);
9805 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9806 build_real (type, r));
9807 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9808 fp_infinite, res);
9809 }
9810
9811 if (HONOR_NANS (mode))
9812 {
9813 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9814 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9815 }
9816
9817 return res;
9818 }
9819
9820 /* Fold a call to an unordered comparison function such as
9821 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
9822 being called and ARG0 and ARG1 are the arguments for the call.
9823 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9824 the opposite of the desired result. UNORDERED_CODE is used
9825 for modes that can hold NaNs and ORDERED_CODE is used for
9826 the rest. */
9827
9828 static tree
 9829 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9830 enum tree_code unordered_code,
9831 enum tree_code ordered_code)
9832 {
9833 tree type = TREE_TYPE (TREE_TYPE (fndecl));
9834 enum tree_code code;
9835 tree type0, type1;
9836 enum tree_code code0, code1;
9837 tree cmp_type = NULL_TREE;
9838
9839 type0 = TREE_TYPE (arg0);
9840 type1 = TREE_TYPE (arg1);
9841
9842 code0 = TREE_CODE (type0);
9843 code1 = TREE_CODE (type1);
9844
9845 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9846 /* Choose the wider of two real types. */
9847 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9848 ? type0 : type1;
9849 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9850 cmp_type = type0;
9851 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9852 cmp_type = type1;
9853
9854 arg0 = fold_convert_loc (loc, cmp_type, arg0);
9855 arg1 = fold_convert_loc (loc, cmp_type, arg1);
9856
9857 if (unordered_code == UNORDERED_EXPR)
9858 {
9859 if (!HONOR_NANS (arg0))
9860 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9861 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9862 }
9863
9864 code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
9865 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9866 fold_build2_loc (loc, code, type, arg0, arg1));
9867 }
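/* Illustrative sketch (not part of the original source): with NaNs honored
   the helper above expands, for example,

       __builtin_isgreater (x, y)   ->   !(x unle y)

   i.e. the TRUTH_NOT_EXPR of the opposite, unordered comparison; when NaNs
   are not honored the plain ordered code (here LE_EXPR) is negated
   instead.  */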
9868
9869 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
 9870 arithmetic if it can never overflow, or into internal functions that
 9871 return both the result of the arithmetic and an overflow boolean flag in
 9872 a complex integer result, or some other check for overflow.
9873 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
9874 checking part of that. */
9875
9876 static tree
 9877 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9878 tree arg0, tree arg1, tree arg2)
9879 {
9880 enum internal_fn ifn = IFN_LAST;
9881 /* The code of the expression corresponding to the built-in. */
9882 enum tree_code opcode = ERROR_MARK;
9883 bool ovf_only = false;
9884
9885 switch (fcode)
9886 {
9887 case BUILT_IN_ADD_OVERFLOW_P:
9888 ovf_only = true;
9889 /* FALLTHRU */
9890 case BUILT_IN_ADD_OVERFLOW:
9891 case BUILT_IN_SADD_OVERFLOW:
9892 case BUILT_IN_SADDL_OVERFLOW:
9893 case BUILT_IN_SADDLL_OVERFLOW:
9894 case BUILT_IN_UADD_OVERFLOW:
9895 case BUILT_IN_UADDL_OVERFLOW:
9896 case BUILT_IN_UADDLL_OVERFLOW:
9897 opcode = PLUS_EXPR;
9898 ifn = IFN_ADD_OVERFLOW;
9899 break;
9900 case BUILT_IN_SUB_OVERFLOW_P:
9901 ovf_only = true;
9902 /* FALLTHRU */
9903 case BUILT_IN_SUB_OVERFLOW:
9904 case BUILT_IN_SSUB_OVERFLOW:
9905 case BUILT_IN_SSUBL_OVERFLOW:
9906 case BUILT_IN_SSUBLL_OVERFLOW:
9907 case BUILT_IN_USUB_OVERFLOW:
9908 case BUILT_IN_USUBL_OVERFLOW:
9909 case BUILT_IN_USUBLL_OVERFLOW:
9910 opcode = MINUS_EXPR;
9911 ifn = IFN_SUB_OVERFLOW;
9912 break;
9913 case BUILT_IN_MUL_OVERFLOW_P:
9914 ovf_only = true;
9915 /* FALLTHRU */
9916 case BUILT_IN_MUL_OVERFLOW:
9917 case BUILT_IN_SMUL_OVERFLOW:
9918 case BUILT_IN_SMULL_OVERFLOW:
9919 case BUILT_IN_SMULLL_OVERFLOW:
9920 case BUILT_IN_UMUL_OVERFLOW:
9921 case BUILT_IN_UMULL_OVERFLOW:
9922 case BUILT_IN_UMULLL_OVERFLOW:
9923 opcode = MULT_EXPR;
9924 ifn = IFN_MUL_OVERFLOW;
9925 break;
9926 default:
9927 gcc_unreachable ();
9928 }
9929
9930 /* For the "generic" overloads, the first two arguments can have different
9931 types and the last argument determines the target type to use to check
9932 for overflow. The arguments of the other overloads all have the same
9933 type. */
9934 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
9935
9936 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
9937 arguments are constant, attempt to fold the built-in call into a constant
9938 expression indicating whether or not it detected an overflow. */
9939 if (ovf_only
9940 && TREE_CODE (arg0) == INTEGER_CST
9941 && TREE_CODE (arg1) == INTEGER_CST)
9942 /* Perform the computation in the target type and check for overflow. */
9943 return omit_one_operand_loc (loc, boolean_type_node,
9944 arith_overflowed_p (opcode, type, arg0, arg1)
9945 ? boolean_true_node : boolean_false_node,
9946 arg2);
9947
9948 tree intres, ovfres;
9949 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9950 {
9951 intres = fold_binary_loc (loc, opcode, type,
9952 fold_convert_loc (loc, type, arg0),
9953 fold_convert_loc (loc, type, arg1));
9954 if (TREE_OVERFLOW (intres))
9955 intres = drop_tree_overflow (intres);
9956 ovfres = (arith_overflowed_p (opcode, type, arg0, arg1)
9957 ? boolean_true_node : boolean_false_node);
9958 }
9959 else
9960 {
9961 tree ctype = build_complex_type (type);
9962 tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
9963 arg0, arg1);
9964 tree tgt = save_expr (call);
9965 intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9966 ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9967 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9968 }
9969
9970 if (ovf_only)
9971 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
9972
9973 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9974 tree store
9975 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9976 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
9977 }
9978
9979 /* Fold a call to __builtin_FILE to a constant string. */
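/* Editorial example: in a translation unit compiled from "foo.c",

     const char *f = __builtin_FILE ();

   folds to the string literal "foo.c" (after any file-prefix-map style
   remapping), mirroring the preprocessor __FILE__ macro.  */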
9980
9981 static inline tree
9982 fold_builtin_FILE (location_t loc)
9983 {
9984 if (const char *fname = LOCATION_FILE (loc))
9985 {
9986 /* The documentation says this builtin is equivalent to the preprocessor
9987 __FILE__ macro so it appears appropriate to use the same file prefix
9988 mappings. */
9989 fname = remap_macro_filename (fname);
9990 return build_string_literal (strlen (fname) + 1, fname);
9991 }
9992
9993 return build_string_literal (1, "");
9994 }
9995
9996 /* Fold a call to __builtin_FUNCTION to a constant string. */
9997
9998 static inline tree
9999 fold_builtin_FUNCTION ()
10000 {
10001 const char *name = "";
10002
10003 if (current_function_decl)
10004 name = lang_hooks.decl_printable_name (current_function_decl, 0);
10005
10006 return build_string_literal (strlen (name) + 1, name);
10007 }
10008
10009 /* Fold a call to __builtin_LINE to an integer constant. */
10010
10011 static inline tree
10012 fold_builtin_LINE (location_t loc, tree type)
10013 {
10014 return build_int_cst (type, LOCATION_LINE (loc));
10015 }
10016
10017 /* Fold a call to built-in function FNDECL with 0 arguments.
10018 This function returns NULL_TREE if no simplification was possible. */
10019
10020 static tree
10021 fold_builtin_0 (location_t loc, tree fndecl)
10022 {
10023 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10024 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10025 switch (fcode)
10026 {
10027 case BUILT_IN_FILE:
10028 return fold_builtin_FILE (loc);
10029
10030 case BUILT_IN_FUNCTION:
10031 return fold_builtin_FUNCTION ();
10032
10033 case BUILT_IN_LINE:
10034 return fold_builtin_LINE (loc, type);
10035
10036 CASE_FLT_FN (BUILT_IN_INF):
10037 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
10038 case BUILT_IN_INFD32:
10039 case BUILT_IN_INFD64:
10040 case BUILT_IN_INFD128:
10041 return fold_builtin_inf (loc, type, true);
10042
10043 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
10044 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
10045 return fold_builtin_inf (loc, type, false);
10046
10047 case BUILT_IN_CLASSIFY_TYPE:
10048 return fold_builtin_classify_type (NULL_TREE);
10049
10050 default:
10051 break;
10052 }
10053 return NULL_TREE;
10054 }
10055
10056 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
10057 This function returns NULL_TREE if no simplification was possible. */
10058
10059 static tree
10060 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
10061 {
10062 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10063 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10064
10065 if (TREE_CODE (arg0) == ERROR_MARK)
10066 return NULL_TREE;
10067
10068 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
10069 return ret;
10070
10071 switch (fcode)
10072 {
10073 case BUILT_IN_CONSTANT_P:
10074 {
10075 tree val = fold_builtin_constant_p (arg0);
10076
10077 /* Gimplification will pull the CALL_EXPR for the builtin out of
10078 an if condition. When not optimizing, we'll not CSE it back.
10079 To avoid regressions such as link errors, return false now. */
10080 if (!val && !optimize)
10081 val = integer_zero_node;
10082
10083 return val;
10084 }
10085
10086 case BUILT_IN_CLASSIFY_TYPE:
10087 return fold_builtin_classify_type (arg0);
10088
10089 case BUILT_IN_STRLEN:
10090 return fold_builtin_strlen (loc, type, arg0);
10091
10092 CASE_FLT_FN (BUILT_IN_FABS):
10093 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
10094 case BUILT_IN_FABSD32:
10095 case BUILT_IN_FABSD64:
10096 case BUILT_IN_FABSD128:
10097 return fold_builtin_fabs (loc, arg0, type);
10098
10099 case BUILT_IN_ABS:
10100 case BUILT_IN_LABS:
10101 case BUILT_IN_LLABS:
10102 case BUILT_IN_IMAXABS:
10103 return fold_builtin_abs (loc, arg0, type);
10104
10105 CASE_FLT_FN (BUILT_IN_CONJ):
10106 if (validate_arg (arg0, COMPLEX_TYPE)
10107 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10108 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
10109 break;
10110
10111 CASE_FLT_FN (BUILT_IN_CREAL):
10112 if (validate_arg (arg0, COMPLEX_TYPE)
10113 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10114 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
10115 break;
10116
10117 CASE_FLT_FN (BUILT_IN_CIMAG):
10118 if (validate_arg (arg0, COMPLEX_TYPE)
10119 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
10120 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
10121 break;
10122
10123 CASE_FLT_FN (BUILT_IN_CARG):
10124 return fold_builtin_carg (loc, arg0, type);
10125
10126 case BUILT_IN_ISASCII:
10127 return fold_builtin_isascii (loc, arg0);
10128
10129 case BUILT_IN_TOASCII:
10130 return fold_builtin_toascii (loc, arg0);
10131
10132 case BUILT_IN_ISDIGIT:
10133 return fold_builtin_isdigit (loc, arg0);
10134
10135 CASE_FLT_FN (BUILT_IN_FINITE):
10136 case BUILT_IN_FINITED32:
10137 case BUILT_IN_FINITED64:
10138 case BUILT_IN_FINITED128:
10139 case BUILT_IN_ISFINITE:
10140 {
10141 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
10142 if (ret)
10143 return ret;
10144 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10145 }
10146
10147 CASE_FLT_FN (BUILT_IN_ISINF):
10148 case BUILT_IN_ISINFD32:
10149 case BUILT_IN_ISINFD64:
10150 case BUILT_IN_ISINFD128:
10151 {
10152 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
10153 if (ret)
10154 return ret;
10155 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10156 }
10157
10158 case BUILT_IN_ISNORMAL:
10159 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
10160
10161 case BUILT_IN_ISINF_SIGN:
10162 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
10163
10164 CASE_FLT_FN (BUILT_IN_ISNAN):
10165 case BUILT_IN_ISNAND32:
10166 case BUILT_IN_ISNAND64:
10167 case BUILT_IN_ISNAND128:
10168 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
10169
10170 case BUILT_IN_FREE:
10171 if (integer_zerop (arg0))
10172 return build_empty_stmt (loc);
10173 break;
10174
10175 default:
10176 break;
10177 }
10178
10179 return NULL_TREE;
10180
10181 }
10182
10183 /* Folds a call EXPR (which may be null) to built-in function FNDECL
10184 with 2 arguments, ARG0 and ARG1. This function returns NULL_TREE
10185 if no simplification was possible. */
10186
10187 static tree
10188 fold_builtin_2 (location_t loc, tree expr, tree fndecl, tree arg0, tree arg1)
10189 {
10190 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10191 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10192
10193 if (TREE_CODE (arg0) == ERROR_MARK
10194 || TREE_CODE (arg1) == ERROR_MARK)
10195 return NULL_TREE;
10196
10197 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
10198 return ret;
10199
10200 switch (fcode)
10201 {
10202 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
10203 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
10204 if (validate_arg (arg0, REAL_TYPE)
10205 && validate_arg (arg1, POINTER_TYPE))
10206 return do_mpfr_lgamma_r (arg0, arg1, type);
10207 break;
10208
10209 CASE_FLT_FN (BUILT_IN_FREXP):
10210 return fold_builtin_frexp (loc, arg0, arg1, type);
10211
10212 CASE_FLT_FN (BUILT_IN_MODF):
10213 return fold_builtin_modf (loc, arg0, arg1, type);
10214
10215 case BUILT_IN_STRSPN:
10216 return fold_builtin_strspn (loc, expr, arg0, arg1);
10217
10218 case BUILT_IN_STRCSPN:
10219 return fold_builtin_strcspn (loc, expr, arg0, arg1);
10220
10221 case BUILT_IN_STRPBRK:
10222 return fold_builtin_strpbrk (loc, expr, arg0, arg1, type);
10223
10224 case BUILT_IN_EXPECT:
10225 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);
10226
10227 case BUILT_IN_ISGREATER:
10228 return fold_builtin_unordered_cmp (loc, fndecl,
10229 arg0, arg1, UNLE_EXPR, LE_EXPR);
10230 case BUILT_IN_ISGREATEREQUAL:
10231 return fold_builtin_unordered_cmp (loc, fndecl,
10232 arg0, arg1, UNLT_EXPR, LT_EXPR);
10233 case BUILT_IN_ISLESS:
10234 return fold_builtin_unordered_cmp (loc, fndecl,
10235 arg0, arg1, UNGE_EXPR, GE_EXPR);
10236 case BUILT_IN_ISLESSEQUAL:
10237 return fold_builtin_unordered_cmp (loc, fndecl,
10238 arg0, arg1, UNGT_EXPR, GT_EXPR);
10239 case BUILT_IN_ISLESSGREATER:
10240 return fold_builtin_unordered_cmp (loc, fndecl,
10241 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
10242 case BUILT_IN_ISUNORDERED:
10243 return fold_builtin_unordered_cmp (loc, fndecl,
10244 arg0, arg1, UNORDERED_EXPR,
10245 NOP_EXPR);
10246
10247 /* We do the folding for va_start in the expander. */
10248 case BUILT_IN_VA_START:
10249 break;
10250
10251 case BUILT_IN_OBJECT_SIZE:
10252 return fold_builtin_object_size (arg0, arg1);
10253
10254 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10255 return fold_builtin_atomic_always_lock_free (arg0, arg1);
10256
10257 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10258 return fold_builtin_atomic_is_lock_free (arg0, arg1);
10259
10260 default:
10261 break;
10262 }
10263 return NULL_TREE;
10264 }
10265
10266 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
10267 and ARG2.
10268 This function returns NULL_TREE if no simplification was possible. */
10269
10270 static tree
10271 fold_builtin_3 (location_t loc, tree fndecl,
10272 tree arg0, tree arg1, tree arg2)
10273 {
10274 tree type = TREE_TYPE (TREE_TYPE (fndecl));
10275 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10276
10277 if (TREE_CODE (arg0) == ERROR_MARK
10278 || TREE_CODE (arg1) == ERROR_MARK
10279 || TREE_CODE (arg2) == ERROR_MARK)
10280 return NULL_TREE;
10281
10282 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
10283 arg0, arg1, arg2))
10284 return ret;
10285
10286 switch (fcode)
10287 {
10288
10289 CASE_FLT_FN (BUILT_IN_SINCOS):
10290 return fold_builtin_sincos (loc, arg0, arg1, arg2);
10291
10292 CASE_FLT_FN (BUILT_IN_REMQUO):
10293 if (validate_arg (arg0, REAL_TYPE)
10294 && validate_arg (arg1, REAL_TYPE)
10295 && validate_arg (arg2, POINTER_TYPE))
10296 return do_mpfr_remquo (arg0, arg1, arg2);
10297 break;
10298
10299 case BUILT_IN_MEMCMP:
10300 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
10301
10302 case BUILT_IN_EXPECT:
10303 return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);
10304
10305 case BUILT_IN_EXPECT_WITH_PROBABILITY:
10306 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);
10307
10308 case BUILT_IN_ADD_OVERFLOW:
10309 case BUILT_IN_SUB_OVERFLOW:
10310 case BUILT_IN_MUL_OVERFLOW:
10311 case BUILT_IN_ADD_OVERFLOW_P:
10312 case BUILT_IN_SUB_OVERFLOW_P:
10313 case BUILT_IN_MUL_OVERFLOW_P:
10314 case BUILT_IN_SADD_OVERFLOW:
10315 case BUILT_IN_SADDL_OVERFLOW:
10316 case BUILT_IN_SADDLL_OVERFLOW:
10317 case BUILT_IN_SSUB_OVERFLOW:
10318 case BUILT_IN_SSUBL_OVERFLOW:
10319 case BUILT_IN_SSUBLL_OVERFLOW:
10320 case BUILT_IN_SMUL_OVERFLOW:
10321 case BUILT_IN_SMULL_OVERFLOW:
10322 case BUILT_IN_SMULLL_OVERFLOW:
10323 case BUILT_IN_UADD_OVERFLOW:
10324 case BUILT_IN_UADDL_OVERFLOW:
10325 case BUILT_IN_UADDLL_OVERFLOW:
10326 case BUILT_IN_USUB_OVERFLOW:
10327 case BUILT_IN_USUBL_OVERFLOW:
10328 case BUILT_IN_USUBLL_OVERFLOW:
10329 case BUILT_IN_UMUL_OVERFLOW:
10330 case BUILT_IN_UMULL_OVERFLOW:
10331 case BUILT_IN_UMULLL_OVERFLOW:
10332 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
10333
10334 default:
10335 break;
10336 }
10337 return NULL_TREE;
10338 }
10339
10340 /* Folds a call EXPR (which may be null) to built-in function FNDECL.
10341 ARGS is an array of NARGS arguments. IGNORE is true if the result
10342 of the function call is ignored. This function returns NULL_TREE
10343 if no simplification was possible. */
10344
10345 static tree
10346 fold_builtin_n (location_t loc, tree expr, tree fndecl, tree *args,
10347 int nargs, bool)
10348 {
10349 tree ret = NULL_TREE;
10350
10351 switch (nargs)
10352 {
10353 case 0:
10354 ret = fold_builtin_0 (loc, fndecl);
10355 break;
10356 case 1:
10357 ret = fold_builtin_1 (loc, fndecl, args[0]);
10358 break;
10359 case 2:
10360 ret = fold_builtin_2 (loc, expr, fndecl, args[0], args[1]);
10361 break;
10362 case 3:
10363 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
10364 break;
10365 default:
10366 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
10367 break;
10368 }
10369 if (ret)
10370 {
10371 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10372 SET_EXPR_LOCATION (ret, loc);
10373 return ret;
10374 }
10375 return NULL_TREE;
10376 }
10377
10378 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
10379 list ARGS along with N new arguments in NEWARGS. SKIP is the number
10380 of arguments in ARGS to be omitted. OLDNARGS is the number of
10381 elements in ARGS. */
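/* Editorial example: with OLDNARGS == 3, SKIP == 1 and N == 1, a call
   f (a, b, c) is rebuilt as FNDECL (newarg, b, c) -- the N new arguments
   come first, followed by the old arguments minus the first SKIP.  */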
10382
10383 static tree
10384 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
10385 int skip, tree fndecl, int n, va_list newargs)
10386 {
10387 int nargs = oldnargs - skip + n;
10388 tree *buffer;
10389
10390 if (n > 0)
10391 {
10392 int i, j;
10393
10394 buffer = XALLOCAVEC (tree, nargs);
10395 for (i = 0; i < n; i++)
10396 buffer[i] = va_arg (newargs, tree);
10397 for (j = skip; j < oldnargs; j++, i++)
10398 buffer[i] = args[j];
10399 }
10400 else
10401 buffer = args + skip;
10402
10403 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
10404 }
10405
10406 /* Return true if FNDECL shouldn't be folded right now.
10407 If a built-in function has an inline attribute always_inline
10408 wrapper, defer folding it until after always_inline functions have
10409 been inlined; otherwise e.g. -D_FORTIFY_SOURCE checking
10410 might not be performed. */
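/* A typical trigger is a fortified glibc-style wrapper (editorial
   sketch, not code from this file):

     extern __inline __attribute__ ((__always_inline__)) char *
     strcpy (char *dest, const char *src)
     {
       return __builtin___strcpy_chk (dest, src,
                                      __builtin_object_size (dest, 1));
     }

   Folding the strcpy built-in before the wrapper is inlined would
   bypass the object-size check.  */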
10411
10412 bool
10413 avoid_folding_inline_builtin (tree fndecl)
10414 {
10415 return (DECL_DECLARED_INLINE_P (fndecl)
10416 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
10417 && cfun
10418 && !cfun->always_inline_functions_inlined
10419 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
10420 }
10421
10422 /* A wrapper function for builtin folding that prevents warnings for
10423 "statement without effect" and the like, caused by removing the
10424 call node earlier than the warning is generated. */
10425
10426 tree
10427 fold_call_expr (location_t loc, tree exp, bool ignore)
10428 {
10429 tree ret = NULL_TREE;
10430 tree fndecl = get_callee_fndecl (exp);
10431 if (fndecl && fndecl_built_in_p (fndecl)
10432 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
10433 yet. Defer folding until we see all the arguments
10434 (after inlining). */
10435 && !CALL_EXPR_VA_ARG_PACK (exp))
10436 {
10437 int nargs = call_expr_nargs (exp);
10438
10439 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
10440 instead the last argument is __builtin_va_arg_pack (). Defer folding
10441 even in that case, until arguments are finalized. */
10442 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
10443 {
10444 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
10445 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
10446 return NULL_TREE;
10447 }
10448
10449 if (avoid_folding_inline_builtin (fndecl))
10450 return NULL_TREE;
10451
10452 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10453 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
10454 CALL_EXPR_ARGP (exp), ignore);
10455 else
10456 {
10457 tree *args = CALL_EXPR_ARGP (exp);
10458 ret = fold_builtin_n (loc, exp, fndecl, args, nargs, ignore);
10459 if (ret)
10460 return ret;
10461 }
10462 }
10463 return NULL_TREE;
10464 }
10465
10466 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
10467 N arguments are passed in the array ARGARRAY. Return a folded
10468 expression or NULL_TREE if no simplification was possible. */
10469
10470 tree
10471 fold_builtin_call_array (location_t loc, tree,
10472 tree fn,
10473 int n,
10474 tree *argarray)
10475 {
10476 if (TREE_CODE (fn) != ADDR_EXPR)
10477 return NULL_TREE;
10478
10479 tree fndecl = TREE_OPERAND (fn, 0);
10480 if (TREE_CODE (fndecl) == FUNCTION_DECL
10481 && fndecl_built_in_p (fndecl))
10482 {
10483 /* If last argument is __builtin_va_arg_pack (), arguments to this
10484 function are not finalized yet. Defer folding until they are. */
10485 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
10486 {
10487 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
10488 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
10489 return NULL_TREE;
10490 }
10491 if (avoid_folding_inline_builtin (fndecl))
10492 return NULL_TREE;
10493 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10494 return targetm.fold_builtin (fndecl, n, argarray, false);
10495 else
10496 return fold_builtin_n (loc, NULL_TREE, fndecl, argarray, n, false);
10497 }
10498
10499 return NULL_TREE;
10500 }
10501
10502 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
10503 along with N new arguments specified as the "..." parameters. SKIP
10504 is the number of arguments in EXP to be omitted. This function is used
10505 to do varargs-to-varargs transformations. */
10506
10507 static tree
10508 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
10509 {
10510 va_list ap;
10511 tree t;
10512
10513 va_start (ap, n);
10514 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
10515 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
10516 va_end (ap);
10517
10518 return t;
10519 }
10520
10521 /* Validate a single argument ARG against a tree code CODE representing
10522 a type. Return true when argument is valid. */
10523
10524 static bool
10525 validate_arg (const_tree arg, enum tree_code code)
10526 {
10527 if (!arg)
10528 return false;
10529 else if (code == POINTER_TYPE)
10530 return POINTER_TYPE_P (TREE_TYPE (arg));
10531 else if (code == INTEGER_TYPE)
10532 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
10533 return code == TREE_CODE (TREE_TYPE (arg));
10534 }
10535
10536 /* This function validates the types of a function call argument list
10537 against a specified list of tree_codes. If the last specifier is a 0,
10538 that represents an ellipsis; otherwise the last specifier must be a
10539 VOID_TYPE.
10540
10541 This is the GIMPLE version of validate_arglist. Eventually we want to
10542 completely convert builtins.c to work from GIMPLEs and the tree based
10543 validate_arglist will then be removed. */
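/* Editorial usage example:

     validate_gimple_arglist (call, REAL_TYPE, POINTER_TYPE, VOID_TYPE)

   accepts exactly one floating-point argument followed by one pointer
   argument; ending the list with 0 instead of VOID_TYPE would allow any
   number of further arguments.  */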
10544
10545 bool
10546 validate_gimple_arglist (const gcall *call, ...)
10547 {
10548 enum tree_code code;
10549 bool res = 0;
10550 va_list ap;
10551 const_tree arg;
10552 size_t i;
10553
10554 va_start (ap, call);
10555 i = 0;
10556
10557 do
10558 {
10559 code = (enum tree_code) va_arg (ap, int);
10560 switch (code)
10561 {
10562 case 0:
10563 /* This signifies an ellipsis; any further arguments are all ok. */
10564 res = true;
10565 goto end;
10566 case VOID_TYPE:
10567 /* This signifies an endlink; if no arguments remain, return
10568 true, otherwise return false. */
10569 res = (i == gimple_call_num_args (call));
10570 goto end;
10571 default:
10572 /* If no parameters remain or the parameter's code does not
10573 match the specified code, return false. Otherwise continue
10574 checking any remaining arguments. */
10575 arg = gimple_call_arg (call, i++);
10576 if (!validate_arg (arg, code))
10577 goto end;
10578 break;
10579 }
10580 }
10581 while (1);
10582
10583 /* We need gotos here since we can only have one VA_CLOSE in a
10584 function. */
10585 end: ;
10586 va_end (ap);
10587
10588 return res;
10589 }
10590
10591 /* Default target-specific builtin expander that does nothing. */
10592
10593 rtx
10594 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10595 rtx target ATTRIBUTE_UNUSED,
10596 rtx subtarget ATTRIBUTE_UNUSED,
10597 machine_mode mode ATTRIBUTE_UNUSED,
10598 int ignore ATTRIBUTE_UNUSED)
10599 {
10600 return NULL_RTX;
10601 }
10602
10603 /* Returns true if EXP represents data that would potentially reside
10604 in a readonly section. */
10605
10606 bool
10607 readonly_data_expr (tree exp)
10608 {
10609 STRIP_NOPS (exp);
10610
10611 if (TREE_CODE (exp) != ADDR_EXPR)
10612 return false;
10613
10614 exp = get_base_address (TREE_OPERAND (exp, 0));
10615 if (!exp)
10616 return false;
10617
10618 /* Make sure we call decl_readonly_section only for trees it
10619 can handle (since it returns true for everything it doesn't
10620 understand). */
10621 if (TREE_CODE (exp) == STRING_CST
10622 || TREE_CODE (exp) == CONSTRUCTOR
10623 || (VAR_P (exp) && TREE_STATIC (exp)))
10624 return decl_readonly_section (exp, 0);
10625 else
10626 return false;
10627 }
10628
10629 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
10630 to the call, and TYPE is its return type.
10631
10632 Return NULL_TREE if no simplification was possible, otherwise return the
10633 simplified form of the call as a tree.
10634
10635 The simplified form may be a constant or other expression which
10636 computes the same value, but in a more efficient manner (including
10637 calls to other builtin functions).
10638
10639 The call may contain arguments which need to be evaluated, but
10640 which are not useful to determine the result of the call. In
10641 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10642 COMPOUND_EXPR will be an argument which must be evaluated.
10643 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10644 COMPOUND_EXPR in the chain will contain the tree for the simplified
10645 form of the builtin function call. */
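/* Editorial sketch of the cases handled below:

     strpbrk ("hello", "lo")  ->  pointer into the literal at "llo"
     strpbrk (s, "")          ->  NULL (S still evaluated for side effects)
     strpbrk (s, "x")         ->  strchr (s, 'x')

   anything else is left as a real strpbrk call.  */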
10646
10647 static tree
10648 fold_builtin_strpbrk (location_t loc, tree expr, tree s1, tree s2, tree type)
10649 {
10650 if (!validate_arg (s1, POINTER_TYPE)
10651 || !validate_arg (s2, POINTER_TYPE))
10652 return NULL_TREE;
10653
10654 if (!check_nul_terminated_array (expr, s1)
10655 || !check_nul_terminated_array (expr, s2))
10656 return NULL_TREE;
10657
10658 tree fn;
10659 const char *p1, *p2;
10660
10661 p2 = c_getstr (s2);
10662 if (p2 == NULL)
10663 return NULL_TREE;
10664
10665 p1 = c_getstr (s1);
10666 if (p1 != NULL)
10667 {
10668 const char *r = strpbrk (p1, p2);
10669 tree tem;
10670
10671 if (r == NULL)
10672 return build_int_cst (TREE_TYPE (s1), 0);
10673
10674 /* Return an offset into the constant string argument. */
10675 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10676 return fold_convert_loc (loc, type, tem);
10677 }
10678
10679 if (p2[0] == '\0')
10680 /* strpbrk(x, "") == NULL.
10681 Evaluate and ignore s1 in case it had side-effects. */
10682 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
10683
10684 if (p2[1] != '\0')
10685 return NULL_TREE; /* Really call strpbrk. */
10686
10687 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10688 if (!fn)
10689 return NULL_TREE;
10690
10691 /* New argument list transforming strpbrk(s1, s2) to
10692 strchr(s1, s2[0]). */
10693 return build_call_expr_loc (loc, fn, 2, s1,
10694 build_int_cst (integer_type_node, p2[0]));
10695 }
10696
10697 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
10698 to the call.
10699
10700 Return NULL_TREE if no simplification was possible, otherwise return the
10701 simplified form of the call as a tree.
10702
10703 The simplified form may be a constant or other expression which
10704 computes the same value, but in a more efficient manner (including
10705 calls to other builtin functions).
10706
10707 The call may contain arguments which need to be evaluated, but
10708 which are not useful to determine the result of the call. In
10709 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10710 COMPOUND_EXPR will be an argument which must be evaluated.
10711 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10712 COMPOUND_EXPR in the chain will contain the tree for the simplified
10713 form of the builtin function call. */
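/* Editorial note: only the trivial case is folded here, e.g.
   strspn (s, "") or strspn ("", accept) becomes (size_t) 0 while both
   arguments are still evaluated for their side effects.  */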
10714
10715 static tree
10716 fold_builtin_strspn (location_t loc, tree expr, tree s1, tree s2)
10717 {
10718 if (!validate_arg (s1, POINTER_TYPE)
10719 || !validate_arg (s2, POINTER_TYPE))
10720 return NULL_TREE;
10721
10722 if (!check_nul_terminated_array (expr, s1)
10723 || !check_nul_terminated_array (expr, s2))
10724 return NULL_TREE;
10725
10726 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10727
10728 /* If either argument is "", the result is 0. */
10729 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
10730 /* Evaluate and ignore both arguments in case either one has
10731 side-effects. */
10732 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
10733 s1, s2);
10734 return NULL_TREE;
10735 }
10736
10737 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
10738 to the call.
10739
10740 Return NULL_TREE if no simplification was possible, otherwise return the
10741 simplified form of the call as a tree.
10742
10743 The simplified form may be a constant or other expression which
10744 computes the same value, but in a more efficient manner (including
10745 calls to other builtin functions).
10746
10747 The call may contain arguments which need to be evaluated, but
10748 which are not useful to determine the result of the call. In
10749 this case we return a chain of COMPOUND_EXPRs. The LHS of each
10750 COMPOUND_EXPR will be an argument which must be evaluated.
10751 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
10752 COMPOUND_EXPR in the chain will contain the tree for the simplified
10753 form of the builtin function call. */
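/* Editorial sketch of the two cases handled below:

     strcspn ("", reject)  ->  (size_t) 0, with REJECT still evaluated
     strcspn (s, "")       ->  __builtin_strlen (s)

   anything else is left as a real strcspn call.  */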
10754
10755 static tree
10756 fold_builtin_strcspn (location_t loc, tree expr, tree s1, tree s2)
10757 {
10758 if (!validate_arg (s1, POINTER_TYPE)
10759 || !validate_arg (s2, POINTER_TYPE))
10760 return NULL_TREE;
10761
10762 if (!check_nul_terminated_array (expr, s1)
10763 || !check_nul_terminated_array (expr, s2))
10764 return NULL_TREE;
10765
10766 /* If the first argument is "", the result is 0. */
10767 const char *p1 = c_getstr (s1);
10768 if (p1 && *p1 == '\0')
10769 {
10770 /* Evaluate and ignore argument s2 in case it has
10771 side-effects. */
10772 return omit_one_operand_loc (loc, size_type_node,
10773 size_zero_node, s2);
10774 }
10775
10776 /* If the second argument is "", return __builtin_strlen(s1). */
10777 const char *p2 = c_getstr (s2);
10778 if (p2 && *p2 == '\0')
10779 {
10780 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
10781
10782 /* If the replacement _DECL isn't initialized, don't do the
10783 transformation. */
10784 if (!fn)
10785 return NULL_TREE;
10786
10787 return build_call_expr_loc (loc, fn, 1, s1);
10788 }
10789 return NULL_TREE;
10790 }
10791
10792 /* Fold the next_arg or va_start call EXP. Return true if an error was
10793 produced, false otherwise. This is done so that we don't output the error
10794 or warning twice or three times. */
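/* Editorial example of the diagnostics issued below:

     void f (int a, int b, ...)
     { va_list ap; va_start (ap, a); }   // 'a' is not the last named parm

   draws the -Wvarargs warning, while using va_start in a function with a
   fixed argument list is a hard error.  */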
10795
10796 bool
10797 fold_builtin_next_arg (tree exp, bool va_start_p)
10798 {
10799 tree fntype = TREE_TYPE (current_function_decl);
10800 int nargs = call_expr_nargs (exp);
10801 tree arg;
10802 /* There is a good chance the current input_location points inside the
10803 definition of the va_start macro (perhaps on the token for the
10804 builtin) in a system header, so warnings will not be emitted.
10805 Use the location in real source code. */
10806 location_t current_location =
10807 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
10808 NULL);
10809
10810 if (!stdarg_p (fntype))
10811 {
10812 error ("%<va_start%> used in function with fixed arguments");
10813 return true;
10814 }
10815
10816 if (va_start_p)
10817 {
10818 if (va_start_p && (nargs != 2))
10819 {
10820 error ("wrong number of arguments to function %<va_start%>");
10821 return true;
10822 }
10823 arg = CALL_EXPR_ARG (exp, 1);
10824 }
10825 /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
10826 when we checked the arguments and if needed issued a warning. */
10827 else
10828 {
10829 if (nargs == 0)
10830 {
10831 /* Evidently an out of date version of <stdarg.h>; can't validate
10832 va_start's second argument, but can still work as intended. */
10833 warning_at (current_location,
10834 OPT_Wvarargs,
10835 "%<__builtin_next_arg%> called without an argument");
10836 return true;
10837 }
10838 else if (nargs > 1)
10839 {
10840 error ("wrong number of arguments to function %<__builtin_next_arg%>");
10841 return true;
10842 }
10843 arg = CALL_EXPR_ARG (exp, 0);
10844 }
10845
10846 if (TREE_CODE (arg) == SSA_NAME
10847 && SSA_NAME_VAR (arg))
10848 arg = SSA_NAME_VAR (arg);
10849
10850 /* We destructively modify the call to be __builtin_va_start (ap, 0)
10851 or __builtin_next_arg (0) the first time we see it, after checking
10852 the arguments and if needed issuing a warning. */
10853 if (!integer_zerop (arg))
10854 {
10855 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
10856
10857 /* Strip off all nops for the sake of the comparison. This
10858 is not quite the same as STRIP_NOPS. It does more.
10859 We must also strip off INDIRECT_REF for C++ reference
10860 parameters. */
10861 while (CONVERT_EXPR_P (arg)
10862 || TREE_CODE (arg) == INDIRECT_REF)
10863 arg = TREE_OPERAND (arg, 0);
10864 if (arg != last_parm)
10865 {
10866 /* FIXME: Sometimes with the tree optimizers we can get
10867 something that is not the last argument even though the user
10868 used the last argument. We just warn and set the arg to be the
10869 last argument so that we will get wrong code because of
10870 it. */
10871 warning_at (current_location,
10872 OPT_Wvarargs,
10873 "second parameter of %<va_start%> not last named argument");
10874 }
10875
10876 /* Undefined by C99 7.15.1.4p4 (va_start):
10877 "If the parameter parmN is declared with the register storage
10878 class, with a function or array type, or with a type that is
10879 not compatible with the type that results after application of
10880 the default argument promotions, the behavior is undefined."
10881 */
10882 else if (DECL_REGISTER (arg))
10883 {
10884 warning_at (current_location,
10885 OPT_Wvarargs,
10886 "undefined behavior when second parameter of "
10887 "%<va_start%> is declared with %<register%> storage");
10888 }
10889
10890 /* We want to verify the second parameter just once before the tree
10891 optimizers are run and then avoid keeping it in the tree,
10892 as otherwise we could warn even for correct code like:
10893 void foo (int i, ...)
10894 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
10895 if (va_start_p)
10896 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
10897 else
10898 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
10899 }
10900 return false;
10901 }
10902
10903
10904 /* Expand a call EXP to __builtin_object_size. */
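/* Editorial note on the fallback below: if nothing better is known at
   expansion time, __builtin_object_size (p, 0) and (p, 1) evaluate to
   (size_t) -1 ("unknown, assume the maximum"), while types 2 and 3
   evaluate to 0 ("unknown, assume the minimum").  */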
10905
10906 static rtx
10907 expand_builtin_object_size (tree exp)
10908 {
10909 tree ost;
10910 int object_size_type;
10911 tree fndecl = get_callee_fndecl (exp);
10912
10913 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
10914 {
10915 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
10916 exp, fndecl);
10917 expand_builtin_trap ();
10918 return const0_rtx;
10919 }
10920
10921 ost = CALL_EXPR_ARG (exp, 1);
10922 STRIP_NOPS (ost);
10923
10924 if (TREE_CODE (ost) != INTEGER_CST
10925 || tree_int_cst_sgn (ost) < 0
10926 || compare_tree_int (ost, 3) > 0)
10927 {
10928 error ("%Klast argument of %qD is not integer constant between 0 and 3",
10929 exp, fndecl);
10930 expand_builtin_trap ();
10931 return const0_rtx;
10932 }
10933
10934 object_size_type = tree_to_shwi (ost);
10935
10936 return object_size_type < 2 ? constm1_rtx : const0_rtx;
10937 }
10938
10939 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
10940 FCODE is the BUILT_IN_* to use.
10941 Return NULL_RTX if we failed; the caller should emit a normal call,
10942 otherwise try to get the result in TARGET, if convenient (and in
10943 mode MODE if that's convenient). */
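/* Editorial sketch of the common case handled below: with a constant
   length that the checks accept (or SIZE equal to (size_t) -1, meaning
   nothing to check),

     __builtin___memcpy_chk (dst, src, 32, size)

   is expanded as a plain memcpy (dst, src, 32); if a definite overflow
   is detected, NULL_RTX is returned and the checking library call is
   emitted instead.  */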
10944
10945 static rtx
10946 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
10947 enum built_in_function fcode)
10948 {
10949 if (!validate_arglist (exp,
10950 POINTER_TYPE,
10951 fcode == BUILT_IN_MEMSET_CHK
10952 ? INTEGER_TYPE : POINTER_TYPE,
10953 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
10954 return NULL_RTX;
10955
10956 tree dest = CALL_EXPR_ARG (exp, 0);
10957 tree src = CALL_EXPR_ARG (exp, 1);
10958 tree len = CALL_EXPR_ARG (exp, 2);
10959 tree size = CALL_EXPR_ARG (exp, 3);
10960
10961 bool sizes_ok = check_access (exp, dest, src, len, /*maxread=*/NULL_TREE,
10962 /*str=*/NULL_TREE, size);
10963
10964 if (!tree_fits_uhwi_p (size))
10965 return NULL_RTX;
10966
10967 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
10968 {
10969 /* Avoid transforming the checking call to an ordinary one when
10970 an overflow has been detected or when the call couldn't be
10971 validated because the size is not constant. */
10972 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
10973 return NULL_RTX;
10974
10975 tree fn = NULL_TREE;
10976 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
10977 mem{cpy,pcpy,move,set} is available. */
10978 switch (fcode)
10979 {
10980 case BUILT_IN_MEMCPY_CHK:
10981 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
10982 break;
10983 case BUILT_IN_MEMPCPY_CHK:
10984 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
10985 break;
10986 case BUILT_IN_MEMMOVE_CHK:
10987 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
10988 break;
10989 case BUILT_IN_MEMSET_CHK:
10990 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
10991 break;
10992 default:
10993 break;
10994 }
10995
10996 if (! fn)
10997 return NULL_RTX;
10998
10999 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
11000 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11001 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11002 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11003 }
11004 else if (fcode == BUILT_IN_MEMSET_CHK)
11005 return NULL_RTX;
11006 else
11007 {
11008 unsigned int dest_align = get_pointer_alignment (dest);
11009
11010 /* If DEST is not a pointer type, call the normal function. */
11011 if (dest_align == 0)
11012 return NULL_RTX;
11013
11014 /* If SRC and DEST are the same (and not volatile), do nothing. */
11015 if (operand_equal_p (src, dest, 0))
11016 {
11017 tree expr;
11018
11019 if (fcode != BUILT_IN_MEMPCPY_CHK)
11020 {
11021 /* Evaluate and ignore LEN in case it has side-effects. */
11022 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
11023 return expand_expr (dest, target, mode, EXPAND_NORMAL);
11024 }
11025
11026 expr = fold_build_pointer_plus (dest, len);
11027 return expand_expr (expr, target, mode, EXPAND_NORMAL);
11028 }
11029
11030 /* __memmove_chk special case. */
11031 if (fcode == BUILT_IN_MEMMOVE_CHK)
11032 {
11033 unsigned int src_align = get_pointer_alignment (src);
11034
11035 if (src_align == 0)
11036 return NULL_RTX;
11037
11038 /* If src is categorized for a readonly section we can use
11039 normal __memcpy_chk. */
11040 if (readonly_data_expr (src))
11041 {
11042 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
11043 if (!fn)
11044 return NULL_RTX;
11045 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
11046 dest, src, len, size);
11047 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
11048 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
11049 return expand_expr (fn, target, mode, EXPAND_NORMAL);
11050 }
11051 }
11052 return NULL_RTX;
11053 }
11054 }
11055
11056 /* Emit warning if a buffer overflow is detected at compile time. */
11057
11058 static void
11059 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
11060 {
11061 /* The source string. */
11062 tree srcstr = NULL_TREE;
11063 /* The size of the destination object. */
11064 tree objsize = NULL_TREE;
11065 /* The string that is being concatenated with (as in __strcat_chk)
11066 or null if it isn't. */
11067 tree catstr = NULL_TREE;
11068 /* The maximum length of the source sequence in a bounded operation
11069 (such as __strncat_chk) or null if the operation isn't bounded
11070 (such as __strcat_chk). */
11071 tree maxread = NULL_TREE;
11072 /* The exact size of the access (such as in __strncpy_chk). */
11073 tree size = NULL_TREE;
11074
11075 switch (fcode)
11076 {
11077 case BUILT_IN_STRCPY_CHK:
11078 case BUILT_IN_STPCPY_CHK:
11079 srcstr = CALL_EXPR_ARG (exp, 1);
11080 objsize = CALL_EXPR_ARG (exp, 2);
11081 break;
11082
11083 case BUILT_IN_STRCAT_CHK:
11084 /* For __strcat_chk the warning will be emitted only if overflowing
11085 by at least strlen (dest) + 1 bytes. */
11086 catstr = CALL_EXPR_ARG (exp, 0);
11087 srcstr = CALL_EXPR_ARG (exp, 1);
11088 objsize = CALL_EXPR_ARG (exp, 2);
11089 break;
11090
11091 case BUILT_IN_STRNCAT_CHK:
11092 catstr = CALL_EXPR_ARG (exp, 0);
11093 srcstr = CALL_EXPR_ARG (exp, 1);
11094 maxread = CALL_EXPR_ARG (exp, 2);
11095 objsize = CALL_EXPR_ARG (exp, 3);
11096 break;
11097
11098 case BUILT_IN_STRNCPY_CHK:
11099 case BUILT_IN_STPNCPY_CHK:
11100 srcstr = CALL_EXPR_ARG (exp, 1);
11101 size = CALL_EXPR_ARG (exp, 2);
11102 objsize = CALL_EXPR_ARG (exp, 3);
11103 break;
11104
11105 case BUILT_IN_SNPRINTF_CHK:
11106 case BUILT_IN_VSNPRINTF_CHK:
11107 maxread = CALL_EXPR_ARG (exp, 1);
11108 objsize = CALL_EXPR_ARG (exp, 3);
11109 break;
11110 default:
11111 gcc_unreachable ();
11112 }
11113
11114 if (catstr && maxread)
11115 {
11116 /* Check __strncat_chk. There is no way to determine the length
11117 of the string to which the source string is being appended so
11118 just warn when the length of the source string is not known. */
11119 check_strncat_sizes (exp, objsize);
11120 return;
11121 }
11122
11123 /* The destination argument is the first one for all built-ins above. */
11124 tree dst = CALL_EXPR_ARG (exp, 0);
11125
11126 check_access (exp, dst, srcstr, size, maxread, srcstr, objsize);
11127 }
11128
11129 /* Emit warning if a buffer overflow is detected at compile time
11130 in __sprintf_chk/__vsprintf_chk calls. */
11131
11132 static void
11133 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
11134 {
11135 tree size, len, fmt;
11136 const char *fmt_str;
11137 int nargs = call_expr_nargs (exp);
11138
11139 /* Verify the required arguments in the original call. */
11140
11141 if (nargs < 4)
11142 return;
11143 size = CALL_EXPR_ARG (exp, 2);
11144 fmt = CALL_EXPR_ARG (exp, 3);
11145
11146 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
11147 return;
11148
11149 /* Check whether the format is a literal string constant. */
11150 fmt_str = c_getstr (fmt);
11151 if (fmt_str == NULL)
11152 return;
11153
11154 if (!init_target_chars ())
11155 return;
11156
11157 /* If the format doesn't contain % args or %%, we know its size. */
11158 if (strchr (fmt_str, target_percent) == 0)
11159 len = build_int_cstu (size_type_node, strlen (fmt_str));
11160 /* If the format is "%s" and first ... argument is a string literal,
11161 we know it too. */
11162 else if (fcode == BUILT_IN_SPRINTF_CHK
11163 && strcmp (fmt_str, target_percent_s) == 0)
11164 {
11165 tree arg;
11166
11167 if (nargs < 5)
11168 return;
11169 arg = CALL_EXPR_ARG (exp, 4);
11170 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
11171 return;
11172
11173 len = c_strlen (arg, 1);
11174 if (!len || ! tree_fits_uhwi_p (len))
11175 return;
11176 }
11177 else
11178 return;
11179
11180 /* Add one for the terminating nul. */
11181 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
11182
11183 check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, /*size=*/NULL_TREE,
11184 /*maxread=*/NULL_TREE, len, size);
11185 }
11186
11187 /* Emit warning if a free is called with address of a variable. */
11188
11189 static void
11190 maybe_emit_free_warning (tree exp)
11191 {
11192 if (call_expr_nargs (exp) != 1)
11193 return;
11194
11195 tree arg = CALL_EXPR_ARG (exp, 0);
11196
11197 STRIP_NOPS (arg);
11198 if (TREE_CODE (arg) != ADDR_EXPR)
11199 return;
11200
11201 arg = get_base_address (TREE_OPERAND (arg, 0));
11202 if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
11203 return;
11204
11205 if (SSA_VAR_P (arg))
11206 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11207 "%Kattempt to free a non-heap object %qD", exp, arg);
11208 else
11209 warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
11210 "%Kattempt to free a non-heap object", exp);
11211 }
11212
11213 /* Fold a call to __builtin_object_size with arguments PTR and OST,
11214 if possible. */
11215
11216 static tree
11217 fold_builtin_object_size (tree ptr, tree ost)
11218 {
11219 unsigned HOST_WIDE_INT bytes;
11220 int object_size_type;
11221
11222 if (!validate_arg (ptr, POINTER_TYPE)
11223 || !validate_arg (ost, INTEGER_TYPE))
11224 return NULL_TREE;
11225
11226 STRIP_NOPS (ost);
11227
11228 if (TREE_CODE (ost) != INTEGER_CST
11229 || tree_int_cst_sgn (ost) < 0
11230 || compare_tree_int (ost, 3) > 0)
11231 return NULL_TREE;
11232
11233 object_size_type = tree_to_shwi (ost);
11234
11235 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
11236 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
11237 and (size_t) 0 for types 2 and 3. */
11238 if (TREE_SIDE_EFFECTS (ptr))
11239 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
11240
11241 if (TREE_CODE (ptr) == ADDR_EXPR)
11242 {
11243 compute_builtin_object_size (ptr, object_size_type, &bytes);
11244 if (wi::fits_to_tree_p (bytes, size_type_node))
11245 return build_int_cstu (size_type_node, bytes);
11246 }
11247 else if (TREE_CODE (ptr) == SSA_NAME)
11248 {
11249 /* If object size is not known yet, delay folding until
11250 later. Maybe subsequent passes will help determine
11251 it. */
11252 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
11253 && wi::fits_to_tree_p (bytes, size_type_node))
11254 return build_int_cstu (size_type_node, bytes);
11255 }
11256
11257 return NULL_TREE;
11258 }
11259
11260 /* Builtins with folding operations that operate on "..." arguments
11261 need special handling; we need to store the arguments in a convenient
11262 data structure before attempting any folding. Fortunately there are
11263 only a few builtins that fall into this category. FNDECL is the
11264 function, EXP is the CALL_EXPR for the call. */
11265
11266 static tree
11267 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
11268 {
11269 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
11270 tree ret = NULL_TREE;
11271
11272 switch (fcode)
11273 {
11274 case BUILT_IN_FPCLASSIFY:
11275 ret = fold_builtin_fpclassify (loc, args, nargs);
11276 break;
11277
11278 default:
11279 break;
11280 }
11281 if (ret)
11282 {
11283 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
11284 SET_EXPR_LOCATION (ret, loc);
11285 TREE_NO_WARNING (ret) = 1;
11286 return ret;
11287 }
11288 return NULL_TREE;
11289 }
11290
11291 /* Initialize format string characters in the target charset. */
11292
11293 bool
11294 init_target_chars (void)
11295 {
11296 static bool init;
11297 if (!init)
11298 {
11299 target_newline = lang_hooks.to_target_charset ('\n');
11300 target_percent = lang_hooks.to_target_charset ('%');
11301 target_c = lang_hooks.to_target_charset ('c');
11302 target_s = lang_hooks.to_target_charset ('s');
11303 if (target_newline == 0 || target_percent == 0 || target_c == 0
11304 || target_s == 0)
11305 return false;
11306
11307 target_percent_c[0] = target_percent;
11308 target_percent_c[1] = target_c;
11309 target_percent_c[2] = '\0';
11310
11311 target_percent_s[0] = target_percent;
11312 target_percent_s[1] = target_s;
11313 target_percent_s[2] = '\0';
11314
11315 target_percent_s_newline[0] = target_percent;
11316 target_percent_s_newline[1] = target_s;
11317 target_percent_s_newline[2] = target_newline;
11318 target_percent_s_newline[3] = '\0';
11319
11320 init = true;
11321 }
11322 return true;
11323 }
11324
11325 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
11326 and no overflow/underflow occurred. INEXACT is true if M was not
11327 exactly calculated. TYPE is the tree type for the result. This
11328 function assumes that you cleared the MPFR flags and then
11329 calculated M to see if anything subsequently set a flag prior to
11330 entering this function. Return NULL_TREE if any checks fail. */
11331
11332 static tree
11333 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
11334 {
11335 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11336 overflow/underflow occurred. If -frounding-math, proceed iff the
11337 result of calling FUNC was exact. */
11338 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
11339 && (!flag_rounding_math || !inexact))
11340 {
11341 REAL_VALUE_TYPE rr;
11342
11343 real_from_mpfr (&rr, m, type, MPFR_RNDN);
11344 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
11345 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11346 but the mpfr_t is not, then we underflowed in the
11347 conversion. */
11348 if (real_isfinite (&rr)
11349 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
11350 {
11351 REAL_VALUE_TYPE rmode;
11352
11353 real_convert (&rmode, TYPE_MODE (type), &rr);
11354 /* Proceed iff the specified mode can hold the value. */
11355 if (real_identical (&rmode, &rr))
11356 return build_real (type, rmode);
11357 }
11358 }
11359 return NULL_TREE;
11360 }
11361
11362 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
11363 number and no overflow/underflow occurred. INEXACT is true if M
11364 was not exactly calculated. TYPE is the tree type for the result.
11365 This function assumes that you cleared the MPFR flags and then
11366 calculated M to see if anything subsequently set a flag prior to
11367 entering this function. Return NULL_TREE if any checks fail, if
11368 FORCE_CONVERT is true, then bypass the checks. */
11369
11370 static tree
11371 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
11372 {
11373 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
11374 overflow/underflow occurred. If -frounding-math, proceed iff the
11375 result of calling FUNC was exact. */
11376 if (force_convert
11377 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
11378 && !mpfr_overflow_p () && !mpfr_underflow_p ()
11379 && (!flag_rounding_math || !inexact)))
11380 {
11381 REAL_VALUE_TYPE re, im;
11382
11383 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), MPFR_RNDN);
11384 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), MPFR_RNDN);
11385 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
11386 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
11387 but the mpfr_t is not, then we underflowed in the
11388 conversion. */
11389 if (force_convert
11390 || (real_isfinite (&re) && real_isfinite (&im)
11391 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
11392 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
11393 {
11394 REAL_VALUE_TYPE re_mode, im_mode;
11395
11396 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
11397 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
11398 /* Proceed iff the specified mode can hold the value. */
11399 if (force_convert
11400 || (real_identical (&re_mode, &re)
11401 && real_identical (&im_mode, &im)))
11402 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
11403 build_real (TREE_TYPE (type), im_mode));
11404 }
11405 }
11406 return NULL_TREE;
11407 }
11408
11409 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
11410 the pointer *(ARG_QUO) and return the result. The type is taken
11411 from the type of ARG0 and is used for setting the precision of the
11412 calculation and results. */
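/* Editorial example: with both arguments REAL_CSTs,

     remquo (5.0, 3.0, &q)

   folds roughly to the COMPOUND_EXPR (q = 2, -1.0): mpfr_remquo yields
   the remainder -1.0 and quotient bits 2 for these operands.  */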
11413
11414 static tree
11415 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
11416 {
11417 tree const type = TREE_TYPE (arg0);
11418 tree result = NULL_TREE;
11419
11420 STRIP_NOPS (arg0);
11421 STRIP_NOPS (arg1);
11422
11423 /* To proceed, MPFR must exactly represent the target floating point
11424 format, which only happens when the target base equals two. */
11425 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11426 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
11427 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
11428 {
11429 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
11430 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
11431
11432 if (real_isfinite (ra0) && real_isfinite (ra1))
11433 {
11434 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11435 const int prec = fmt->p;
11436 const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
11437 tree result_rem;
11438 long integer_quo;
11439 mpfr_t m0, m1;
11440
11441 mpfr_inits2 (prec, m0, m1, NULL);
11442 mpfr_from_real (m0, ra0, MPFR_RNDN);
11443 mpfr_from_real (m1, ra1, MPFR_RNDN);
11444 mpfr_clear_flags ();
11445 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
11446 /* Remquo is independent of the rounding mode, so pass
11447 inexact=0 to do_mpfr_ckconv(). */
11448 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
11449 mpfr_clears (m0, m1, NULL);
11450 if (result_rem)
11451 {
11452 /* MPFR calculates quo in the host's long so it may
11453 return more bits in quo than the target int can hold
11454 if sizeof(host long) > sizeof(target int). This can
11455 happen even for native compilers in LP64 mode. In
11456 these cases, modulo the quo value with the largest
11457 number that the target int can hold while leaving one
11458 bit for the sign. */
11459 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
11460 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
11461
11462 /* Dereference the quo pointer argument. */
11463 arg_quo = build_fold_indirect_ref (arg_quo);
11464 /* Proceed iff a valid pointer type was passed in. */
11465 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
11466 {
11467 /* Set the value. */
11468 tree result_quo
11469 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
11470 build_int_cst (TREE_TYPE (arg_quo),
11471 integer_quo));
11472 TREE_SIDE_EFFECTS (result_quo) = 1;
11473 /* Combine the quo assignment with the rem. */
11474 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
11475 result_quo, result_rem));
11476 }
11477 }
11478 }
11479 }
11480 return result;
11481 }
11482
11483 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
11484 resulting value as a tree with type TYPE. The mpfr precision is
11485 set to the precision of TYPE. We assume that this mpfr function
11486 returns zero if the result could be calculated exactly within the
11487 requested precision. In addition, the integer pointer represented
11488 by ARG_SG will be dereferenced and set to the appropriate signgam
11489 (-1,1) value. */
11490
11491 static tree
11492 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
11493 {
11494 tree result = NULL_TREE;
11495
11496 STRIP_NOPS (arg);
11497
11498 /* To proceed, MPFR must exactly represent the target floating point
11499 format, which only happens when the target base equals two. Also
11500 verify ARG is a constant and that ARG_SG is an int pointer. */
11501 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
11502 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
11503 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
11504 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
11505 {
11506 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
11507
11508 /* In addition to NaN and Inf, the argument cannot be zero or a
11509 negative integer. */
11510 if (real_isfinite (ra)
11511 && ra->cl != rvc_zero
11512 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
11513 {
11514 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
11515 const int prec = fmt->p;
11516 const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
11517 int inexact, sg;
11518 mpfr_t m;
11519 tree result_lg;
11520
11521 mpfr_init2 (m, prec);
11522 mpfr_from_real (m, ra, MPFR_RNDN);
11523 mpfr_clear_flags ();
11524 inexact = mpfr_lgamma (m, &sg, m, rnd);
11525 result_lg = do_mpfr_ckconv (m, type, inexact);
11526 mpfr_clear (m);
11527 if (result_lg)
11528 {
11529 tree result_sg;
11530
11531 /* Dereference the arg_sg pointer argument. */
11532 arg_sg = build_fold_indirect_ref (arg_sg);
11533 /* Assign the signgam value into *arg_sg. */
11534 result_sg = fold_build2 (MODIFY_EXPR,
11535 TREE_TYPE (arg_sg), arg_sg,
11536 build_int_cst (TREE_TYPE (arg_sg), sg));
11537 TREE_SIDE_EFFECTS (result_sg) = 1;
11538 /* Combine the signgam assignment with the lgamma result. */
11539 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
11540 result_sg, result_lg));
11541 }
11542 }
11543 }
11544
11545 return result;
11546 }
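
/* Example of the resulting fold (illustrative; it assumes the inexact MPFR
   result is acceptable under the active -frounding-math setting): for a call
   lgamma_r (10.0, &sg) the gamma function is positive, so the fold is the
   compound expression (*sg = 1, L), where L is the correctly rounded value
   of log (9!) = log (362880), roughly 12.8018.  */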

/* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
   mpc function FUNC on them and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mpfr_rnd_t rnd = fmt->round_towards_zero
				 ? MPFR_RNDZ : MPFR_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref (m0), re0, rnd);
	  mpfr_from_real (mpc_imagref (m0), im0, rnd);
	  mpfr_from_real (mpc_realref (m1), re1, rnd);
	  mpfr_from_real (mpc_imagref (m1), im1, rnd);
	  mpfr_clear_flags ();
	  inexact = func (m0, m0, m1, crnd);
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }

  return result;
}
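
/* Worked example (illustrative): with FUNC = mpc_mul (complex
   multiplication, which has the required signature) and the constant
   operands 1.0 + 2.0i and 3.0 + 4.0i, the product is
   (1*3 - 2*4) + (1*4 + 2*3)i = -5.0 + 10.0i.  Every intermediate value is
   exactly representable, MPC reports an exact result, and the call is
   folded to the COMPLEX_CST -5.0 + 10.0i.  */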

/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node before the warning is generated.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl && fndecl_built_in_p (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
		    ? gimple_call_arg_ptr (stmt, 0)
		    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	{
	  return targetm.fold_builtin (fndecl, nargs, args, ignore);
	}
      else
	{
	  ret = fold_builtin_n (loc, NULL_TREE, fndecl, args, nargs, ignore);
	  if (ret)
	    {
	      /* Propagate location information from original call to
		 expansion of builtin.  Otherwise things like
		 maybe_emit_chk_warning, that operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
		  tree realret = ret;
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}
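
/* Example of a fold this wrapper can perform (illustrative; the exact
   behaviour depends on the folders reached through fold_builtin_n): a GIMPLE
   call such as

     _1 = __builtin_ffs (16);

   has a constant argument, so constant folding can typically replace the
   call with the value 5 (the least significant set bit of 16 is bit 5,
   counting from 1).  */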

/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
	      && asmspec != 0);

  tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);

  if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
      && INT_TYPE_SIZE < BITS_PER_WORD)
    {
      scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
      set_user_assembler_libfunc ("ffs", asmspec);
      set_optab_libfunc (ffs_optab, mode, "ffs");
    }
}
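
/* Illustrative source-level trigger: a translation unit that renames a
   builtin's assembler name, e.g.

     extern int ffs (int) __asm__ ("my_ffs");

   ends up here, and the special case above additionally redirects the ffs
   libfunc used when expanding BUILT_IN_FFS to the symbol "my_ffs", so all
   generated references stay consistent.  */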

/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (decl))
      {
	/* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
	/* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
	/* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
	return true;

      default:
	return false;
      }

  return false;
}
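
/* For instance, __builtin_expect (e, 1) simply evaluates to E (the second
   argument only carries a branch-probability hint), which is why it sits in
   the "expands to constants" group above rather than among calls that
   generate real code.  */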

/* Return true if DECL is a builtin that is not expensive, i.e., it is
   most probably expanded inline into reasonably simple code.  This is a
   superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
	return true;

      default:
	return is_simple_builtin (decl);
      }

  return false;
}
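
/* As an illustration of "inexpensive": __builtin_popcount typically expands
   to a single population-count instruction on targets that provide one, or
   to a small libgcc helper (e.g. __popcountsi2) otherwise, so it is cheap to
   keep around even though it is not "simple" in the sense of
   is_simple_builtin.  */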

/* Return true if T is a constant and the value cast to a target char
   can be represented by a host char.
   Store the cast char constant in *P if so.  */

bool
target_char_cst_p (tree t, char *p)
{
  if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
    return false;

  *p = (char) tree_to_uhwi (t);
  return true;
}
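
/* Usage sketch (hypothetical caller): folders for string builtins need the
   character argument as a host char, e.g. when simplifying
   __builtin_strchr (s, 'a'):

     char c;
     if (target_char_cst_p (arg2, &c))
       ... use c (here 'a') to inspect a known string at compile time ...

   On the usual combination of 8-bit host and target chars the function
   succeeds and stores 'a' in C.  */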

/* Return true if the builtin DECL is implemented in a standard library.
   Otherwise return false, which does not guarantee that DECL is not
   implemented in a library (thus the list of handled builtins below may
   be incomplete).  */

bool
builtin_with_linkage_p (tree decl)
{
  if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      CASE_FLT_FN (BUILT_IN_ACOS):
      CASE_FLT_FN (BUILT_IN_ACOSH):
      CASE_FLT_FN (BUILT_IN_ASIN):
      CASE_FLT_FN (BUILT_IN_ASINH):
      CASE_FLT_FN (BUILT_IN_ATAN):
      CASE_FLT_FN (BUILT_IN_ATANH):
      CASE_FLT_FN (BUILT_IN_ATAN2):
      CASE_FLT_FN (BUILT_IN_CBRT):
      CASE_FLT_FN (BUILT_IN_CEIL):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL):
      CASE_FLT_FN (BUILT_IN_COPYSIGN):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
      CASE_FLT_FN (BUILT_IN_COS):
      CASE_FLT_FN (BUILT_IN_COSH):
      CASE_FLT_FN (BUILT_IN_ERF):
      CASE_FLT_FN (BUILT_IN_ERFC):
      CASE_FLT_FN (BUILT_IN_EXP):
      CASE_FLT_FN (BUILT_IN_EXP2):
      CASE_FLT_FN (BUILT_IN_EXPM1):
      CASE_FLT_FN (BUILT_IN_FABS):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
      CASE_FLT_FN (BUILT_IN_FDIM):
      CASE_FLT_FN (BUILT_IN_FLOOR):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR):
      CASE_FLT_FN (BUILT_IN_FMA):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
      CASE_FLT_FN (BUILT_IN_FMAX):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMAX):
      CASE_FLT_FN (BUILT_IN_FMIN):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMIN):
      CASE_FLT_FN (BUILT_IN_FMOD):
      CASE_FLT_FN (BUILT_IN_FREXP):
      CASE_FLT_FN (BUILT_IN_HYPOT):
      CASE_FLT_FN (BUILT_IN_ILOGB):
      CASE_FLT_FN (BUILT_IN_LDEXP):
      CASE_FLT_FN (BUILT_IN_LGAMMA):
      CASE_FLT_FN (BUILT_IN_LLRINT):
      CASE_FLT_FN (BUILT_IN_LLROUND):
      CASE_FLT_FN (BUILT_IN_LOG):
      CASE_FLT_FN (BUILT_IN_LOG10):
      CASE_FLT_FN (BUILT_IN_LOG1P):
      CASE_FLT_FN (BUILT_IN_LOG2):
      CASE_FLT_FN (BUILT_IN_LOGB):
      CASE_FLT_FN (BUILT_IN_LRINT):
      CASE_FLT_FN (BUILT_IN_LROUND):
      CASE_FLT_FN (BUILT_IN_MODF):
      CASE_FLT_FN (BUILT_IN_NAN):
      CASE_FLT_FN (BUILT_IN_NEARBYINT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT):
      CASE_FLT_FN (BUILT_IN_NEXTAFTER):
      CASE_FLT_FN (BUILT_IN_NEXTTOWARD):
      CASE_FLT_FN (BUILT_IN_POW):
      CASE_FLT_FN (BUILT_IN_REMAINDER):
      CASE_FLT_FN (BUILT_IN_REMQUO):
      CASE_FLT_FN (BUILT_IN_RINT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT):
      CASE_FLT_FN (BUILT_IN_ROUND):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND):
      CASE_FLT_FN (BUILT_IN_SCALBLN):
      CASE_FLT_FN (BUILT_IN_SCALBN):
      CASE_FLT_FN (BUILT_IN_SIN):
      CASE_FLT_FN (BUILT_IN_SINH):
      CASE_FLT_FN (BUILT_IN_SINCOS):
      CASE_FLT_FN (BUILT_IN_SQRT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_SQRT):
      CASE_FLT_FN (BUILT_IN_TAN):
      CASE_FLT_FN (BUILT_IN_TANH):
      CASE_FLT_FN (BUILT_IN_TGAMMA):
      CASE_FLT_FN (BUILT_IN_TRUNC):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC):
	return true;
      default:
	break;
      }
  return false;
}

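/* For example, __builtin_sqrt may ultimately be emitted as a call to the C
   library's sqrt, a symbol with linkage, so it appears in the list above;
   something like __builtin_stack_save never corresponds to a library symbol
   and is deliberately absent.  */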