/* Expand builtin functions.
   Copyright (C) 1988-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "gimple.h"
#include "predict.h"
#include "params.h"
#include "tm_p.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "gimple-ssa-warn-restrict.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "libfuncs.h"
#include "output.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "builtins.h"
#include "attribs.h"
#include "asan.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"
#include "intl.h"
#include "file-prefix-map.h" /* remap_macro_filename()  */
#include "gomp-constants.h"
#include "omp-general.h"

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};

/* Set up an array of builtin_info_type, making sure each element's decl
   is initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx inline_expand_builtin_string_cmp (tree, rtx);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memchr (tree, rtx);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
					    rtx target, tree exp,
					    memop_ret retmode);
static rtx expand_builtin_memmove (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
static rtx expand_builtin_strcat (tree, rtx);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_stpncpy (tree, rtx);
static rtx expand_builtin_strncat (tree, rtx);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, scalar_int_mode);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static rtx expand_builtin_expect_with_probability (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
					enum tree_code);
static tree fold_builtin_0 (location_t, tree);
static tree fold_builtin_1 (location_t, tree, tree);
static tree fold_builtin_2 (location_t, tree, tree, tree);
static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static void maybe_emit_free_warning (tree);
static tree fold_builtin_object_size (tree, tree);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

/* Return true if NAME starts with __builtin_, __sync_, or __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  return false;
}
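
/* For instance, is_builtin_name ("__builtin_memcpy") and
   is_builtin_name ("__atomic_load_n") return true, while
   is_builtin_name ("memcpy") returns false.  (Illustrative calls, not
   ones made elsewhere in this file.)  */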

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  poly_int64 bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CSTs are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  /* Account for the alignment of runtime coefficients, so that the constant
     bitpos is guaranteed to be accurate.  */
  unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
  if (alt_align != 0 && alt_align < align)
    {
      align = alt_align;
      known_alignment = false;
    }

  *alignp = align;
  *bitposp = bitpos.coeffs[0] & (align - 1);
  return known_alignment;
}

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Otherwise return
   false and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);
  return align;
}
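
/* Worked example (target-independent): if get_object_alignment_1 finds
   align == 128 and bitpos == 32, the address is known to have the form
   128*k + 32 in bits, i.e. 16*k + 4 in bytes, so the strongest
   guaranteed alignment is least_bit_hwi (32) == 32 bits (4 bytes),
   which is what get_object_alignment returns.  */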

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}

/* Return the number of leading non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */

unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  unsigned n;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char.  */
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n;
	  if (!*elt)
	    break;
	}
    }
  else
    {
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n * eltsize;
	  if (!memcmp (elt, "\0\0\0\0", eltsize))
	    break;
	}
    }
  return n;
}
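
/* For example, string_length ("ab\0cd", 1, 5) returns 2, the number of
   elements before the embedded NUL; for wide strings each element is
   compared ELTSIZE bytes at a time against "\0\0\0\0".  (Illustrative
   only.)  */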

/* For a call at LOC to a function FN that expects a string in the argument
   ARG, issue a diagnostic because it was called with an argument declared
   at DECL that is a character array with no terminating NUL.  */

void
warn_string_no_nul (location_t loc, const char *fn, tree arg, tree decl)
{
  if (TREE_NO_WARNING (arg))
    return;

  loc = expansion_point_location_if_in_system_header (loc);

  if (warning_at (loc, OPT_Wstringop_overflow_,
		  "%qs argument missing terminating nul", fn))
    {
      inform (DECL_SOURCE_LOCATION (decl),
	      "referenced argument declared here");
      TREE_NO_WARNING (arg) = 1;
    }
}

/* If EXP refers to an unterminated constant character array return
   the declaration of the object of which the array is a member or
   element and if SIZE is not null, set *SIZE to the size of
   the unterminated array and set *EXACT if the size is exact or
   clear it otherwise.  Otherwise return null.  */

tree
unterminated_array (tree exp, tree *size /* = NULL */, bool *exact /* = NULL */)
{
  /* C_STRLEN will return NULL and set DECL in the info
     structure if EXP references an unterminated array.  */
  c_strlen_data lendata = { };
  tree len = c_strlen (exp, 1, &lendata);
  if (len == NULL_TREE && lendata.minlen && lendata.decl)
    {
      if (size)
	{
	  len = lendata.minlen;
	  if (lendata.off)
	    {
	      /* Constant offsets are already accounted for in LENDATA.MINLEN,
		 but not in a SSA_NAME + CST expression.  */
	      if (TREE_CODE (lendata.off) == INTEGER_CST)
		*exact = true;
	      else if (TREE_CODE (lendata.off) == PLUS_EXPR
		       && TREE_CODE (TREE_OPERAND (lendata.off, 1)) == INTEGER_CST)
		{
		  /* Subtract the offset from the size of the array.  */
		  *exact = false;
		  tree temp = TREE_OPERAND (lendata.off, 1);
		  temp = fold_convert (ssizetype, temp);
		  len = fold_build2 (MINUS_EXPR, ssizetype, len, temp);
		}
	      else
		*exact = false;
	    }
	  else
	    *exact = true;

	  *size = len;
	}
      return lendata.decl;
    }

  return NULL_TREE;
}

/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   Additional information about the string accessed may be recorded
   in DATA.  For example, if SRC references an unterminated string,
   then the declaration will be stored in the DECL field.  If the
   length of the unterminated string can be determined, it'll be
   stored in the MINLEN field.  Note this length could well be different
   than what a C strlen call would return.

   ELTSIZE is 1 for normal single byte character strings, and 2 or
   4 for wide character strings.  ELTSIZE is by default 1.

   The value returned is of type `ssizetype'.  */

tree
c_strlen (tree src, int only_value, c_strlen_data *data, unsigned eltsize)
{
  /* If we were not passed a DATA pointer, then get one to a local
     structure.  That avoids having to check DATA for NULL before
     each time we want to use it.  */
  c_strlen_data local_strlen_data = { };
  if (!data)
    data = &local_strlen_data;

  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
  STRIP_NOPS (src);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  tree memsize;
  tree decl;
  src = string_constant (src, &byteoff, &memsize, &decl);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
    return NULL_TREE;

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
     in case the latter is less than the size of the array, such as when
     SRC refers to a short string literal used to initialize a large array.
     In that case, the elements of the array after the terminating NUL are
     all NUL.  */
  HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
  strelts = strelts / eltsize;

  if (!tree_fits_uhwi_p (memsize))
    return NULL_TREE;

  HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* The code below works only for single byte character types.  */
      if (eltsize != 1)
	return NULL_TREE;

      /* If the string has an internal NUL character followed by any
	 non-NUL characters (e.g., "foo\0bar"), we can't compute
	 the offset to the following NUL if we don't know where to
	 start searching for it.  */
      unsigned len = string_length (ptr, eltsize, strelts);

      /* Return when an embedded null character is found or none at all.
	 In the latter case, set the DECL/MINLEN fields in the DATA structure
	 so that callers may examine them.  */
      if (len + 1 < strelts)
	return NULL_TREE;
      else if (len >= maxelts)
	{
	  data->decl = decl;
	  data->off = byteoff;
	  data->minlen = ssize_int (len);
	  return NULL_TREE;
	}

      /* For empty strings the result should be zero.  */
      if (len == 0)
	return ssize_int (0);

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  If the offset falls within the bounds
	 of the string subtract the offset from the length of the string,
	 and return that.  Otherwise the length is zero.  Take care to
	 use SAVE_EXPR in case the OFFSET has side-effects.  */
      tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
						 : byteoff;
      offsave = fold_convert_loc (loc, sizetype, offsave);
      tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
				      size_int (len));
      tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
				     offsave);
      lenexp = fold_convert_loc (loc, ssizetype, lenexp);
      return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
			      build_zero_cst (ssizetype));
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
    eltoff = -1;
  else
    eltoff = tree_to_uhwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff >= maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (src)
	  && warning_at (loc, OPT_Warray_bounds,
			 "offset %qwi outside bounds of constant string",
			 eltoff))
	TREE_NO_WARNING (src) = 1;
      return NULL_TREE;
    }

  /* If eltoff is larger than strelts but less than maxelts the
     string length is zero, since the excess memory will be zero.  */
  if (eltoff > strelts)
    return ssize_int (0);

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				strelts - eltoff);

  /* Don't know what to return if there was no zero termination.
     Ideally this would turn into a gcc_checking_assert over time.
     Set DECL/MINLEN so callers can examine them.  */
  if (len >= maxelts - eltoff)
    {
      data->decl = decl;
      data->off = byteoff;
      data->minlen = ssize_int (len);
      return NULL_TREE;
    }

  return ssize_int (len);
}
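
/* Worked example: for c_strlen applied to &"foobar"[2] with ELTSIZE 1,
   BYTEOFF is the constant 2, so ELTOFF is 2 and string_length scans
   "obar", giving ssize_int (4).  For an out-of-bounds constant offset
   such as &"foobar"[10] the function warns and returns NULL_TREE.
   (Illustrative only.)  */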

/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  If
   NULL_TERMINATED_P, reading stops after '\0' character, all further ones
   are assumed to be zero, otherwise it reads as many characters
   as needed.  */

rtx
c_readstr (const char *str, scalar_int_mode mode,
	   bool null_terminated_p/*=true*/)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch || !null_terminated_p)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
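
/* For example, on a little-endian target c_readstr ("ab", SImode)
   produces the constant 0x00006261: byte 0 ('a' == 0x61) lands in bits
   0-7, byte 1 ('b' == 0x62) in bits 8-15, and the remaining bytes are
   zero once the terminating NUL is seen.  (Illustrative only.)  */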

/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
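
/* So a source-level __builtin_return_address (0) reaches this function
   with COUNT == 0 and FNDECL_CODE == BUILT_IN_RETURN_ADDRESS: no frames
   are walked and the return address is read relative to the current
   frame, while __builtin_frame_address (2) walks the dynamic chain
   twice and returns the (possibly biased) frame address.
   (Illustrative only.)  */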

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = rtx_for_static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  */
  if (! targetm.have_nonlocal_goto ())
    {
      /* First adjust our frame pointer to its actual value.  It was
	 previously set to the start of the virtual area corresponding to
	 the stacked variables when we branched here and now needs to be
	 adjusted to the actual hardware fp value.

	 Assignments to virtual registers are converted by
	 instantiate_virtual_regs into the corresponding assignment
	 to the underlying register (fp in this case) that makes
	 the original assignment true.
	 So the following insn will actually be decrementing fp by
	 TARGET_STARTING_FRAME_OFFSET.  */
      emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

      /* Restoring the frame pointer also modifies the hard frame pointer.
	 Mark it used (so that the previous assignment remains live once
	 the frame pointer is eliminated) and clobbered (to represent the
	 implicit update from the assignment).  */
      emit_use (hard_frame_pointer_rtx);
      emit_clobber (hard_frame_pointer_rtx);
    }

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.  */

static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
{
  rtx fp, lab, stack;
  rtx_insn *insn, *last;
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

  /* DRAP is needed for stack realignment if longjmp is expanded to the
     current function.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, buf_addr);

  /* We require that the user must pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
  gcc_assert (value == const1_rtx);

  last = get_last_insn ();
  if (targetm.have_builtin_longjmp ())
    emit_insn (targetm.gen_builtin_longjmp (buf_addr));
  else
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));
      set_mem_alias_set (fp, setjmp_alias_set);
      set_mem_alias_set (lab, setjmp_alias_set);
      set_mem_alias_set (stack, setjmp_alias_set);

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
      if (targetm.have_nonlocal_goto ())
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
      else
	{
	  lab = copy_to_reg (lab);

	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

	  /* Restore the frame pointer and stack pointer.  We must use a
	     temporary since the setjmp buffer may be a local.  */
	  fp = copy_to_reg (fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack);
	  emit_move_insn (hard_frame_pointer_rtx, fp);

	  emit_use (hard_frame_pointer_rtx);
	  emit_use (stack_pointer_rtx);
	  emit_indirect_jump (lab);
	}
    }

  /* Search backwards and mark the jump insn as a non-local goto.
     Note that this precludes the use of __builtin_longjmp to a
     __builtin_setjmp target in the same function.  However, we've
     already cautioned the user that these functions are for
     internal exception handling use only.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      gcc_assert (insn != last);

      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }
}

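/* Return true if ITER has not yet reached the end of the argument list
   of the CALL_EXPR it iterates over.  */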
static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
{
  return (iter->i < iter->n);
}

/* This function validates the types of a function call argument list
   against a specified list of tree_codes.  If the last specifier is a 0,
   that represents an ellipsis, otherwise the last specifier must be a
   VOID_TYPE.  */

static bool
validate_arglist (const_tree callexpr, ...)
{
  enum tree_code code;
  bool res = 0;
  va_list ap;
  const_call_expr_arg_iterator iter;
  const_tree arg;

  va_start (ap, callexpr);
  init_const_call_expr_arg_iterator (callexpr, &iter);

  /* Get a bitmap of pointer argument numbers declared attribute nonnull.  */
  tree fn = CALL_EXPR_FN (callexpr);
  bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));

  for (unsigned argno = 1; ; ++argno)
    {
      code = (enum tree_code) va_arg (ap, int);

      switch (code)
	{
	case 0:
	  /* This signifies an ellipsis, any further arguments are all ok.  */
	  res = true;
	  goto end;
	case VOID_TYPE:
	  /* This signifies an endlink, if no arguments remain, return
	     true, otherwise return false.  */
	  res = !more_const_call_expr_args_p (&iter);
	  goto end;
	case POINTER_TYPE:
	  /* The actual argument must be nonnull when either the whole
	     called function has been declared nonnull, or when the formal
	     argument corresponding to the actual argument has been.  */
	  if (argmap
	      && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
	    {
	      arg = next_const_call_expr_arg (&iter);
	      if (!validate_arg (arg, code) || integer_zerop (arg))
		goto end;
	      break;
	    }
	  /* FALLTHRU */
	default:
	  /* If no parameters remain or the parameter's code does not
	     match the specified code, return false.  Otherwise continue
	     checking any remaining arguments.  */
	  arg = next_const_call_expr_arg (&iter);
	  if (!validate_arg (arg, code))
	    goto end;
	  break;
	}
    }

  /* We need gotos here since we can only have one VA_CLOSE in a
     function.  */
 end: ;
  va_end (ap);

  BITMAP_FREE (argmap);

  return res;
}
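
/* For example, the expander for __builtin_nonlocal_goto below calls
   validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE) to insist
   on exactly two pointer arguments, while expand_builtin_prefetch passes
   (exp, POINTER_TYPE, 0) to require one pointer followed by anything.  */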

/* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
   and the address of the save area.  */

static rtx
expand_builtin_nonlocal_goto (tree exp)
{
  tree t_label, t_save_area;
  rtx r_label, r_save_area, r_fp, r_sp;
  rtx_insn *insn;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_label = CALL_EXPR_ARG (exp, 0);
  t_save_area = CALL_EXPR_ARG (exp, 1);

  r_label = expand_normal (t_label);
  r_label = convert_memory_address (Pmode, r_label);
  r_save_area = expand_normal (t_save_area);
  r_save_area = convert_memory_address (Pmode, r_save_area);
  /* Copy the address of the save location to a register just in case it was
     based on the frame pointer.  */
  r_save_area = copy_to_reg (r_save_area);
  r_fp = gen_rtx_MEM (Pmode, r_save_area);
  r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
		      plus_constant (Pmode, r_save_area,
				     GET_MODE_SIZE (Pmode)));

  crtl->has_nonlocal_goto = 1;

  /* ??? We no longer need to pass the static chain value, afaik.  */
  if (targetm.have_nonlocal_goto ())
    emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
  else
    {
      r_label = copy_to_reg (r_label);

      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));

      /* Restore the frame pointer and stack pointer.  We must use a
	 temporary since the setjmp buffer may be a local.  */
      r_fp = copy_to_reg (r_fp);
      emit_stack_restore (SAVE_NONLOCAL, r_sp);
      emit_move_insn (hard_frame_pointer_rtx, r_fp);

      /* USE of hard_frame_pointer_rtx added for consistency;
	 not clear if really needed.  */
      emit_use (hard_frame_pointer_rtx);
      emit_use (stack_pointer_rtx);

      /* If the architecture is using a GP register, we must
	 conservatively assume that the target function makes use of it.
	 The prologue of functions with nonlocal gotos must therefore
	 initialize the GP register to the appropriate value, and we
	 must then make sure that this value is live at the point
	 of the jump.  (Note that this doesn't necessarily apply
	 to targets with a nonlocal_goto pattern; they are free
	 to implement it in their own way.  Note also that this is
	 a no-op if the GP register is a global invariant.)  */
      unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
      if (regnum != INVALID_REGNUM && fixed_regs[regnum])
	emit_use (pic_offset_table_rtx);

      emit_indirect_jump (r_label);
    }

  /* Search backwards to the jump insn and mark it as a
     non-local goto.  */
  for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
	  break;
	}
      else if (CALL_P (insn))
	break;
    }

  return const0_rtx;
}

/* __builtin_update_setjmp_buf is passed a pointer to an array of five words
   (not all will be used on all machines) that was passed to __builtin_setjmp.
   It updates the stack pointer in that block to the current value.  This is
   also called directly by the SJLJ exception handling code.  */

void
expand_builtin_update_setjmp_buf (rtx buf_addr)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  buf_addr = convert_memory_address (Pmode, buf_addr);
  rtx stack_save
    = gen_rtx_MEM (sa_mode,
		   memory_address
		   (sa_mode,
		    plus_constant (Pmode, buf_addr,
				   2 * GET_MODE_SIZE (Pmode))));

  emit_stack_save (SAVE_NONLOCAL, &stack_save);
}

/* Expand a call to __builtin_prefetch.  For a target that does not support
   data prefetch, evaluate the memory address argument in case it has side
   effects.  */

static void
expand_builtin_prefetch (tree exp)
{
  tree arg0, arg1, arg2;
  int nargs;
  rtx op0, op1, op2;

  if (!validate_arglist (exp, POINTER_TYPE, 0))
    return;

  arg0 = CALL_EXPR_ARG (exp, 0);

  /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
     zero (read) and argument 2 (locality) defaults to 3 (high degree of
     locality).  */
  nargs = call_expr_nargs (exp);
  if (nargs > 1)
    arg1 = CALL_EXPR_ARG (exp, 1);
  else
    arg1 = integer_zero_node;
  if (nargs > 2)
    arg2 = CALL_EXPR_ARG (exp, 2);
  else
    arg2 = integer_three_node;

  /* Argument 0 is an address.  */
  op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* Argument 1 (read/write flag) must be a compile-time constant int.  */
  if (TREE_CODE (arg1) != INTEGER_CST)
    {
      error ("second argument to %<__builtin_prefetch%> must be a constant");
      arg1 = integer_zero_node;
    }
  op1 = expand_normal (arg1);
  /* Argument 1 must be either zero or one.  */
  if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
    {
      warning (0, "invalid second argument to %<__builtin_prefetch%>;"
	       " using zero");
      op1 = const0_rtx;
    }

  /* Argument 2 (locality) must be a compile-time constant int.  */
  if (TREE_CODE (arg2) != INTEGER_CST)
    {
      error ("third argument to %<__builtin_prefetch%> must be a constant");
      arg2 = integer_zero_node;
    }
  op2 = expand_normal (arg2);
  /* Argument 2 must be 0, 1, 2, or 3.  */
  if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
    {
      warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
      op2 = const0_rtx;
    }

  if (targetm.have_prefetch ())
    {
      struct expand_operand ops[3];

      create_address_operand (&ops[0], op0);
      create_integer_operand (&ops[1], INTVAL (op1));
      create_integer_operand (&ops[2], INTVAL (op2));
      if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
	return;
    }

  /* Don't do anything with direct references to volatile memory, but
     generate code to handle other side effects.  */
  if (!MEM_P (op0) && side_effects_p (op0))
    emit_insn (op0);
}
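
/* So a call such as __builtin_prefetch (p, 1, 3) expands to the target's
   prefetch pattern with a write hint and maximal locality, while
   __builtin_prefetch (p) defaults to a read hint (0) and locality 3.
   (Illustrative source-level usage.)  */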
1432 
1433 /* Get a MEM rtx for expression EXP which is the address of an operand
1434    to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
1435    the maximum length of the block of memory that might be accessed or
1436    NULL if unknown.  */
1437 
1438 static rtx
get_memory_rtx(tree exp,tree len)1439 get_memory_rtx (tree exp, tree len)
1440 {
1441   tree orig_exp = exp;
1442   rtx addr, mem;
1443 
1444   /* When EXP is not resolved SAVE_EXPR, MEM_ATTRS can be still derived
1445      from its expression, for expr->a.b only <variable>.a.b is recorded.  */
1446   if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1447     exp = TREE_OPERAND (exp, 0);
1448 
1449   addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1450   mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1451 
1452   /* Get an expression we can use to find the attributes to assign to MEM.
1453      First remove any nops.  */
1454   while (CONVERT_EXPR_P (exp)
1455 	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1456     exp = TREE_OPERAND (exp, 0);
1457 
1458   /* Build a MEM_REF representing the whole accessed area as a byte blob,
1459      (as builtin stringops may alias with anything).  */
1460   exp = fold_build2 (MEM_REF,
1461 		     build_array_type (char_type_node,
1462 				       build_range_type (sizetype,
1463 							 size_one_node, len)),
1464 		     exp, build_int_cst (ptr_type_node, 0));
1465 
1466   /* If the MEM_REF has no acceptable address, try to get the base object
1467      from the original address we got, and build an all-aliasing
1468      unknown-sized access to that one.  */
1469   if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1470     set_mem_attributes (mem, exp, 0);
1471   else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1472 	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1473 						     0))))
1474     {
1475       exp = build_fold_addr_expr (exp);
1476       exp = fold_build2 (MEM_REF,
1477 			 build_array_type (char_type_node,
1478 					   build_range_type (sizetype,
1479 							     size_zero_node,
1480 							     NULL)),
1481 			 exp, build_int_cst (ptr_type_node, 0));
1482       set_mem_attributes (mem, exp, 0);
1483     }
1484   set_mem_alias_set (mem, 0);
1485   return mem;
1486 }
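
/* E.g. for a call such as memcpy (dst, src, n), each pointer argument is
   wrapped above in a MEM_REF of type char[1..n], so the returned MEM
   covers exactly the bytes the string insn may touch; alias set 0 is
   used because the string builtins may alias anything.  */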
1487 
1488 /* Built-in functions to perform an untyped call and return.  */
1489 
1490 #define apply_args_mode \
1491   (this_target_builtins->x_apply_args_mode)
1492 #define apply_result_mode \
1493   (this_target_builtins->x_apply_result_mode)
1494 
1495 /* Return the size required for the block returned by __builtin_apply_args,
1496    and initialize apply_args_mode.  */
1497 
1498 static int
1499 apply_args_size (void)
1500 {
1501   static int size = -1;
1502   int align;
1503   unsigned int regno;
1504 
1505   /* The values computed by this function never change.  */
1506   if (size < 0)
1507     {
1508       /* The first value is the incoming arg-pointer.  */
1509       size = GET_MODE_SIZE (Pmode);
1510 
1511       /* The second value is the structure value address unless this is
1512 	 passed as an "invisible" first argument.  */
1513       if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1514 	size += GET_MODE_SIZE (Pmode);
1515 
1516       for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1517 	if (FUNCTION_ARG_REGNO_P (regno))
1518 	  {
1519 	    fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);
1520 
1521 	    gcc_assert (mode != VOIDmode);
1522 
1523 	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1524 	    if (size % align != 0)
1525 	      size = CEIL (size, align) * align;
1526 	    size += GET_MODE_SIZE (mode);
1527 	    apply_args_mode[regno] = mode;
1528 	  }
1529 	else
1530 	  {
1531 	    apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1532 	  }
1533     }
1534   return size;
1535 }
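
/* As a worked example of the rounding above: if SIZE has reached 6 and
   the next argument register's mode needs 4-byte alignment, CEIL (6, 4)
   * 4 rounds SIZE up to 8 before that register's slot is added (the
   numbers are hypothetical; the real values depend on the target's
   argument registers).  */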
1536 
1537 /* Return the size required for the block returned by __builtin_apply,
1538    and initialize apply_result_mode.  */
1539 
1540 static int
1541 apply_result_size (void)
1542 {
1543   static int size = -1;
1544   int align, regno;
1545 
1546   /* The values computed by this function never change.  */
1547   if (size < 0)
1548     {
1549       size = 0;
1550 
1551       for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1552 	if (targetm.calls.function_value_regno_p (regno))
1553 	  {
1554 	    fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);
1555 
1556 	    gcc_assert (mode != VOIDmode);
1557 
1558 	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1559 	    if (size % align != 0)
1560 	      size = CEIL (size, align) * align;
1561 	    size += GET_MODE_SIZE (mode);
1562 	    apply_result_mode[regno] = mode;
1563 	  }
1564 	else
1565 	  apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1566 
1567       /* Allow targets that use untyped_call and untyped_return to override
1568 	 the size so that machine-specific information can be stored here.  */
1569 #ifdef APPLY_RESULT_SIZE
1570       size = APPLY_RESULT_SIZE;
1571 #endif
1572     }
1573   return size;
1574 }
1575 
1576 /* Create a vector describing the result block RESULT.  If SAVEP is true,
1577    the result block is used to save the values; otherwise it is used to
1578    restore the values.  */
1579 
1580 static rtx
1581 result_vector (int savep, rtx result)
1582 {
1583   int regno, size, align, nelts;
1584   fixed_size_mode mode;
1585   rtx reg, mem;
1586   rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1587 
1588   size = nelts = 0;
1589   for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1590     if ((mode = apply_result_mode[regno]) != VOIDmode)
1591       {
1592 	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1593 	if (size % align != 0)
1594 	  size = CEIL (size, align) * align;
1595 	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1596 	mem = adjust_address (result, mode, size);
1597 	savevec[nelts++] = (savep
1598 			    ? gen_rtx_SET (mem, reg)
1599 			    : gen_rtx_SET (reg, mem));
1600 	size += GET_MODE_SIZE (mode);
1601       }
1602   return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1603 }
1604 
1605 /* Save the state required to perform an untyped call with the same
1606    arguments as were passed to the current function.  */
1607 
1608 static rtx
1609 expand_builtin_apply_args_1 (void)
1610 {
1611   rtx registers, tem;
1612   int size, align, regno;
1613   fixed_size_mode mode;
1614   rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1615 
1616   /* Create a block where the arg-pointer, structure value address,
1617      and argument registers can be saved.  */
1618   registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1619 
1620   /* Walk past the arg-pointer and structure value address.  */
1621   size = GET_MODE_SIZE (Pmode);
1622   if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1623     size += GET_MODE_SIZE (Pmode);
1624 
1625   /* Save each register used in calling a function to the block.  */
1626   for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1627     if ((mode = apply_args_mode[regno]) != VOIDmode)
1628       {
1629 	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1630 	if (size % align != 0)
1631 	  size = CEIL (size, align) * align;
1632 
1633 	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1634 
1635 	emit_move_insn (adjust_address (registers, mode, size), tem);
1636 	size += GET_MODE_SIZE (mode);
1637       }
1638 
1639   /* Save the arg pointer to the block.  */
1640   tem = copy_to_reg (crtl->args.internal_arg_pointer);
1641   /* We need the pointer as the caller actually passed the arguments to
1642      us, not as we might have pretended they were passed.  Make sure it's
1643      a valid operand, as emit_move_insn isn't expected to handle a PLUS.  */
1644   if (STACK_GROWS_DOWNWARD)
1645     tem
1646       = force_operand (plus_constant (Pmode, tem,
1647 				      crtl->args.pretend_args_size),
1648 		       NULL_RTX);
1649   emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1650 
1651   size = GET_MODE_SIZE (Pmode);
1652 
1653   /* Save the structure value address unless this is passed as an
1654      "invisible" first argument.  */
1655   if (struct_incoming_value)
1656     {
1657       emit_move_insn (adjust_address (registers, Pmode, size),
1658 		      copy_to_reg (struct_incoming_value));
1659       size += GET_MODE_SIZE (Pmode);
1660     }
1661 
1662   /* Return the address of the block.  */
1663   return copy_addr_to_reg (XEXP (registers, 0));
1664 }
1665 
1666 /* __builtin_apply_args returns a block of memory allocated on
1667    the stack into which are stored the arg pointer, structure
1668    value address, static chain, and all the registers that might
1669    possibly be used in performing a function call.  The code is
1670    moved to the start of the function so the incoming values are
1671    saved.  */
1672 
1673 static rtx
1674 expand_builtin_apply_args (void)
1675 {
1676   /* Don't do __builtin_apply_args more than once in a function.
1677      Save the result of the first call and reuse it.  */
1678   if (apply_args_value != 0)
1679     return apply_args_value;
1680   {
1681     /* When this function is called, it means that registers must be
1682        saved on entry to this function.  So we migrate the
1683        call to the first insn of this function.  */
1684     rtx temp;
1685 
1686     start_sequence ();
1687     temp = expand_builtin_apply_args_1 ();
1688     rtx_insn *seq = get_insns ();
1689     end_sequence ();
1690 
1691     apply_args_value = temp;
1692 
1693     /* Put the insns after the NOTE that starts the function.
1694        If this is inside a start_sequence, make the outer-level insn
1695        chain current, so the code is placed at the start of the
1696        function.  If internal_arg_pointer is a non-virtual pseudo,
1697        it needs to be placed after the insn that initializes
1698        that pseudo.  */
1699     push_topmost_sequence ();
1700     if (REG_P (crtl->args.internal_arg_pointer)
1701 	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1702       emit_insn_before (seq, parm_birth_insn);
1703     else
1704       emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1705     pop_topmost_sequence ();
1706     return temp;
1707   }
1708 }
1709 
1710 /* Perform an untyped call and save the state required to perform an
1711    untyped return of whatever value was returned by the given function.  */
1712 
1713 static rtx
1714 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1715 {
1716   int size, align, regno;
1717   fixed_size_mode mode;
1718   rtx incoming_args, result, reg, dest, src;
1719   rtx_call_insn *call_insn;
1720   rtx old_stack_level = 0;
1721   rtx call_fusage = 0;
1722   rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1723 
1724   arguments = convert_memory_address (Pmode, arguments);
1725 
1726   /* Create a block where the return registers can be saved.  */
1727   result = assign_stack_local (BLKmode, apply_result_size (), -1);
1728 
1729   /* Fetch the arg pointer from the ARGUMENTS block.  */
1730   incoming_args = gen_reg_rtx (Pmode);
1731   emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1732   if (!STACK_GROWS_DOWNWARD)
1733     incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1734 					 incoming_args, 0, OPTAB_LIB_WIDEN);
1735 
1736   /* Push a new argument block and copy the arguments.  Do not allow
1737      the (potential) memcpy call below to interfere with our stack
1738      manipulations.  */
1739   do_pending_stack_adjust ();
1740   NO_DEFER_POP;
1741 
1742   /* Save the stack with nonlocal if available.  */
1743   if (targetm.have_save_stack_nonlocal ())
1744     emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1745   else
1746     emit_stack_save (SAVE_BLOCK, &old_stack_level);
1747 
1748   /* Allocate a block of memory onto the stack and copy the memory
1749      arguments to the outgoing arguments address.  We can pass TRUE
1750      as the last argument because we just saved the stack pointer
1751      and will restore it right after the call.  */
1752   allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
1753 
1754   /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1755      may have already set current_function_calls_alloca to true.
1756      current_function_calls_alloca won't be set if argsize is zero,
1757      so we have to guarantee need_drap is true here.  */
1758   if (SUPPORTS_STACK_ALIGNMENT)
1759     crtl->need_drap = true;
1760 
1761   dest = virtual_outgoing_args_rtx;
1762   if (!STACK_GROWS_DOWNWARD)
1763     {
1764       if (CONST_INT_P (argsize))
1765 	dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1766       else
1767 	dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1768     }
1769   dest = gen_rtx_MEM (BLKmode, dest);
1770   set_mem_align (dest, PARM_BOUNDARY);
1771   src = gen_rtx_MEM (BLKmode, incoming_args);
1772   set_mem_align (src, PARM_BOUNDARY);
1773   emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1774 
1775   /* Refer to the argument block.  */
1776   apply_args_size ();
1777   arguments = gen_rtx_MEM (BLKmode, arguments);
1778   set_mem_align (arguments, PARM_BOUNDARY);
1779 
1780   /* Walk past the arg-pointer and structure value address.  */
1781   size = GET_MODE_SIZE (Pmode);
1782   if (struct_value)
1783     size += GET_MODE_SIZE (Pmode);
1784 
1785   /* Restore each of the registers previously saved.  Make USE insns
1786      for each of these registers for use in making the call.  */
1787   for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1788     if ((mode = apply_args_mode[regno]) != VOIDmode)
1789       {
1790 	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1791 	if (size % align != 0)
1792 	  size = CEIL (size, align) * align;
1793 	reg = gen_rtx_REG (mode, regno);
1794 	emit_move_insn (reg, adjust_address (arguments, mode, size));
1795 	use_reg (&call_fusage, reg);
1796 	size += GET_MODE_SIZE (mode);
1797       }
1798 
1799   /* Restore the structure value address unless this is passed as an
1800      "invisible" first argument.  */
1801   size = GET_MODE_SIZE (Pmode);
1802   if (struct_value)
1803     {
1804       rtx value = gen_reg_rtx (Pmode);
1805       emit_move_insn (value, adjust_address (arguments, Pmode, size));
1806       emit_move_insn (struct_value, value);
1807       if (REG_P (struct_value))
1808 	use_reg (&call_fusage, struct_value);
1809       size += GET_MODE_SIZE (Pmode);
1810     }
1811 
1812   /* All arguments and registers used for the call are set up by now!  */
1813   function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1814 
1815   /* Ensure the address is valid.  A SYMBOL_REF is already valid, so
1816      nothing needs doing, and we don't want to load it into a register;
1817      prepare_call_address already did that if it should be done.  */
1818   if (GET_CODE (function) != SYMBOL_REF)
1819     function = memory_address (FUNCTION_MODE, function);
1820 
1821   /* Generate the actual call instruction and save the return value.  */
1822   if (targetm.have_untyped_call ())
1823     {
1824       rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1825       emit_call_insn (targetm.gen_untyped_call (mem, result,
1826 						result_vector (1, result)));
1827     }
1828   else if (targetm.have_call_value ())
1829     {
1830       rtx valreg = 0;
1831 
1832       /* Locate the unique return register.  It is not possible to
1833 	 express a call that sets more than one return register using
1834 	 call_value; use untyped_call for that.  In fact, untyped_call
1835 	 only needs to save the return registers in the given block.  */
1836       for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1837 	if ((mode = apply_result_mode[regno]) != VOIDmode)
1838 	  {
1839 	    gcc_assert (!valreg); /* have_untyped_call required.  */
1840 
1841 	    valreg = gen_rtx_REG (mode, regno);
1842 	  }
1843 
1844       emit_insn (targetm.gen_call_value (valreg,
1845 					 gen_rtx_MEM (FUNCTION_MODE, function),
1846 					 const0_rtx, NULL_RTX, const0_rtx));
1847 
1848       emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1849     }
1850   else
1851     gcc_unreachable ();
1852 
1853   /* Find the CALL insn we just emitted, and attach the register usage
1854      information.  */
1855   call_insn = last_call_insn ();
1856   add_function_usage_to (call_insn, call_fusage);
1857 
1858   /* Restore the stack.  */
1859   if (targetm.have_save_stack_nonlocal ())
1860     emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1861   else
1862     emit_stack_restore (SAVE_BLOCK, old_stack_level);
1863   fixup_args_size_notes (call_insn, get_last_insn (), 0);
1864 
1865   OK_DEFER_POP;
1866 
1867   /* Return the address of the result block.  */
1868   result = copy_addr_to_reg (XEXP (result, 0));
1869   return convert_memory_address (ptr_mode, result);
1870 }
1871 
1872 /* Perform an untyped return.  */
1873 
1874 static void
1875 expand_builtin_return (rtx result)
1876 {
1877   int size, align, regno;
1878   fixed_size_mode mode;
1879   rtx reg;
1880   rtx_insn *call_fusage = 0;
1881 
1882   result = convert_memory_address (Pmode, result);
1883 
1884   apply_result_size ();
1885   result = gen_rtx_MEM (BLKmode, result);
1886 
1887   if (targetm.have_untyped_return ())
1888     {
1889       rtx vector = result_vector (0, result);
1890       emit_jump_insn (targetm.gen_untyped_return (result, vector));
1891       emit_barrier ();
1892       return;
1893     }
1894 
1895   /* Restore the return value and note that each value is used.  */
1896   size = 0;
1897   for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1898     if ((mode = apply_result_mode[regno]) != VOIDmode)
1899       {
1900 	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1901 	if (size % align != 0)
1902 	  size = CEIL (size, align) * align;
1903 	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1904 	emit_move_insn (reg, adjust_address (result, mode, size));
1905 
1906 	push_to_sequence (call_fusage);
1907 	emit_use (reg);
1908 	call_fusage = get_insns ();
1909 	end_sequence ();
1910 	size += GET_MODE_SIZE (mode);
1911       }
1912 
1913   /* Put the USE insns before the return.  */
1914   emit_insn (call_fusage);
1915 
1916   /* Return whatever values were restored by jumping directly to the end
1917      of the function.  */
1918   expand_naked_return ();
1919 }
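
/* A usage sketch tying the three expanders above together (illustrative;
   TARGET_FN and the block size 64 are hypothetical):

     void *args = __builtin_apply_args ();
     void *result = __builtin_apply ((void (*) ()) target_fn, args, 64);
     __builtin_return (result);

   forwards the current function's arguments to TARGET_FN and returns
   whatever value TARGET_FN produced, without knowing either signature.  */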
1920 
1921 /* Used by expand_builtin_classify_type and fold_builtin_classify_type.  */
1922 
1923 static enum type_class
1924 type_to_class (tree type)
1925 {
1926   switch (TREE_CODE (type))
1927     {
1928     case VOID_TYPE:	   return void_type_class;
1929     case INTEGER_TYPE:	   return integer_type_class;
1930     case ENUMERAL_TYPE:	   return enumeral_type_class;
1931     case BOOLEAN_TYPE:	   return boolean_type_class;
1932     case POINTER_TYPE:	   return pointer_type_class;
1933     case REFERENCE_TYPE:   return reference_type_class;
1934     case OFFSET_TYPE:	   return offset_type_class;
1935     case REAL_TYPE:	   return real_type_class;
1936     case COMPLEX_TYPE:	   return complex_type_class;
1937     case FUNCTION_TYPE:	   return function_type_class;
1938     case METHOD_TYPE:	   return method_type_class;
1939     case RECORD_TYPE:	   return record_type_class;
1940     case UNION_TYPE:
1941     case QUAL_UNION_TYPE:  return union_type_class;
1942     case ARRAY_TYPE:	   return (TYPE_STRING_FLAG (type)
1943 				   ? string_type_class : array_type_class);
1944     case LANG_TYPE:	   return lang_type_class;
1945     default:		   return no_type_class;
1946     }
1947 }
1948 
1949 /* Expand a call EXP to __builtin_classify_type.  */
1950 
1951 static rtx
1952 expand_builtin_classify_type (tree exp)
1953 {
1954   if (call_expr_nargs (exp))
1955     return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1956   return GEN_INT (no_type_class);
1957 }
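
/* E.g. __builtin_classify_type (3.5) expands to the constant
   real_type_class and __builtin_classify_type ((char *) 0) to
   pointer_type_class, following the mapping in type_to_class above.  */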
1958 
1959 /* This helper macro, meant to be used in mathfn_built_in below, determines
1960    which among a set of builtin math functions is appropriate for a given type
1961    mode.  The `F' (float) and `L' (long double) are automatically generated
1962    from the 'double' case.  If a function supports the _Float<N> and _Float<N>X
1963    types, there are additional types that are considered with 'F32', 'F64',
1964    'F128', etc. suffixes.  */
1965 #define CASE_MATHFN(MATHFN) \
1966   CASE_CFN_##MATHFN: \
1967   fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1968   fcodel = BUILT_IN_##MATHFN##L ; break;
1969 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1970    types.  */
1971 #define CASE_MATHFN_FLOATN(MATHFN) \
1972   CASE_CFN_##MATHFN: \
1973   fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1974   fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1975   fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1976   fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1977   fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1978   break;
1979 /* Similar to above, but appends _R after any F/L suffix.  */
1980 #define CASE_MATHFN_REENT(MATHFN) \
1981   case CFN_BUILT_IN_##MATHFN##_R: \
1982   case CFN_BUILT_IN_##MATHFN##F_R: \
1983   case CFN_BUILT_IN_##MATHFN##L_R: \
1984   fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1985   fcodel = BUILT_IN_##MATHFN##L_R ; break;
1986 
1987 /* Return a function equivalent to FN but operating on floating-point
1988    values of type TYPE, or END_BUILTINS if no such function exists.
1989    This is purely an operation on function codes; it does not guarantee
1990    that the target actually has an implementation of the function.  */
1991 
1992 static built_in_function
1993 mathfn_built_in_2 (tree type, combined_fn fn)
1994 {
1995   tree mtype;
1996   built_in_function fcode, fcodef, fcodel;
1997   built_in_function fcodef16 = END_BUILTINS;
1998   built_in_function fcodef32 = END_BUILTINS;
1999   built_in_function fcodef64 = END_BUILTINS;
2000   built_in_function fcodef128 = END_BUILTINS;
2001   built_in_function fcodef32x = END_BUILTINS;
2002   built_in_function fcodef64x = END_BUILTINS;
2003   built_in_function fcodef128x = END_BUILTINS;
2004 
2005   switch (fn)
2006     {
2007     CASE_MATHFN (ACOS)
2008     CASE_MATHFN (ACOSH)
2009     CASE_MATHFN (ASIN)
2010     CASE_MATHFN (ASINH)
2011     CASE_MATHFN (ATAN)
2012     CASE_MATHFN (ATAN2)
2013     CASE_MATHFN (ATANH)
2014     CASE_MATHFN (CBRT)
2015     CASE_MATHFN_FLOATN (CEIL)
2016     CASE_MATHFN (CEXPI)
2017     CASE_MATHFN_FLOATN (COPYSIGN)
2018     CASE_MATHFN (COS)
2019     CASE_MATHFN (COSH)
2020     CASE_MATHFN (DREM)
2021     CASE_MATHFN (ERF)
2022     CASE_MATHFN (ERFC)
2023     CASE_MATHFN (EXP)
2024     CASE_MATHFN (EXP10)
2025     CASE_MATHFN (EXP2)
2026     CASE_MATHFN (EXPM1)
2027     CASE_MATHFN (FABS)
2028     CASE_MATHFN (FDIM)
2029     CASE_MATHFN_FLOATN (FLOOR)
2030     CASE_MATHFN_FLOATN (FMA)
2031     CASE_MATHFN_FLOATN (FMAX)
2032     CASE_MATHFN_FLOATN (FMIN)
2033     CASE_MATHFN (FMOD)
2034     CASE_MATHFN (FREXP)
2035     CASE_MATHFN (GAMMA)
2036     CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
2037     CASE_MATHFN (HUGE_VAL)
2038     CASE_MATHFN (HYPOT)
2039     CASE_MATHFN (ILOGB)
2040     CASE_MATHFN (ICEIL)
2041     CASE_MATHFN (IFLOOR)
2042     CASE_MATHFN (INF)
2043     CASE_MATHFN (IRINT)
2044     CASE_MATHFN (IROUND)
2045     CASE_MATHFN (ISINF)
2046     CASE_MATHFN (J0)
2047     CASE_MATHFN (J1)
2048     CASE_MATHFN (JN)
2049     CASE_MATHFN (LCEIL)
2050     CASE_MATHFN (LDEXP)
2051     CASE_MATHFN (LFLOOR)
2052     CASE_MATHFN (LGAMMA)
2053     CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
2054     CASE_MATHFN (LLCEIL)
2055     CASE_MATHFN (LLFLOOR)
2056     CASE_MATHFN (LLRINT)
2057     CASE_MATHFN (LLROUND)
2058     CASE_MATHFN (LOG)
2059     CASE_MATHFN (LOG10)
2060     CASE_MATHFN (LOG1P)
2061     CASE_MATHFN (LOG2)
2062     CASE_MATHFN (LOGB)
2063     CASE_MATHFN (LRINT)
2064     CASE_MATHFN (LROUND)
2065     CASE_MATHFN (MODF)
2066     CASE_MATHFN (NAN)
2067     CASE_MATHFN (NANS)
2068     CASE_MATHFN_FLOATN (NEARBYINT)
2069     CASE_MATHFN (NEXTAFTER)
2070     CASE_MATHFN (NEXTTOWARD)
2071     CASE_MATHFN (POW)
2072     CASE_MATHFN (POWI)
2073     CASE_MATHFN (POW10)
2074     CASE_MATHFN (REMAINDER)
2075     CASE_MATHFN (REMQUO)
2076     CASE_MATHFN_FLOATN (RINT)
2077     CASE_MATHFN_FLOATN (ROUND)
2078     CASE_MATHFN (SCALB)
2079     CASE_MATHFN (SCALBLN)
2080     CASE_MATHFN (SCALBN)
2081     CASE_MATHFN (SIGNBIT)
2082     CASE_MATHFN (SIGNIFICAND)
2083     CASE_MATHFN (SIN)
2084     CASE_MATHFN (SINCOS)
2085     CASE_MATHFN (SINH)
2086     CASE_MATHFN_FLOATN (SQRT)
2087     CASE_MATHFN (TAN)
2088     CASE_MATHFN (TANH)
2089     CASE_MATHFN (TGAMMA)
2090     CASE_MATHFN_FLOATN (TRUNC)
2091     CASE_MATHFN (Y0)
2092     CASE_MATHFN (Y1)
2093     CASE_MATHFN (YN)
2094 
2095     default:
2096       return END_BUILTINS;
2097     }
2098 
2099   mtype = TYPE_MAIN_VARIANT (type);
2100   if (mtype == double_type_node)
2101     return fcode;
2102   else if (mtype == float_type_node)
2103     return fcodef;
2104   else if (mtype == long_double_type_node)
2105     return fcodel;
2106   else if (mtype == float16_type_node)
2107     return fcodef16;
2108   else if (mtype == float32_type_node)
2109     return fcodef32;
2110   else if (mtype == float64_type_node)
2111     return fcodef64;
2112   else if (mtype == float128_type_node)
2113     return fcodef128;
2114   else if (mtype == float32x_type_node)
2115     return fcodef32x;
2116   else if (mtype == float64x_type_node)
2117     return fcodef64x;
2118   else if (mtype == float128x_type_node)
2119     return fcodef128x;
2120   else
2121     return END_BUILTINS;
2122 }
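
/* E.g. mathfn_built_in_2 (float_type_node, CFN_SQRT) yields
   BUILT_IN_SQRTF and mathfn_built_in_2 (long_double_type_node, CFN_SQRT)
   yields BUILT_IN_SQRTL, while a type with no matching variant (or a
   non-math FN) yields END_BUILTINS.  */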
2123 
2124 /* Return a mathematical function equivalent to FN but operating directly on
2125    TYPE, if available.  If IMPLICIT_P is true use the implicit builtin
2126    declaration, otherwise use the explicit declaration.  If we can't do the
2127    conversion, return null.  */
2128 
2129 static tree
2130 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2131 {
2132   built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2133   if (fcode2 == END_BUILTINS)
2134     return NULL_TREE;
2135 
2136   if (implicit_p && !builtin_decl_implicit_p (fcode2))
2137     return NULL_TREE;
2138 
2139   return builtin_decl_explicit (fcode2);
2140 }
2141 
2142 /* Like mathfn_built_in_1, but always use the implicit builtin declaration.  */
2143 
2144 tree
2145 mathfn_built_in (tree type, combined_fn fn)
2146 {
2147   return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2148 }
2149 
2150 /* Like mathfn_built_in_1, but take a built_in_function and
2151    always use the implicit array.  */
2152 
2153 tree
2154 mathfn_built_in (tree type, enum built_in_function fn)
2155 {
2156   return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
2157 }
2158 
2159 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2160    return its code, otherwise return IFN_LAST.  Note that this function
2161    only tests whether the function is defined in internal-fn.def, not whether
2162    it is actually available on the target.  */
2163 
2164 internal_fn
2165 associated_internal_fn (tree fndecl)
2166 {
2167   gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2168   tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2169   switch (DECL_FUNCTION_CODE (fndecl))
2170     {
2171 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2172     CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2173 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2174     CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2175     CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2176 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2177     CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2178 #include "internal-fn.def"
2179 
2180     CASE_FLT_FN (BUILT_IN_POW10):
2181       return IFN_EXP10;
2182 
2183     CASE_FLT_FN (BUILT_IN_DREM):
2184       return IFN_REMAINDER;
2185 
2186     CASE_FLT_FN (BUILT_IN_SCALBN):
2187     CASE_FLT_FN (BUILT_IN_SCALBLN):
2188       if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2189 	return IFN_LDEXP;
2190       return IFN_LAST;
2191 
2192     default:
2193       return IFN_LAST;
2194     }
2195 }
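
/* E.g. the BUILT_IN_SQRT family maps to IFN_SQRT via the
   internal-fn.def expansion above, while BUILT_IN_SCALBN maps to
   IFN_LDEXP only when the return type's floating-point format has
   radix 2.  */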
2196 
2197 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2198    on the current target by a call to an internal function, return the
2199    code of that internal function, otherwise return IFN_LAST.  The caller
2200    is responsible for ensuring that any side-effects of the built-in
2201    call are dealt with correctly.  E.g. if CALL sets errno, the caller
2202    must decide that the errno result isn't needed or make it available
2203    in some other way.  */
2204 
2205 internal_fn
2206 replacement_internal_fn (gcall *call)
2207 {
2208   if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2209     {
2210       internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2211       if (ifn != IFN_LAST)
2212 	{
2213 	  tree_pair types = direct_internal_fn_types (ifn, call);
2214 	  optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2215 	  if (direct_internal_fn_supported_p (ifn, types, opt_type))
2216 	    return ifn;
2217 	}
2218     }
2219   return IFN_LAST;
2220 }
2221 
2222 /* Expand a call to the builtin ternary math functions (fma).
2223    Return NULL_RTX if a normal call should be emitted rather than expanding the
2224    function in-line.  EXP is the expression that is a call to the builtin
2225    function; if convenient, the result should be placed in TARGET.
2226    SUBTARGET may be used as the target for computing one of EXP's
2227    operands.  */
2228 
2229 static rtx
2230 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2231 {
2232   optab builtin_optab;
2233   rtx op0, op1, op2, result;
2234   rtx_insn *insns;
2235   tree fndecl = get_callee_fndecl (exp);
2236   tree arg0, arg1, arg2;
2237   machine_mode mode;
2238 
2239   if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2240     return NULL_RTX;
2241 
2242   arg0 = CALL_EXPR_ARG (exp, 0);
2243   arg1 = CALL_EXPR_ARG (exp, 1);
2244   arg2 = CALL_EXPR_ARG (exp, 2);
2245 
2246   switch (DECL_FUNCTION_CODE (fndecl))
2247     {
2248     CASE_FLT_FN (BUILT_IN_FMA):
2249     CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2250       builtin_optab = fma_optab; break;
2251     default:
2252       gcc_unreachable ();
2253     }
2254 
2255   /* Make a suitable register to place result in.  */
2256   mode = TYPE_MODE (TREE_TYPE (exp));
2257 
2258   /* Before working hard, check whether the instruction is available.  */
2259   if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2260     return NULL_RTX;
2261 
2262   result = gen_reg_rtx (mode);
2263 
2264   /* Always stabilize the argument list.  */
2265   CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2266   CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2267   CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2268 
2269   op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2270   op1 = expand_normal (arg1);
2271   op2 = expand_normal (arg2);
2272 
2273   start_sequence ();
2274 
2275   /* Compute into RESULT.
2276      Set RESULT to wherever the result comes back.  */
2277   result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2278 			      result, 0);
2279 
2280   /* If we were unable to expand via the builtin, stop the sequence
2281      (without outputting the insns) and call the library function
2282      with the stabilized argument list.  */
2283   if (result == 0)
2284     {
2285       end_sequence ();
2286       return expand_call (exp, target, target == const0_rtx);
2287     }
2288 
2289   /* Output the entire sequence.  */
2290   insns = get_insns ();
2291   end_sequence ();
2292   emit_insn (insns);
2293 
2294   return result;
2295 }
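
/* E.g. __builtin_fma (a, b, c) expands here to a single fused
   multiply-add insn when fma_optab has a handler for the result mode;
   otherwise the call to the fma library function is emitted with the
   stabilized argument list.  */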
2296 
2297 /* Expand a call to the builtin sin and cos math functions.
2298    Return NULL_RTX if a normal call should be emitted rather than expanding the
2299    function in-line.  EXP is the expression that is a call to the builtin
2300    function; if convenient, the result should be placed in TARGET.
2301    SUBTARGET may be used as the target for computing one of EXP's
2302    operands.  */
2303 
2304 static rtx
2305 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2306 {
2307   optab builtin_optab;
2308   rtx op0;
2309   rtx_insn *insns;
2310   tree fndecl = get_callee_fndecl (exp);
2311   machine_mode mode;
2312   tree arg;
2313 
2314   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2315     return NULL_RTX;
2316 
2317   arg = CALL_EXPR_ARG (exp, 0);
2318 
2319   switch (DECL_FUNCTION_CODE (fndecl))
2320     {
2321     CASE_FLT_FN (BUILT_IN_SIN):
2322     CASE_FLT_FN (BUILT_IN_COS):
2323       builtin_optab = sincos_optab; break;
2324     default:
2325       gcc_unreachable ();
2326     }
2327 
2328   /* Make a suitable register to place result in.  */
2329   mode = TYPE_MODE (TREE_TYPE (exp));
2330 
2331   /* Check if the sincos insn is available, otherwise fall back
2332      to the sin or cos insn.  */
2333   if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2334     switch (DECL_FUNCTION_CODE (fndecl))
2335       {
2336       CASE_FLT_FN (BUILT_IN_SIN):
2337 	builtin_optab = sin_optab; break;
2338       CASE_FLT_FN (BUILT_IN_COS):
2339 	builtin_optab = cos_optab; break;
2340       default:
2341 	gcc_unreachable ();
2342       }
2343 
2344   /* Before working hard, check whether the instruction is available.  */
2345   if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2346     {
2347       rtx result = gen_reg_rtx (mode);
2348 
2349       /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2350 	 need to expand the argument again.  This way, we will not perform
2351 	 side-effects more than once.  */
2352       CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2353 
2354       op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2355 
2356       start_sequence ();
2357 
2358       /* Compute into RESULT.
2359 	 Set RESULT to wherever the result comes back.  */
2360       if (builtin_optab == sincos_optab)
2361 	{
2362 	  int ok;
2363 
2364 	  switch (DECL_FUNCTION_CODE (fndecl))
2365 	    {
2366 	    CASE_FLT_FN (BUILT_IN_SIN):
2367 	      ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2368 	      break;
2369 	    CASE_FLT_FN (BUILT_IN_COS):
2370 	      ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2371 	      break;
2372 	    default:
2373 	      gcc_unreachable ();
2374 	    }
2375 	  gcc_assert (ok);
2376 	}
2377       else
2378 	result = expand_unop (mode, builtin_optab, op0, result, 0);
2379 
2380       if (result != 0)
2381 	{
2382 	  /* Output the entire sequence.  */
2383 	  insns = get_insns ();
2384 	  end_sequence ();
2385 	  emit_insn (insns);
2386 	  return result;
2387 	}
2388 
2389       /* If we were unable to expand via the builtin, stop the sequence
2390 	 (without outputting the insns) and call the library function
2391 	 with the stabilized argument list.  */
2392       end_sequence ();
2393     }
2394 
2395   return expand_call (exp, target, target == const0_rtx);
2396 }
2397 
2398 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2399    return an RTL instruction code that implements the functionality.
2400    If that isn't possible or available return CODE_FOR_nothing.  */
2401 
2402 static enum insn_code
2403 interclass_mathfn_icode (tree arg, tree fndecl)
2404 {
2405   bool errno_set = false;
2406   optab builtin_optab = unknown_optab;
2407   machine_mode mode;
2408 
2409   switch (DECL_FUNCTION_CODE (fndecl))
2410     {
2411     CASE_FLT_FN (BUILT_IN_ILOGB):
2412       errno_set = true; builtin_optab = ilogb_optab; break;
2413     CASE_FLT_FN (BUILT_IN_ISINF):
2414       builtin_optab = isinf_optab; break;
2415     case BUILT_IN_ISNORMAL:
2416     case BUILT_IN_ISFINITE:
2417     CASE_FLT_FN (BUILT_IN_FINITE):
2418     case BUILT_IN_FINITED32:
2419     case BUILT_IN_FINITED64:
2420     case BUILT_IN_FINITED128:
2421     case BUILT_IN_ISINFD32:
2422     case BUILT_IN_ISINFD64:
2423     case BUILT_IN_ISINFD128:
2424       /* These builtins have no optabs (yet).  */
2425       break;
2426     default:
2427       gcc_unreachable ();
2428     }
2429 
2430   /* There's no easy way to detect the case we need to set EDOM.  */
2431   if (flag_errno_math && errno_set)
2432     return CODE_FOR_nothing;
2433 
2434   /* Optab mode depends on the mode of the input argument.  */
2435   mode = TYPE_MODE (TREE_TYPE (arg));
2436 
2437   if (builtin_optab)
2438     return optab_handler (builtin_optab, mode);
2439   return CODE_FOR_nothing;
2440 }
2441 
2442 /* Expand a call to one of the builtin math functions that operate on
2443    a floating point argument and output an integer result (ilogb, isinf,
2444    isnan, etc).
2445    Return 0 if a normal call should be emitted rather than expanding the
2446    function in-line.  EXP is the expression that is a call to the builtin
2447    function; if convenient, the result should be placed in TARGET.  */
2448 
2449 static rtx
2450 expand_builtin_interclass_mathfn (tree exp, rtx target)
2451 {
2452   enum insn_code icode = CODE_FOR_nothing;
2453   rtx op0;
2454   tree fndecl = get_callee_fndecl (exp);
2455   machine_mode mode;
2456   tree arg;
2457 
2458   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2459     return NULL_RTX;
2460 
2461   arg = CALL_EXPR_ARG (exp, 0);
2462   icode = interclass_mathfn_icode (arg, fndecl);
2463   mode = TYPE_MODE (TREE_TYPE (arg));
2464 
2465   if (icode != CODE_FOR_nothing)
2466     {
2467       struct expand_operand ops[1];
2468       rtx_insn *last = get_last_insn ();
2469       tree orig_arg = arg;
2470 
2471       /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2472 	 need to expand the argument again.  This way, we will not perform
2473 	 side-effects more than once.  */
2474       CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2475 
2476       op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2477 
2478       if (mode != GET_MODE (op0))
2479 	op0 = convert_to_mode (mode, op0, 0);
2480 
2481       create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2482       if (maybe_legitimize_operands (icode, 0, 1, ops)
2483 	  && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2484 	return ops[0].value;
2485 
2486       delete_insns_since (last);
2487       CALL_EXPR_ARG (exp, 0) = orig_arg;
2488     }
2489 
2490   return NULL_RTX;
2491 }
2492 
2493 /* Expand a call to the builtin sincos math function.
2494    Return NULL_RTX if a normal call should be emitted rather than expanding the
2495    function in-line.  EXP is the expression that is a call to the builtin
2496    function.  */
2497 
2498 static rtx
2499 expand_builtin_sincos (tree exp)
2500 {
2501   rtx op0, op1, op2, target1, target2;
2502   machine_mode mode;
2503   tree arg, sinp, cosp;
2504   int result;
2505   location_t loc = EXPR_LOCATION (exp);
2506   tree alias_type, alias_off;
2507 
2508   if (!validate_arglist (exp, REAL_TYPE,
2509  			 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2510     return NULL_RTX;
2511 
2512   arg = CALL_EXPR_ARG (exp, 0);
2513   sinp = CALL_EXPR_ARG (exp, 1);
2514   cosp = CALL_EXPR_ARG (exp, 2);
2515 
2516   /* Make a suitable register to place result in.  */
2517   mode = TYPE_MODE (TREE_TYPE (arg));
2518 
2519   /* Check if sincos insn is available, otherwise emit the call.  */
2520   if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2521     return NULL_RTX;
2522 
2523   target1 = gen_reg_rtx (mode);
2524   target2 = gen_reg_rtx (mode);
2525 
2526   op0 = expand_normal (arg);
2527   alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2528   alias_off = build_int_cst (alias_type, 0);
2529   op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2530 					sinp, alias_off));
2531   op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2532 					cosp, alias_off));
2533 
2534   /* Compute into target1 and target2, which are then stored to the
2535      memory locations SINP and COSP point to.  */
2536   result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2537   gcc_assert (result);
2538 
2539   /* Move target1 and target2 to the memory locations indicated
2540      by op1 and op2.  */
2541   emit_move_insn (op1, target1);
2542   emit_move_insn (op2, target2);
2543 
2544   return const0_rtx;
2545 }
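
/* E.g. sincos (x, &s, &c) expands to one sincos_optab insn computing
   both results at once when the target provides it; otherwise NULL_RTX
   is returned above and a normal library call is emitted.  */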
2546 
2547 /* Expand a call to the internal cexpi builtin via the sincos math function.
2548    EXP is the expression that is a call to the builtin function; if convenient,
2549    the result should be placed in TARGET.  */
2550 
2551 static rtx
2552 expand_builtin_cexpi (tree exp, rtx target)
2553 {
2554   tree fndecl = get_callee_fndecl (exp);
2555   tree arg, type;
2556   machine_mode mode;
2557   rtx op0, op1, op2;
2558   location_t loc = EXPR_LOCATION (exp);
2559 
2560   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2561     return NULL_RTX;
2562 
2563   arg = CALL_EXPR_ARG (exp, 0);
2564   type = TREE_TYPE (arg);
2565   mode = TYPE_MODE (TREE_TYPE (arg));
2566 
2567   /* Try expanding via a sincos optab, fall back to emitting a libcall
2568      to sincos or cexp.  We are sure we have sincos or cexp because cexpi
2569      is only generated from sincos or cexp, or if we have either of them.  */
2570   if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2571     {
2572       op1 = gen_reg_rtx (mode);
2573       op2 = gen_reg_rtx (mode);
2574 
2575       op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2576 
2577       /* Compute into op1 and op2.  */
2578       expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2579     }
2580   else if (targetm.libc_has_function (function_sincos))
2581     {
2582       tree call, fn = NULL_TREE;
2583       tree top1, top2;
2584       rtx op1a, op2a;
2585 
2586       if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2587 	fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2588       else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2589 	fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2590       else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2591 	fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2592       else
2593 	gcc_unreachable ();
2594 
2595       op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2596       op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2597       op1a = copy_addr_to_reg (XEXP (op1, 0));
2598       op2a = copy_addr_to_reg (XEXP (op2, 0));
2599       top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2600       top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2601 
2602       /* Make sure not to fold the sincos call again.  */
2603       call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2604       expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2605 				      call, 3, arg, top1, top2));
2606     }
2607   else
2608     {
2609       tree call, fn = NULL_TREE, narg;
2610       tree ctype = build_complex_type (type);
2611 
2612       if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2613 	fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2614       else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2615 	fn = builtin_decl_explicit (BUILT_IN_CEXP);
2616       else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2617 	fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2618       else
2619 	gcc_unreachable ();
2620 
2621       /* If we don't have a decl for cexp create one.  This is the
2622 	 friendliest fallback if the user calls __builtin_cexpi
2623 	 without full C99 function support on the target.  */
2624       if (fn == NULL_TREE)
2625 	{
2626 	  tree fntype;
2627 	  const char *name = NULL;
2628 
2629 	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2630 	    name = "cexpf";
2631 	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2632 	    name = "cexp";
2633 	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2634 	    name = "cexpl";
2635 
2636 	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2637 	  fn = build_fn_decl (name, fntype);
2638 	}
2639 
2640       narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2641 			  build_real (type, dconst0), arg);
2642 
2643       /* Make sure not to fold the cexp call again.  */
2644       call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2645       return expand_expr (build_call_nary (ctype, call, 1, narg),
2646 			  target, VOIDmode, EXPAND_NORMAL);
2647     }
2648 
2649   /* Now build the proper return type.  */
2650   return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2651 			      make_tree (TREE_TYPE (arg), op2),
2652 			      make_tree (TREE_TYPE (arg), op1)),
2653 		      target, VOIDmode, EXPAND_NORMAL);
2654 }
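
/* The expansion above relies on Euler's formula,
   cexpi (x) == cos (x) + i*sin (x), which is why the two sincos
   results become the real and imaginary parts of the returned
   complex value.  */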
2655 
2656 /* Conveniently construct a function call expression.  FNDECL names the
2657    function to be called, N is the number of arguments, and the "..."
2658    parameters are the argument expressions.  Unlike build_call_expr
2659    this doesn't fold the call, hence it will always return a CALL_EXPR.  */
2660 
2661 static tree
2662 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2663 {
2664   va_list ap;
2665   tree fntype = TREE_TYPE (fndecl);
2666   tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2667 
2668   va_start (ap, n);
2669   fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2670   va_end (ap);
2671   SET_EXPR_LOCATION (fn, loc);
2672   return fn;
2673 }
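
/* E.g. build_call_nofold_loc (loc, fndecl, 1, arg) yields the unfolded
   CALL_EXPR fndecl (arg); the rounding-function fallbacks below use
   exactly this form to emit library calls.  */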
2674 
2675 /* Expand a call to one of the builtin rounding functions gcc defines
2676    as an extension (lfloor and lceil).  As these are gcc extensions we
2677    do not need to worry about setting errno to EDOM.
2678    If expanding via optab fails, lower expression to (int)(floor(x)).
2679    EXP is the expression that is a call to the builtin function;
2680    if convenient, the result should be placed in TARGET.  */
2681 
2682 static rtx
2683 expand_builtin_int_roundingfn (tree exp, rtx target)
2684 {
2685   convert_optab builtin_optab;
2686   rtx op0, tmp;
2687   rtx_insn *insns;
2688   tree fndecl = get_callee_fndecl (exp);
2689   enum built_in_function fallback_fn;
2690   tree fallback_fndecl;
2691   machine_mode mode;
2692   tree arg;
2693 
2694   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2695     return NULL_RTX;
2696 
2697   arg = CALL_EXPR_ARG (exp, 0);
2698 
2699   switch (DECL_FUNCTION_CODE (fndecl))
2700     {
2701     CASE_FLT_FN (BUILT_IN_ICEIL):
2702     CASE_FLT_FN (BUILT_IN_LCEIL):
2703     CASE_FLT_FN (BUILT_IN_LLCEIL):
2704       builtin_optab = lceil_optab;
2705       fallback_fn = BUILT_IN_CEIL;
2706       break;
2707 
2708     CASE_FLT_FN (BUILT_IN_IFLOOR):
2709     CASE_FLT_FN (BUILT_IN_LFLOOR):
2710     CASE_FLT_FN (BUILT_IN_LLFLOOR):
2711       builtin_optab = lfloor_optab;
2712       fallback_fn = BUILT_IN_FLOOR;
2713       break;
2714 
2715     default:
2716       gcc_unreachable ();
2717     }
2718 
2719   /* Make a suitable register to place result in.  */
2720   mode = TYPE_MODE (TREE_TYPE (exp));
2721 
2722   target = gen_reg_rtx (mode);
2723 
2724   /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2725      need to expand the argument again.  This way, we will not perform
2726      side-effects more than once.  */
2727   CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2728 
2729   op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2730 
2731   start_sequence ();
2732 
2733   /* Compute into TARGET.  */
2734   if (expand_sfix_optab (target, op0, builtin_optab))
2735     {
2736       /* Output the entire sequence.  */
2737       insns = get_insns ();
2738       end_sequence ();
2739       emit_insn (insns);
2740       return target;
2741     }
2742 
2743   /* If we were unable to expand via the builtin, stop the sequence
2744      (without outputting the insns).  */
2745   end_sequence ();
2746 
2747   /* Fall back to floating point rounding optab.  */
2748   fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2749 
2750   /* For non-C99 targets we may end up without a fallback fndecl here
2751      if the user called __builtin_lfloor directly.  In this case emit
2752      a call to the floor/ceil variants nevertheless.  This should result
2753      in the best user experience for targets without full C99 support.  */
2754   if (fallback_fndecl == NULL_TREE)
2755     {
2756       tree fntype;
2757       const char *name = NULL;
2758 
2759       switch (DECL_FUNCTION_CODE (fndecl))
2760 	{
2761 	case BUILT_IN_ICEIL:
2762 	case BUILT_IN_LCEIL:
2763 	case BUILT_IN_LLCEIL:
2764 	  name = "ceil";
2765 	  break;
2766 	case BUILT_IN_ICEILF:
2767 	case BUILT_IN_LCEILF:
2768 	case BUILT_IN_LLCEILF:
2769 	  name = "ceilf";
2770 	  break;
2771 	case BUILT_IN_ICEILL:
2772 	case BUILT_IN_LCEILL:
2773 	case BUILT_IN_LLCEILL:
2774 	  name = "ceill";
2775 	  break;
2776 	case BUILT_IN_IFLOOR:
2777 	case BUILT_IN_LFLOOR:
2778 	case BUILT_IN_LLFLOOR:
2779 	  name = "floor";
2780 	  break;
2781 	case BUILT_IN_IFLOORF:
2782 	case BUILT_IN_LFLOORF:
2783 	case BUILT_IN_LLFLOORF:
2784 	  name = "floorf";
2785 	  break;
2786 	case BUILT_IN_IFLOORL:
2787 	case BUILT_IN_LFLOORL:
2788 	case BUILT_IN_LLFLOORL:
2789 	  name = "floorl";
2790 	  break;
2791 	default:
2792 	  gcc_unreachable ();
2793 	}
2794 
2795       fntype = build_function_type_list (TREE_TYPE (arg),
2796 					 TREE_TYPE (arg), NULL_TREE);
2797       fallback_fndecl = build_fn_decl (name, fntype);
2798     }
2799 
2800   exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2801 
2802   tmp = expand_normal (exp);
2803   tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2804 
2805   /* Truncate the result of the floating point optab to integer
2806      via expand_fix ().  */
2807   target = gen_reg_rtx (mode);
2808   expand_fix (target, tmp, 0);
2809 
2810   return target;
2811 }
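
/* E.g. on a target with no lfloor pattern, long l = __builtin_lfloor (x)
   is lowered as described above: a call to floor (x) followed by an
   expand_fix truncation of the result to long.  */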
2812 
2813 /* Expand a call to one of the builtin math functions doing integer
2814    conversion (lrint).
2815    Return 0 if a normal call should be emitted rather than expanding the
2816    function in-line.  EXP is the expression that is a call to the builtin
2817    function; if convenient, the result should be placed in TARGET.  */
2818 
2819 static rtx
2820 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2821 {
2822   convert_optab builtin_optab;
2823   rtx op0;
2824   rtx_insn *insns;
2825   tree fndecl = get_callee_fndecl (exp);
2826   tree arg;
2827   machine_mode mode;
2828   enum built_in_function fallback_fn = BUILT_IN_NONE;
2829 
2830   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2831     return NULL_RTX;
2832 
2833   arg = CALL_EXPR_ARG (exp, 0);
2834 
2835   switch (DECL_FUNCTION_CODE (fndecl))
2836     {
2837     CASE_FLT_FN (BUILT_IN_IRINT):
2838       fallback_fn = BUILT_IN_LRINT;
2839       gcc_fallthrough ();
2840     CASE_FLT_FN (BUILT_IN_LRINT):
2841     CASE_FLT_FN (BUILT_IN_LLRINT):
2842       builtin_optab = lrint_optab;
2843       break;
2844 
2845     CASE_FLT_FN (BUILT_IN_IROUND):
2846       fallback_fn = BUILT_IN_LROUND;
2847       gcc_fallthrough ();
2848     CASE_FLT_FN (BUILT_IN_LROUND):
2849     CASE_FLT_FN (BUILT_IN_LLROUND):
2850       builtin_optab = lround_optab;
2851       break;
2852 
2853     default:
2854       gcc_unreachable ();
2855     }
2856 
2857   /* There's no easy way to detect the case we need to set EDOM.  */
2858   if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2859     return NULL_RTX;
2860 
2861   /* Make a suitable register to place result in.  */
2862   mode = TYPE_MODE (TREE_TYPE (exp));
2863 
2864   /* Try expanding inline only when errno does not need to be honored.  */
2865   if (!flag_errno_math)
2866     {
2867       rtx result = gen_reg_rtx (mode);
2868 
2869       /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2870 	 need to expand the argument again.  This way, we will not perform
2871 	 side-effects more than once.  */
2872       CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2873 
2874       op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2875 
2876       start_sequence ();
2877 
2878       if (expand_sfix_optab (result, op0, builtin_optab))
2879 	{
2880 	  /* Output the entire sequence.  */
2881 	  insns = get_insns ();
2882 	  end_sequence ();
2883 	  emit_insn (insns);
2884 	  return result;
2885 	}
2886 
2887       /* If we were unable to expand via the builtin, stop the sequence
2888 	 (without outputting the insns) and call the library function
2889 	 with the stabilized argument list.  */
2890       end_sequence ();
2891     }
2892 
2893   if (fallback_fn != BUILT_IN_NONE)
2894     {
2895       /* Fall back to rounding to long int.  Use implicit_p 0 - for non-C99
2896 	 targets, (int) round (x) should never be transformed into
2897 	 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2898 	 a call to lround in the hope that the target provides at least some
2899 	 C99 functions.  This should result in the best user experience
2900 	 for targets without full C99 support.  */
2901       tree fallback_fndecl = mathfn_built_in_1
2902 	(TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2903 
2904       exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2905 				   fallback_fndecl, 1, arg);
2906 
2907       target = expand_call (exp, NULL_RTX, target == const0_rtx);
2908       target = maybe_emit_group_store (target, TREE_TYPE (exp));
2909       return convert_to_mode (mode, target, 0);
2910     }
2911 
2912   return expand_call (exp, target, target == const0_rtx);
2913 }
2914 
2915 /* Expand a call to the powi built-in mathematical function.  Return NULL_RTX if
2916    a normal call should be emitted rather than expanding the function
2917    in-line.  EXP is the expression that is a call to the builtin
2918    function; if convenient, the result should be placed in TARGET.  */
2919 
2920 static rtx
2921 expand_builtin_powi (tree exp, rtx target)
2922 {
2923   tree arg0, arg1;
2924   rtx op0, op1;
2925   machine_mode mode;
2926   machine_mode mode2;
2927 
2928   if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2929     return NULL_RTX;
2930 
2931   arg0 = CALL_EXPR_ARG (exp, 0);
2932   arg1 = CALL_EXPR_ARG (exp, 1);
2933   mode = TYPE_MODE (TREE_TYPE (exp));
2934 
2935   /* Emit a libcall to libgcc.  */
2936 
2937   /* Mode of the 2nd argument must match that of an int.  */
2938   mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
2939 
2940   if (target == NULL_RTX)
2941     target = gen_reg_rtx (mode);
2942 
2943   op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2944   if (GET_MODE (op0) != mode)
2945     op0 = convert_to_mode (mode, op0, 0);
2946   op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2947   if (GET_MODE (op1) != mode2)
2948     op1 = convert_to_mode (mode2, op1, 0);
2949 
2950   target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2951 				    target, LCT_CONST, mode,
2952 				    op0, mode, op1, mode2);
2953 
2954   return target;
2955 }
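
/* E.g. __builtin_powi (x, n) with double X becomes a libgcc libcall
   (typically __powidf2; the exact name comes from powi_optab's
   libfunc), with N first converted to the mode of an int.  */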
2956 
2957 /* Expand expression EXP which is a call to the strlen builtin.  Return
2958    NULL_RTX if we failed and the caller should emit a normal call, otherwise
2959    try to get the result in TARGET, if convenient.  */
2960 
2961 static rtx
2962 expand_builtin_strlen (tree exp, rtx target,
2963 		       machine_mode target_mode)
2964 {
2965   if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2966     return NULL_RTX;
2967 
2968   struct expand_operand ops[4];
2969   rtx pat;
2970   tree len;
2971   tree src = CALL_EXPR_ARG (exp, 0);
2972   rtx src_reg;
2973   rtx_insn *before_strlen;
2974   machine_mode insn_mode;
2975   enum insn_code icode = CODE_FOR_nothing;
2976   unsigned int align;
2977 
2978   /* If the length can be computed at compile-time, return it.  */
2979   len = c_strlen (src, 0);
2980   if (len)
2981     return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2982 
2983   /* If the length can be computed at compile-time and is a constant
2984      integer, but there are side-effects in src, evaluate
2985      src for side-effects, then return len.
2986      E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2987      can be optimized into: i++; x = 3;  */
2988   len = c_strlen (src, 1);
2989   if (len && TREE_CODE (len) == INTEGER_CST)
2990     {
2991       expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2992       return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2993     }
2994 
2995   align = get_pointer_alignment (src) / BITS_PER_UNIT;
2996 
2997   /* If SRC is not a pointer type, don't do this operation inline.  */
2998   if (align == 0)
2999     return NULL_RTX;
3000 
3001   /* Bail out if we can't compute strlen in the right mode.  */
3002   FOR_EACH_MODE_FROM (insn_mode, target_mode)
3003     {
3004       icode = optab_handler (strlen_optab, insn_mode);
3005       if (icode != CODE_FOR_nothing)
3006 	break;
3007     }
3008   if (insn_mode == VOIDmode)
3009     return NULL_RTX;
3010 
3011   /* Make a place to hold the source address.  We will not expand
3012      the actual source until we are sure that the expansion will
3013      not fail -- there are trees that cannot be expanded twice.  */
3014   src_reg = gen_reg_rtx (Pmode);
3015 
3016   /* Mark the beginning of the strlen sequence so we can emit the
3017      source operand later.  */
3018   before_strlen = get_last_insn ();
3019 
3020   create_output_operand (&ops[0], target, insn_mode);
3021   create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3022   create_integer_operand (&ops[2], 0);
3023   create_integer_operand (&ops[3], align);
3024   if (!maybe_expand_insn (icode, 4, ops))
3025     return NULL_RTX;
3026 
3027   /* Check to see if the argument was declared attribute nonstring
3028      and if so, issue a warning since at this point it's not known
3029      to be nul-terminated.  */
3030   maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3031 
3032   /* Now that we are assured of success, expand the source.  */
3033   start_sequence ();
3034   pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3035   if (pat != src_reg)
3036     {
3037 #ifdef POINTERS_EXTEND_UNSIGNED
3038       if (GET_MODE (pat) != Pmode)
3039 	pat = convert_to_mode (Pmode, pat,
3040 			       POINTERS_EXTEND_UNSIGNED);
3041 #endif
3042       emit_move_insn (src_reg, pat);
3043     }
3044   pat = get_insns ();
3045   end_sequence ();
3046 
3047   if (before_strlen)
3048     emit_insn_after (pat, before_strlen);
3049   else
3050     emit_insn_before (pat, get_insns ());
3051 
3052   /* Return the value in the proper mode for this function.  */
3053   if (GET_MODE (ops[0].value) == target_mode)
3054     target = ops[0].value;
3055   else if (target != 0)
3056     convert_move (target, ops[0].value, 0);
3057   else
3058     target = convert_to_mode (target_mode, ops[0].value, 0);
3059 
3060   return target;
3061 }
3062 
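/* Illustrative sketch (not part of GCC) of the two folding paths above:

     #include <string.h>

     int i;
     size_t f (void)
     {
       // Constant argument: c_strlen computes 5 at compile time,
       // so no strlen call is emitted.
       size_t a = strlen ("hello");

       // Constant length but side-effects in the argument: the
       // side-effects are evaluated first, then the constant is
       // used, i.e. roughly  i++; b = 3;
       size_t b = strlen (i++ ? "xfoo" + 1 : "bar");

       return a + b;
     }
*/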
3063 /* Expand call EXP to the strnlen built-in, placing the result in
3064    TARGET if convenient.  Return NULL_RTX on failure.  */
3065 
3066 static rtx
3067 expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
3068 {
3069   if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3070     return NULL_RTX;
3071 
3072   tree src = CALL_EXPR_ARG (exp, 0);
3073   tree bound = CALL_EXPR_ARG (exp, 1);
3074 
3075   if (!bound)
3076     return NULL_RTX;
3077 
3078   location_t loc = UNKNOWN_LOCATION;
3079   if (EXPR_HAS_LOCATION (exp))
3080     loc = EXPR_LOCATION (exp);
3081 
3082   tree maxobjsize = max_object_size ();
3083   tree func = get_callee_fndecl (exp);
3084 
3085   /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
3086      so these conversions aren't necessary.  */
3087   c_strlen_data lendata = { };
3088   tree len = c_strlen (src, 0, &lendata, 1);
3089   if (len)
3090     len = fold_convert_loc (loc, TREE_TYPE (bound), len);
3091 
3092   if (TREE_CODE (bound) == INTEGER_CST)
3093     {
3094       if (!TREE_NO_WARNING (exp)
3095 	  && tree_int_cst_lt (maxobjsize, bound)
3096 	  && warning_at (loc, OPT_Wstringop_overflow_,
3097 			 "%K%qD specified bound %E "
3098 			 "exceeds maximum object size %E",
3099 			 exp, func, bound, maxobjsize))
3100 	TREE_NO_WARNING (exp) = true;
3101 
3102       bool exact = true;
3103       if (!len || TREE_CODE (len) != INTEGER_CST)
3104 	{
3105 	  /* Clear EXACT if LEN may be less than SRC suggests,
3106 	     such as in
3107 	       strnlen (&a[i], sizeof a)
3108 	     where the value of i is unknown.  Unless i's value is
3109 	     zero, the call is unsafe because the bound is greater. */
3110 	  lendata.decl = unterminated_array (src, &len, &exact);
3111 	  if (!lendata.decl)
3112 	    return NULL_RTX;
3113 	}
3114 
3115       if (lendata.decl && (tree_int_cst_lt (len, bound) || !exact))
3116 	{
3117 	  location_t warnloc
3118 	    = expansion_point_location_if_in_system_header (loc);
3119 
3120 	  if (!TREE_NO_WARNING (exp)
3121 	      && warning_at (warnloc, OPT_Wstringop_overflow_,
3122 			     exact
3123 			     ? G_("%K%qD specified bound %E exceeds the size "
3124 				  "%E of unterminated array")
3125 			     : G_("%K%qD specified bound %E may exceed the "
3126 				  "size of at most %E of unterminated array"),
3127 			     exp, func, bound, len))
3128 	    {
3129 	      inform (DECL_SOURCE_LOCATION (lendata.decl),
3130 		      "referenced argument declared here");
3131 	      TREE_NO_WARNING (exp) = true;
3132 	    }
3133 	  return NULL_RTX;
3134 	}
3135 
3136       if (!len)
3137 	return NULL_RTX;
3138 
3139       len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
3140       return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3141     }
3142 
3143   if (TREE_CODE (bound) != SSA_NAME)
3144     return NULL_RTX;
3145 
3146   wide_int min, max;
3147   enum value_range_kind rng = get_range_info (bound, &min, &max);
3148   if (rng != VR_RANGE)
3149     return NULL_RTX;
3150 
3151   if (!TREE_NO_WARNING (exp)
3152       && wi::ltu_p (wi::to_wide (maxobjsize, min.get_precision ()), min)
3153       && warning_at (loc, OPT_Wstringop_overflow_,
3154 		     "%K%qD specified bound [%wu, %wu] "
3155 		     "exceeds maximum object size %E",
3156 		     exp, func, min.to_uhwi (), max.to_uhwi (), maxobjsize))
3157     TREE_NO_WARNING (exp) = true;
3158 
3159   bool exact = true;
3160   if (!len || TREE_CODE (len) != INTEGER_CST)
3161     {
3162       lendata.decl = unterminated_array (src, &len, &exact);
3163       if (!lendata.decl)
3164 	return NULL_RTX;
3165     }
3166 
3167   if (lendata.decl
3168       && !TREE_NO_WARNING (exp)
3169       && (wi::ltu_p (wi::to_wide (len), min)
3170 	  || !exact))
3171     {
3172       location_t warnloc
3173 	= expansion_point_location_if_in_system_header (loc);
3174 
3175       if (warning_at (warnloc, OPT_Wstringop_overflow_,
3176 		      exact
3177 		      ? G_("%K%qD specified bound [%wu, %wu] exceeds "
3178 			   "the size %E of unterminated array")
3179 		      : G_("%K%qD specified bound [%wu, %wu] may exceed "
3180 			   "the size of at most %E of unterminated array"),
3181 		      exp, func, min.to_uhwi (), max.to_uhwi (), len))
3182 	{
3183 	  inform (DECL_SOURCE_LOCATION (lendata.decl),
3184 		  "referenced argument declared here");
3185 	  TREE_NO_WARNING (exp) = true;
3186 	}
3187     }
3188 
3189   if (lendata.decl)
3190     return NULL_RTX;
3191 
3192   if (wi::gtu_p (min, wi::to_wide (len)))
3193     return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3194 
3195   len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3196   return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3197 }
3198 
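/* Illustrative sketch (not part of GCC): with a constant bound, the
   code above reduces strnlen to a MIN_EXPR of the string length and
   the bound:

     #include <string.h>

     size_t g (void)
     {
       size_t a = strnlen ("hello", 3);    // folds to MIN (5, 3) == 3
       size_t b = strnlen ("hello", 99);   // folds to MIN (5, 99) == 5
       return a + b;
     }
*/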
3199 /* Callback routine for store_by_pieces.  Read GET_MODE_SIZE (MODE)
3200    bytes from constant string DATA + OFFSET and return it as target
3201    constant.  */
3202 
3203 static rtx
3204 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
3205 			 scalar_int_mode mode)
3206 {
3207   const char *str = (const char *) data;
3208 
3209   gcc_assert (offset >= 0
3210 	      && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
3211 		  <= strlen (str) + 1));
3212 
3213   return c_readstr (str + offset, mode);
3214 }
3215 
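/* Host-side sketch (not GCC code) of the chunking idea implemented by
   the callback above: store_by_pieces asks for one mode-sized chunk of
   the source string at a time, and c_readstr turns the bytes into an
   immediate.  Conceptually, for a 4-byte mode:

     #include <stdint.h>
     #include <string.h>

     static uint32_t read_chunk (const char *str, long offset)
     {
       uint32_t chunk;
       memcpy (&chunk, str + offset, sizeof chunk);  // bytes become an immediate
       return chunk;
     }
*/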
3216 /* LEN specifies the length of the block of the memcpy/memset operation.
3217    Figure out its range and store it in MIN_SIZE/MAX_SIZE.
3218    In some cases we can make a very likely guess about the maximum size,
3219    which we store in PROBABLE_MAX_SIZE.  */
3220 
3221 static void
3222 determine_block_size (tree len, rtx len_rtx,
3223 		      unsigned HOST_WIDE_INT *min_size,
3224 		      unsigned HOST_WIDE_INT *max_size,
3225 		      unsigned HOST_WIDE_INT *probable_max_size)
3226 {
3227   if (CONST_INT_P (len_rtx))
3228     {
3229       *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3230       return;
3231     }
3232   else
3233     {
3234       wide_int min, max;
3235       enum value_range_kind range_type = VR_UNDEFINED;
3236 
3237       /* Determine bounds from the type.  */
3238       if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3239 	*min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3240       else
3241 	*min_size = 0;
3242       if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3243 	*probable_max_size = *max_size
3244 	  = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3245       else
3246 	*probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3247 
3248       if (TREE_CODE (len) == SSA_NAME)
3249 	range_type = get_range_info (len, &min, &max);
3250       if (range_type == VR_RANGE)
3251 	{
3252 	  if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3253 	    *min_size = min.to_uhwi ();
3254 	  if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3255 	    *probable_max_size = *max_size = max.to_uhwi ();
3256 	}
3257       else if (range_type == VR_ANTI_RANGE)
3258 	{
3259 	  /* An anti-range 0...N lets us determine that the minimal size is N+1.  */
3260 	  if (min == 0)
3261 	    {
3262 	      if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
3263 		*min_size = max.to_uhwi () + 1;
3264 	    }
3265 	  /* Code like
3266 
3267 	     int n;
3268 	     if (n < 100)
3269 	       memcpy (a, b, n)
3270 
3271 	     produces an anti-range allowing negative values of N.  We can
3272 	     still use that information and guess that N is not negative.  */
3274 	  else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3275 	    *probable_max_size = min.to_uhwi () - 1;
3276 	}
3277     }
3278   gcc_checking_assert (*max_size <=
3279 		       (unsigned HOST_WIDE_INT)
3280 			  GET_MODE_MASK (GET_MODE (len_rtx)));
3281 }
3282 
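/* Worked example (illustrative) of the anti-range case above: with

     int n;
     if (n < 100)
       memcpy (a, b, n);

   VRP records an anti-range excluding [100, INT_MAX] for N, so N is
   either in [0, 99] or, if negative, wraps to a huge unsigned value.
   The code above therefore keeps *max_size at the conservative mode
   mask but sets *probable_max_size to 100 - 1 = 99 on the guess that
   N is not negative.  */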
3283 /* Try to verify that the sizes and lengths of the arguments to a string
3284    manipulation function given by EXP are within valid bounds and that
3285    the operation does not lead to buffer overflow or read past the end.
3286    Arguments other than EXP may be null.  When non-null, the arguments
3287    have the following meaning:
3288    DST is the destination of a copy call or NULL otherwise.
3289    SRC is the source of a copy call or NULL otherwise.
3290    DSTWRITE is the number of bytes written into the destination obtained
3291    from the user-supplied size argument to the function (such as in
3292    memcpy(DST, SRC, DSTWRITE) or strncpy(DST, SRC, DSTWRITE)).
3293    MAXREAD is the user-supplied bound on the length of the source sequence
3294    (such as in strncat(d, s, N)).  It specifies the upper limit on the number
3295    of bytes to write.  If NULL, it's taken to be the same as DSTWRITE.
3296    SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
3297    expression EXP is a string function call (as opposed to a memory call
3298    like memcpy).  As an exception, SRCSTR can also be an integer denoting
3299    the precomputed size of the source string or object (for functions like
3300    memcpy).
3301    DSTSIZE is the size of the destination object specified by the last
3302    argument to the _chk builtins, typically resulting from the expansion
3303    of __builtin_object_size (such as in __builtin___strcpy_chk(DST, SRC,
3304    DSTSIZE).
3305 
3306    When DSTWRITE is null LEN is checked to verify that it doesn't exceed
3307    SIZE_MAX.
3308 
3309    If the call is successfully verified as safe return true, otherwise
3310    return false.  */
3311 
3312 static bool
3313 check_access (tree exp, tree, tree, tree dstwrite,
3314 	      tree maxread, tree srcstr, tree dstsize)
3315 {
3316   int opt = OPT_Wstringop_overflow_;
3317 
3318   /* The size of the largest object is half the address space, or
3319      PTRDIFF_MAX.  (This is way too permissive.)  */
3320   tree maxobjsize = max_object_size ();
3321 
3322   /* Either the length of the source string for string functions or
3323      the size of the source object for raw memory functions.  */
3324   tree slen = NULL_TREE;
3325 
3326   tree range[2] = { NULL_TREE, NULL_TREE };
3327 
3328   /* Set to true when the exact number of bytes written by a string
3329      function like strcpy is not known and the only thing that is
3330      known is that it must be at least one (for the terminating nul).  */
3331   bool at_least_one = false;
3332   if (srcstr)
3333     {
3334       /* SRCSTR is normally a pointer to a string but as a special case
3335 	 it can be an integer denoting the length of a string.  */
3336       if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
3337 	{
3338 	  /* Try to determine the range of lengths the source string
3339 	     refers to.  If it can be determined and is less than
3340 	     the upper bound given by MAXREAD add one to it for
3341 	     the terminating nul.  Otherwise, set it to one for
3342 	     the same reason, or to MAXREAD as appropriate.  */
3343 	  c_strlen_data lendata = { };
3344 	  get_range_strlen (srcstr, &lendata, /* eltsize = */ 1);
3345 	  range[0] = lendata.minlen;
3346 	  range[1] = lendata.maxbound;
3347 	  if (range[0] && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
3348 	    {
3349 	      if (maxread && tree_int_cst_le (maxread, range[0]))
3350 		range[0] = range[1] = maxread;
3351 	      else
3352 		range[0] = fold_build2 (PLUS_EXPR, size_type_node,
3353 					range[0], size_one_node);
3354 
3355 	      if (maxread && tree_int_cst_le (maxread, range[1]))
3356 		range[1] = maxread;
3357 	      else if (!integer_all_onesp (range[1]))
3358 		range[1] = fold_build2 (PLUS_EXPR, size_type_node,
3359 					range[1], size_one_node);
3360 
3361 	      slen = range[0];
3362 	    }
3363 	  else
3364 	    {
3365 	      at_least_one = true;
3366 	      slen = size_one_node;
3367 	    }
3368 	}
3369       else
3370 	slen = srcstr;
3371     }
3372 
3373   if (!dstwrite && !maxread)
3374     {
3375       /* When the only available piece of data is the object size
3376 	 there is nothing to do.  */
3377       if (!slen)
3378 	return true;
3379 
3380       /* Otherwise, when the length of the source sequence is known
3381 	 (as with strlen), set DSTWRITE to it.  */
3382       if (!range[0])
3383 	dstwrite = slen;
3384     }
3385 
3386   if (!dstsize)
3387     dstsize = maxobjsize;
3388 
3389   if (dstwrite)
3390     get_size_range (dstwrite, range);
3391 
3392   tree func = get_callee_fndecl (exp);
3393 
3394   /* First check the number of bytes to be written against the maximum
3395      object size.  */
3396   if (range[0]
3397       && TREE_CODE (range[0]) == INTEGER_CST
3398       && tree_int_cst_lt (maxobjsize, range[0]))
3399     {
3400       if (TREE_NO_WARNING (exp))
3401 	return false;
3402 
3403       location_t loc = tree_nonartificial_location (exp);
3404       loc = expansion_point_location_if_in_system_header (loc);
3405 
3406       bool warned;
3407       if (range[0] == range[1])
3408 	warned = warning_at (loc, opt,
3409 			     "%K%qD specified size %E "
3410 			     "exceeds maximum object size %E",
3411 			     exp, func, range[0], maxobjsize);
3412       else
3413 	warned = warning_at (loc, opt,
3414 			     "%K%qD specified size between %E and %E "
3415 			     "exceeds maximum object size %E",
3416 			     exp, func,
3417 			     range[0], range[1], maxobjsize);
3418       if (warned)
3419 	TREE_NO_WARNING (exp) = true;
3420 
3421       return false;
3422     }
3423 
3424   /* The number of bytes to write is "exact" if DSTWRITE is non-null,
3425      constant, and in range of unsigned HOST_WIDE_INT.  */
3426   bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);
3427 
3428   /* Next check the number of bytes to be written against the destination
3429      object size.  */
3430   if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
3431     {
3432       if (range[0]
3433 	  && TREE_CODE (range[0]) == INTEGER_CST
3434 	  && ((tree_fits_uhwi_p (dstsize)
3435 	       && tree_int_cst_lt (dstsize, range[0]))
3436 	      || (dstwrite
3437 		  && tree_fits_uhwi_p (dstwrite)
3438 		  && tree_int_cst_lt (dstwrite, range[0]))))
3439 	{
3440 	  if (TREE_NO_WARNING (exp))
3441 	    return false;
3442 
3443 	  location_t loc = tree_nonartificial_location (exp);
3444 	  loc = expansion_point_location_if_in_system_header (loc);
3445 
3446 	  if (dstwrite == slen && at_least_one)
3447 	    {
3448 	      /* This is a call to strcpy with a destination of 0 size
3449 		 and a source of unknown length.  The call will write
3450 		 at least one byte past the end of the destination.  */
3451 	      warning_at (loc, opt,
3452 			  "%K%qD writing %E or more bytes into a region "
3453 			  "of size %E overflows the destination",
3454 			  exp, func, range[0], dstsize);
3455 	    }
3456 	  else if (tree_int_cst_equal (range[0], range[1]))
3457 	    warning_n (loc, opt, tree_to_uhwi (range[0]),
3458 		       "%K%qD writing %E byte into a region "
3459 		       "of size %E overflows the destination",
3460 		       "%K%qD writing %E bytes into a region "
3461 		       "of size %E overflows the destination",
3462 		       exp, func, range[0], dstsize);
3463 	  else if (tree_int_cst_sign_bit (range[1]))
3464 	    {
3465 	      /* Avoid printing the upper bound if it's invalid.  */
3466 	      warning_at (loc, opt,
3467 			  "%K%qD writing %E or more bytes into a region "
3468 			  "of size %E overflows the destination",
3469 			  exp, func, range[0], dstsize);
3470 	    }
3471 	  else
3472 	    warning_at (loc, opt,
3473 			"%K%qD writing between %E and %E bytes into "
3474 			"a region of size %E overflows the destination",
3475 			exp, func, range[0], range[1],
3476 			dstsize);
3477 
3478 	  /* Return error when an overflow has been detected.  */
3479 	  return false;
3480 	}
3481     }
3482 
3483   /* Check the maximum length of the source sequence against the size
3484      of the destination object if known, or against the maximum size
3485      of an object.  */
3486   if (maxread)
3487     {
3488       get_size_range (maxread, range);
3489 
3490       /* Use the lower end for MAXREAD from now on.  */
3491       if (range[0])
3492 	maxread = range[0];
3493 
3494       if (range[0] && dstsize && tree_fits_uhwi_p (dstsize))
3495 	{
3496 	  location_t loc = tree_nonartificial_location (exp);
3497 	  loc = expansion_point_location_if_in_system_header (loc);
3498 
3499 	  if (tree_int_cst_lt (maxobjsize, range[0]))
3500 	    {
3501 	      if (TREE_NO_WARNING (exp))
3502 		return false;
3503 
3504 	      /* Warn about crazy big sizes first since that's more
3505 		 likely to be meaningful than saying that the bound
3506 		 is greater than the object size if both are big.  */
3507 	      if (range[0] == range[1])
3508 		warning_at (loc, opt,
3509 			    "%K%qD specified bound %E "
3510 			    "exceeds maximum object size %E",
3511 			    exp, func,
3512 			    range[0], maxobjsize);
3513 	      else
3514 		warning_at (loc, opt,
3515 			    "%K%qD specified bound between %E and %E "
3516 			    "exceeds maximum object size %E",
3517 			    exp, func,
3518 			    range[0], range[1], maxobjsize);
3519 
3520 	      return false;
3521 	    }
3522 
3523 	  if (dstsize != maxobjsize && tree_int_cst_lt (dstsize, range[0]))
3524 	    {
3525 	      if (TREE_NO_WARNING (exp))
3526 		return false;
3527 
3528 	      if (tree_int_cst_equal (range[0], range[1]))
3529 		warning_at (loc, opt,
3530 			    "%K%qD specified bound %E "
3531 			    "exceeds destination size %E",
3532 			    exp, func,
3533 			    range[0], dstsize);
3534 	      else
3535 		warning_at (loc, opt,
3536 			    "%K%qD specified bound between %E and %E "
3537 			    "exceeds destination size %E",
3538 			    exp, func,
3539 			    range[0], range[1], dstsize);
3540 	      return false;
3541 	    }
3542 	}
3543     }
3544 
3545   /* Check for reading past the end of SRC.  */
3546   if (slen
3547       && slen == srcstr
3548       && dstwrite && range[0]
3549       && tree_int_cst_lt (slen, range[0]))
3550     {
3551       if (TREE_NO_WARNING (exp))
3552 	return false;
3553 
3554       location_t loc = tree_nonartificial_location (exp);
3555 
3556       if (tree_int_cst_equal (range[0], range[1]))
3557 	warning_n (loc, opt, tree_to_uhwi (range[0]),
3558 		   "%K%qD reading %E byte from a region of size %E",
3559 		   "%K%qD reading %E bytes from a region of size %E",
3560 		    exp, func, range[0], slen);
3561       else if (tree_int_cst_sign_bit (range[1]))
3562 	{
3563 	  /* Avoid printing the upper bound if it's invalid.  */
3564 	  warning_at (loc, opt,
3565 		      "%K%qD reading %E or more bytes from a region "
3566 		      "of size %E",
3567 		      exp, func, range[0], slen);
3568 	}
3569       else
3570 	warning_at (loc, opt,
3571 		    "%K%qD reading between %E and %E bytes from a region "
3572 		    "of size %E",
3573 		    exp, func, range[0], range[1], slen);
3574       return false;
3575     }
3576 
3577   return true;
3578 }
3579 
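/* Illustrative examples (not GCC code) of diagnostics issued via
   check_access, assuming -Wstringop-overflow is enabled:

     #include <string.h>

     char d[3];

     void h (void)
     {
       memcpy (d, "abcdef", 7);   // writing 7 bytes into a region of size 3
       strcpy (d, "abcdef");      // writing 7 bytes into a region of size 3
       memset (d, 0, (size_t)-1); // specified size exceeds maximum object size
     }
*/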
3580 /* Helper to compute the size of the object referenced by the DEST
3581    expression which must have pointer type, using Object Size type
3582    OSTYPE (only the least significant 2 bits are used).  Return
3583    an estimate of the size of the object if successful or NULL when
3584    the size cannot be determined.  When the referenced object involves
3585    a non-constant offset in some range the returned value represents
3586    the largest size given the smallest non-negative offset in the
3587    range.  The function is intended for diagnostics and should not
3588    be used to influence code generation or optimization.  */
3589 
3590 tree
3591 compute_objsize (tree dest, int ostype)
3592 {
3593   unsigned HOST_WIDE_INT size;
3594 
3595   /* Only the two least significant bits are meaningful.  */
3596   ostype &= 3;
3597 
3598   if (compute_builtin_object_size (dest, ostype, &size))
3599     return build_int_cst (sizetype, size);
3600 
3601   if (TREE_CODE (dest) == SSA_NAME)
3602     {
3603       gimple *stmt = SSA_NAME_DEF_STMT (dest);
3604       if (!is_gimple_assign (stmt))
3605 	return NULL_TREE;
3606 
3607       dest = gimple_assign_rhs1 (stmt);
3608 
3609       tree_code code = gimple_assign_rhs_code (stmt);
3610       if (code == POINTER_PLUS_EXPR)
3611 	{
3612 	  /* compute_builtin_object_size fails for addresses with
3613 	     non-constant offsets.  Try to determine the range of
3614 	     such an offset here and use it to adjust the constant
3615 	     size.  */
3616 	  tree off = gimple_assign_rhs2 (stmt);
3617 	  if (TREE_CODE (off) == INTEGER_CST)
3618 	    {
3619 	      if (tree size = compute_objsize (dest, ostype))
3620 		{
3621 		  wide_int wioff = wi::to_wide (off);
3622 		  wide_int wisiz = wi::to_wide (size);
3623 
3624 		  /* Ignore negative offsets for now.  For others,
3625 		     use the lower bound as the most optimistic
3626 		     estimate of the (remaining) size.  */
3627 		  if (wi::sign_mask (wioff))
3628 		    ;
3629 		  else if (wi::ltu_p (wioff, wisiz))
3630 		    return wide_int_to_tree (TREE_TYPE (size),
3631 					     wi::sub (wisiz, wioff));
3632 		  else
3633 		    return size_zero_node;
3634 		}
3635 	    }
3636 	  else if (TREE_CODE (off) == SSA_NAME
3637 	      && INTEGRAL_TYPE_P (TREE_TYPE (off)))
3638 	    {
3639 	      wide_int min, max;
3640 	      enum value_range_kind rng = get_range_info (off, &min, &max);
3641 
3642 	      if (rng == VR_RANGE)
3643 		{
3644 		  if (tree size = compute_objsize (dest, ostype))
3645 		    {
3646 		      wide_int wisiz = wi::to_wide (size);
3647 
3648 		      /* Ignore negative offsets for now.  For others,
3649 			 use the lower bound as the most optimistic
3650 			 estimate of the (remaining) size.  */
3651 		      if (wi::sign_mask (min)
3652 			  || wi::sign_mask (max))
3653 			;
3654 		      else if (wi::ltu_p (min, wisiz))
3655 			return wide_int_to_tree (TREE_TYPE (size),
3656 						 wi::sub (wisiz, min));
3657 		      else
3658 			return size_zero_node;
3659 		    }
3660 		}
3661 	    }
3662 	}
3663       else if (code != ADDR_EXPR)
3664 	return NULL_TREE;
3665     }
3666 
3667   /* Unless computing the largest size (for memcpy and other raw memory
3668      functions), try to determine the size of the object from its type.  */
3669   if (!ostype)
3670     return NULL_TREE;
3671 
3672   if (TREE_CODE (dest) != ADDR_EXPR)
3673     return NULL_TREE;
3674 
3675   tree type = TREE_TYPE (dest);
3676   if (TREE_CODE (type) == POINTER_TYPE)
3677     type = TREE_TYPE (type);
3678 
3679   type = TYPE_MAIN_VARIANT (type);
3680 
3681   if (TREE_CODE (type) == ARRAY_TYPE
3682       && !array_at_struct_end_p (TREE_OPERAND (dest, 0)))
3683     {
3684       /* Return the constant size unless it's zero (that's a zero-length
3685 	 array likely at the end of a struct).  */
3686       tree size = TYPE_SIZE_UNIT (type);
3687       if (size && TREE_CODE (size) == INTEGER_CST
3688 	  && !integer_zerop (size))
3689 	return size;
3690     }
3691 
3692   return NULL_TREE;
3693 }
3694 
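/* Illustrative examples (not GCC code) of what compute_objsize
   determines for a known array:

     char a[8];

     // compute_objsize (&a[2], 1) -> 6        (constant offset case)
     // with i known to be in [2, 5]:
     // compute_objsize (&a[i], 1) -> 8 - 2 == 6, using the smallest
     // non-negative offset as the most optimistic estimate, per the
     // POINTER_PLUS_EXPR handling above.
*/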
3695 /* Helper to determine and check the sizes of the source and the destination
3696    of calls to __builtin_{bzero,memcpy,mempcpy,memset}.  EXP is the
3697    call expression, DEST is the destination argument, SRC is the source
3698    argument or null, and LEN is the number of bytes.  Use Object Size type-0
3699    regardless of the OPT_Wstringop_overflow_ setting.  Return true on success
3700    (no overflow or invalid sizes), false otherwise.  */
3701 
3702 static bool
3703 check_memop_access (tree exp, tree dest, tree src, tree size)
3704 {
3705   /* For functions like memset and memcpy that operate on raw memory
3706      try to determine the size of the largest source and destination
3707      object using type-0 Object Size regardless of the object size
3708      type specified by the option.  */
3709   tree srcsize = src ? compute_objsize (src, 0) : NULL_TREE;
3710   tree dstsize = compute_objsize (dest, 0);
3711 
3712   return check_access (exp, dest, src, size, /*maxread=*/NULL_TREE,
3713 		       srcsize, dstsize);
3714 }
3715 
3716 /* Validate memchr arguments without performing any expansion.
3717    Return NULL_RTX.  */
3718 
3719 static rtx
3720 expand_builtin_memchr (tree exp, rtx)
3721 {
3722   if (!validate_arglist (exp,
3723  			 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3724     return NULL_RTX;
3725 
3726   tree arg1 = CALL_EXPR_ARG (exp, 0);
3727   tree len = CALL_EXPR_ARG (exp, 2);
3728 
3729   /* Diagnose calls where the specified length exceeds the size
3730      of the object.  */
3731   if (warn_stringop_overflow)
3732     {
3733       tree size = compute_objsize (arg1, 0);
3734       check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, len,
3735 		    /*maxread=*/NULL_TREE, size, /*objsize=*/NULL_TREE);
3736     }
3737 
3738   return NULL_RTX;
3739 }
3740 
3741 /* Expand a call EXP to the memcpy builtin.
3742    Return NULL_RTX if we failed; the caller should emit a normal call,
3743    otherwise try to get the result in TARGET, if convenient (and in
3744    mode MODE if that's convenient).  */
3745 
3746 static rtx
3747 expand_builtin_memcpy (tree exp, rtx target)
3748 {
3749   if (!validate_arglist (exp,
3750  			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3751     return NULL_RTX;
3752 
3753   tree dest = CALL_EXPR_ARG (exp, 0);
3754   tree src = CALL_EXPR_ARG (exp, 1);
3755   tree len = CALL_EXPR_ARG (exp, 2);
3756 
3757   check_memop_access (exp, dest, src, len);
3758 
3759   return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3760 					  /*retmode=*/ RETURN_BEGIN);
3761 }
3762 
3763 /* Check a call EXP to the memmove built-in for validity.
3764    Return NULL_RTX on both success and failure.  */
3765 
3766 static rtx
3767 expand_builtin_memmove (tree exp, rtx)
3768 {
3769   if (!validate_arglist (exp,
3770  			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3771     return NULL_RTX;
3772 
3773   tree dest = CALL_EXPR_ARG (exp, 0);
3774   tree src = CALL_EXPR_ARG (exp, 1);
3775   tree len = CALL_EXPR_ARG (exp, 2);
3776 
3777   check_memop_access (exp, dest, src, len);
3778 
3779   return NULL_RTX;
3780 }
3781 
3782 /* Expand a call EXP to the mempcpy builtin.
3783    Return NULL_RTX if we failed; the caller should emit a normal call,
3784    otherwise try to get the result in TARGET, if convenient (and in
3785    mode MODE if that's convenient).  */
3786 
3787 static rtx
3788 expand_builtin_mempcpy (tree exp, rtx target)
3789 {
3790   if (!validate_arglist (exp,
3791  			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3792     return NULL_RTX;
3793 
3794   tree dest = CALL_EXPR_ARG (exp, 0);
3795   tree src = CALL_EXPR_ARG (exp, 1);
3796   tree len = CALL_EXPR_ARG (exp, 2);
3797 
3798   /* Policy does not generally allow using compute_objsize (which
3799      is used internally by check_memop_access) to change code generation
3800      or drive optimization decisions.
3801 
3802      In this instance it is safe because the code we generate has
3803      the same semantics regardless of the return value of
3804      check_memop_access.  Exactly the same amount of data is copied
3805      and the return value is exactly the same in both cases.
3806 
3807      Furthermore, check_memop_access always uses type-0 Object Size
3808      for the call to compute_objsize, so the imprecise nature of
3809      compute_objsize is avoided.  */
3810 
3811   /* Avoid expanding mempcpy into memcpy when the call is determined
3812      to overflow the buffer.  This also prevents the same overflow
3813      from being diagnosed again when expanding memcpy.  */
3814   if (!check_memop_access (exp, dest, src, len))
3815     return NULL_RTX;
3816 
3817   return expand_builtin_mempcpy_args (dest, src, len,
3818 				      target, exp, /*retmode=*/ RETURN_END);
3819 }
3820 
3821 /* Helper function to do the actual work of expanding the memory copy
3822    family of functions (memcpy, mempcpy, stpcpy).  The expansion should
3823    copy LEN bytes of memory from SRC to DEST and assign the result to
3824    TARGET if convenient.  The return value is based on the RETMODE argument.  */
3825 
3826 static rtx
3827 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3828 				 rtx target, tree exp, memop_ret retmode)
3829 {
3830   const char *src_str;
3831   unsigned int src_align = get_pointer_alignment (src);
3832   unsigned int dest_align = get_pointer_alignment (dest);
3833   rtx dest_mem, src_mem, dest_addr, len_rtx;
3834   HOST_WIDE_INT expected_size = -1;
3835   unsigned int expected_align = 0;
3836   unsigned HOST_WIDE_INT min_size;
3837   unsigned HOST_WIDE_INT max_size;
3838   unsigned HOST_WIDE_INT probable_max_size;
3839 
3840   /* If DEST is not a pointer type, call the normal function.  */
3841   if (dest_align == 0)
3842     return NULL_RTX;
3843 
3844   /* If either SRC is not a pointer type, don't do this
3845      operation in-line.  */
3846   if (src_align == 0)
3847     return NULL_RTX;
3848 
3849   if (currently_expanding_gimple_stmt)
3850     stringop_block_profile (currently_expanding_gimple_stmt,
3851 			    &expected_align, &expected_size);
3852 
3853   if (expected_align < dest_align)
3854     expected_align = dest_align;
3855   dest_mem = get_memory_rtx (dest, len);
3856   set_mem_align (dest_mem, dest_align);
3857   len_rtx = expand_normal (len);
3858   determine_block_size (len, len_rtx, &min_size, &max_size,
3859 			&probable_max_size);
3860   src_str = c_getstr (src);
3861 
3862   /* If SRC is a string constant and block move would be done
3863      by pieces, we can avoid loading the string from memory
3864      and only store the computed constants.  */
3865   if (src_str
3866       && CONST_INT_P (len_rtx)
3867       && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3868       && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3869 			      CONST_CAST (char *, src_str),
3870 			      dest_align, false))
3871     {
3872       dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3873 				  builtin_memcpy_read_str,
3874 				  CONST_CAST (char *, src_str),
3875 				  dest_align, false, retmode);
3876       dest_mem = force_operand (XEXP (dest_mem, 0), target);
3877       dest_mem = convert_memory_address (ptr_mode, dest_mem);
3878       return dest_mem;
3879     }
3880 
3881   src_mem = get_memory_rtx (src, len);
3882   set_mem_align (src_mem, src_align);
3883 
3884   /* Copy word part most expediently.  */
3885   enum block_op_methods method = BLOCK_OP_NORMAL;
3886   if (CALL_EXPR_TAILCALL (exp)
3887       && (retmode == RETURN_BEGIN || target == const0_rtx))
3888     method = BLOCK_OP_TAILCALL;
3889   if (retmode == RETURN_END && target != const0_rtx)
3890     method = BLOCK_OP_NO_LIBCALL_RET;
3891   dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
3892 				     expected_align, expected_size,
3893 				     min_size, max_size, probable_max_size);
3894   if (dest_addr == pc_rtx)
3895     return NULL_RTX;
3896 
3897   if (dest_addr == 0)
3898     {
3899       dest_addr = force_operand (XEXP (dest_mem, 0), target);
3900       dest_addr = convert_memory_address (ptr_mode, dest_addr);
3901     }
3902 
3903   if (retmode != RETURN_BEGIN && target != const0_rtx)
3904     {
3905       dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3906       /* stpcpy returns a pointer to the last byte written, i.e. the
	 terminating nul.  */
3907       if (retmode == RETURN_END_MINUS_ONE)
3908 	dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
3909     }
3910 
3911   return dest_addr;
3912 }
3913 
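/* Illustrative sketch (not GCC code): when SRC is a string constant
   and the length fits, the store_by_pieces path above turns

     char buf[8];
     memcpy (buf, "hi!", 4);

   into immediate stores of the string bytes (no load from .rodata),
   roughly as if the source had been written as

     buf[0] = 'h'; buf[1] = 'i'; buf[2] = '!'; buf[3] = '\0';

   but using the widest stores the target allows.  */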
3914 static rtx
3915 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3916 			     rtx target, tree orig_exp, memop_ret retmode)
3917 {
3918   return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3919 					  retmode);
3920 }
3921 
3922 /* Expand into a movstr instruction, if one is available.  Return NULL_RTX
3923    if we failed; the caller should then emit a normal call.  Otherwise try
3924    to get the result in TARGET, if convenient.
3925    The return value is based on the RETMODE argument.  */
3926 
3927 static rtx
3928 expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
3929 {
3930   struct expand_operand ops[3];
3931   rtx dest_mem;
3932   rtx src_mem;
3933 
3934   if (!targetm.have_movstr ())
3935     return NULL_RTX;
3936 
3937   dest_mem = get_memory_rtx (dest, NULL);
3938   src_mem = get_memory_rtx (src, NULL);
3939   if (retmode == RETURN_BEGIN)
3940     {
3941       target = force_reg (Pmode, XEXP (dest_mem, 0));
3942       dest_mem = replace_equiv_address (dest_mem, target);
3943     }
3944 
3945   create_output_operand (&ops[0],
3946 			 retmode != RETURN_BEGIN ? target : NULL_RTX, Pmode);
3947   create_fixed_operand (&ops[1], dest_mem);
3948   create_fixed_operand (&ops[2], src_mem);
3949   if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3950     return NULL_RTX;
3951 
3952   if (retmode != RETURN_BEGIN && target != const0_rtx)
3953     {
3954       target = ops[0].value;
3955       /* movstr is supposed to set end to the address of the NUL
3956 	 terminator.  If the caller requested a mempcpy-like return value,
3957 	 adjust it.  */
3958       if (retmode == RETURN_END)
3959 	{
3960 	  rtx tem = plus_constant (GET_MODE (target),
3961 				   gen_lowpart (GET_MODE (target), target), 1);
3962 	  emit_move_insn (target, force_operand (tem, NULL_RTX));
3963 	}
3964     }
3965   return target;
3966 }
3967 
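/* Illustrative sketch (not GCC code) of the return-value adjustment
   above: movstr leaves a pointer to the NUL terminator, while the
   mempcpy-style RETURN_END contract wants one past it.  In C terms:

     #include <string.h>

     char *mempcpy_like (char *dst, const char *src)
     {
       strcpy (dst, src);
       char *end = dst + strlen (src);  // movstr-style: points at the NUL
       return end + 1;                  // RETURN_END: one past the NUL
     }
*/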
3968 /* Do some very basic size validation of a call to the strcat builtin
3969    given by EXP.  Return NULL_RTX to have the built-in expand to a call
3970    to the library function.  */
3971 
3972 static rtx
3973 expand_builtin_strcat (tree exp, rtx)
3974 {
3975   if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
3976       || !warn_stringop_overflow)
3977     return NULL_RTX;
3978 
3979   tree dest = CALL_EXPR_ARG (exp, 0);
3980   tree src = CALL_EXPR_ARG (exp, 1);
3981 
3982   /* There is no way here to determine the length of the string in
3983      the destination to which the SRC string is being appended, so
3984      just diagnose cases when the source string is longer than
3985      the destination object.  */
3986 
3987   tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
3988 
3989   check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, src,
3990 		destsize);
3991 
3992   return NULL_RTX;
3993 }
3994 
3995 /* Expand expression EXP, which is a call to the strcpy builtin.  Return
3996    NULL_RTX if we failed; the caller should emit a normal call, otherwise
3997    try to get the result in TARGET, if convenient (and in mode MODE if that's
3998    convenient).  */
3999 
4000 static rtx
4001 expand_builtin_strcpy (tree exp, rtx target)
4002 {
4003   if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4004     return NULL_RTX;
4005 
4006   tree dest = CALL_EXPR_ARG (exp, 0);
4007   tree src = CALL_EXPR_ARG (exp, 1);
4008 
4009   if (warn_stringop_overflow)
4010     {
4011       tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4012       check_access (exp, dest, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
4013 		    src, destsize);
4014     }
4015 
4016   if (rtx ret = expand_builtin_strcpy_args (exp, dest, src, target))
4017     {
4018       /* Check to see if the argument was declared attribute nonstring
4019 	 and if so, issue a warning since at this point it's not known
4020 	 to be nul-terminated.  */
4021       tree fndecl = get_callee_fndecl (exp);
4022       maybe_warn_nonstring_arg (fndecl, exp);
4023       return ret;
4024     }
4025 
4026   return NULL_RTX;
4027 }
4028 
4029 /* Helper function to do the actual work for expand_builtin_strcpy.  The
4030    arguments to the builtin_strcpy call DEST and SRC are broken out
4031    so that this can also be called without constructing an actual CALL_EXPR.
4032    The other arguments and return value are the same as for
4033    expand_builtin_strcpy.  */
4034 
4035 static rtx
4036 expand_builtin_strcpy_args (tree exp, tree dest, tree src, rtx target)
4037 {
4038   /* Detect strcpy calls with unterminated arrays.  */
4039   if (tree nonstr = unterminated_array (src))
4040     {
4041       /* NONSTR refers to the non-nul terminated constant array.  */
4042       if (!TREE_NO_WARNING (exp))
4043 	warn_string_no_nul (EXPR_LOCATION (exp), "strcpy", src, nonstr);
4044       return NULL_RTX;
4045     }
4046 
4047   return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
4048 }
4049 
4050 /* Expand a call EXP to the stpcpy builtin.
4051    Return NULL_RTX if we failed; the caller should emit a normal call,
4052    otherwise try to get the result in TARGET, if convenient (and in
4053    mode MODE if that's convenient).  */
4054 
4055 static rtx
4056 expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
4057 {
4058   tree dst, src;
4059   location_t loc = EXPR_LOCATION (exp);
4060 
4061   if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4062     return NULL_RTX;
4063 
4064   dst = CALL_EXPR_ARG (exp, 0);
4065   src = CALL_EXPR_ARG (exp, 1);
4066 
4067   if (warn_stringop_overflow)
4068     {
4069       tree destsize = compute_objsize (dst, warn_stringop_overflow - 1);
4070       check_access (exp, dst, src, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE,
4071 		    src, destsize);
4072     }
4073 
4074   /* If return value is ignored, transform stpcpy into strcpy.  */
4075   if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
4076     {
4077       tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
4078       tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
4079       return expand_expr (result, target, mode, EXPAND_NORMAL);
4080     }
4081   else
4082     {
4083       tree len, lenp1;
4084       rtx ret;
4085 
4086       /* Ensure we get an actual string whose length can be evaluated at
4087 	 compile-time, not an expression containing a string.  This is
4088 	 because the latter will potentially produce pessimized code
4089 	 when used to produce the return value.  */
4090       c_strlen_data lendata = { };
4091       if (!c_getstr (src, NULL)
4092 	  || !(len = c_strlen (src, 0, &lendata, 1)))
4093 	return expand_movstr (dst, src, target,
4094 			      /*retmode=*/ RETURN_END_MINUS_ONE);
4095 
4096       if (lendata.decl && !TREE_NO_WARNING (exp))
4097 	warn_string_no_nul (EXPR_LOCATION (exp), "stpcpy", src, lendata.decl);
4098 
4099       lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
4100       ret = expand_builtin_mempcpy_args (dst, src, lenp1,
4101 					 target, exp,
4102 					 /*retmode=*/ RETURN_END_MINUS_ONE);
4103 
4104       if (ret)
4105 	return ret;
4106 
4107       if (TREE_CODE (len) == INTEGER_CST)
4108 	{
4109 	  rtx len_rtx = expand_normal (len);
4110 
4111 	  if (CONST_INT_P (len_rtx))
4112 	    {
4113 	      ret = expand_builtin_strcpy_args (exp, dst, src, target);
4114 
4115 	      if (ret)
4116 		{
4117 		  if (! target)
4118 		    {
4119 		      if (mode != VOIDmode)
4120 			target = gen_reg_rtx (mode);
4121 		      else
4122 			target = gen_reg_rtx (GET_MODE (ret));
4123 		    }
4124 		  if (GET_MODE (target) != GET_MODE (ret))
4125 		    ret = gen_lowpart (GET_MODE (target), ret);
4126 
4127 		  ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
4128 		  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
4129 		  gcc_assert (ret);
4130 
4131 		  return target;
4132 		}
4133 	    }
4134 	}
4135 
4136       return expand_movstr (dst, src, target,
4137 			    /*retmode=*/ RETURN_END_MINUS_ONE);
4138     }
4139 }
4140 
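/* Illustrative sketch (not GCC code) of the constant-source path
   above: with a known string length LEN, stpcpy is expanded as a
   mempcpy of LEN + 1 bytes whose result is then backed up by one,
   so that the returned pointer addresses the terminating NUL:

     #define _GNU_SOURCE   // for mempcpy
     #include <string.h>

     char *stpcpy_like (char *dst)
     {
       // stpcpy (dst, "abc") == dst + 3
       return (char *) mempcpy (dst, "abc", 4) - 1;
     }
*/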
4141 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
4142    arguments while being careful to avoid duplicate warnings (which could
4143    be issued if the expander were to expand the call, resulting in it
4144    being emitted in expand_call()).  */
4145 
4146 static rtx
4147 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
4148 {
4149   if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
4150     {
4151       /* The call has been successfully expanded.  Check for nonstring
4152 	 arguments and issue warnings as appropriate.  */
4153       maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
4154       return ret;
4155     }
4156 
4157   return NULL_RTX;
4158 }
4159 
4160 /* Check a call EXP to the stpncpy built-in for validity.
4161    Return NULL_RTX on both success and failure.  */
4162 
4163 static rtx
4164 expand_builtin_stpncpy (tree exp, rtx)
4165 {
4166   if (!validate_arglist (exp,
4167 			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4168       || !warn_stringop_overflow)
4169     return NULL_RTX;
4170 
4171   /* The source and destination of the call.  */
4172   tree dest = CALL_EXPR_ARG (exp, 0);
4173   tree src = CALL_EXPR_ARG (exp, 1);
4174 
4175   /* The exact number of bytes to write (not the maximum).  */
4176   tree len = CALL_EXPR_ARG (exp, 2);
4177 
4178   /* The size of the destination object.  */
4179   tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4180 
4181   check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src, destsize);
4182 
4183   return NULL_RTX;
4184 }
4185 
4186 /* Callback routine for store_by_pieces.  Read GET_MODE_SIZE (MODE)
4187    bytes from constant string DATA + OFFSET and return it as target
4188    constant.  */
4189 
4190 rtx
4191 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
4192 			  scalar_int_mode mode)
4193 {
4194   const char *str = (const char *) data;
4195 
4196   if ((unsigned HOST_WIDE_INT) offset > strlen (str))
4197     return const0_rtx;
4198 
4199   return c_readstr (str + offset, mode);
4200 }
4201 
4202 /* Helper to check the sizes of sequences and the destination of calls
4203    to __builtin_strncat and __builtin___strncat_chk.  Returns true on
4204    success (no overflow or invalid sizes), false otherwise.  */
4205 
4206 static bool
4207 check_strncat_sizes (tree exp, tree objsize)
4208 {
4209   tree dest = CALL_EXPR_ARG (exp, 0);
4210   tree src = CALL_EXPR_ARG (exp, 1);
4211   tree maxread = CALL_EXPR_ARG (exp, 2);
4212 
4213   /* Try to determine the range of lengths that the source expression
4214      refers to.  */
4215   c_strlen_data lendata = { };
4216   get_range_strlen (src, &lendata, /* eltsize = */ 1);
4217 
4218   /* Try to verify that the destination is big enough for the shortest
4219      string.  */
4220 
4221   if (!objsize && warn_stringop_overflow)
4222     {
4223       /* If it hasn't been provided by __strncat_chk, try to determine
4224 	 the size of the destination object into which the source is
4225 	 being copied.  */
4226       objsize = compute_objsize (dest, warn_stringop_overflow - 1);
4227     }
4228 
4229   /* Add one for the terminating nul.  */
4230   tree srclen = (lendata.minlen
4231 		 ? fold_build2 (PLUS_EXPR, size_type_node, lendata.minlen,
4232 				size_one_node)
4233 		 : NULL_TREE);
4234 
4235   /* The strncat function copies at most MAXREAD bytes and always appends
4236      the terminating nul so the specified upper bound should never be equal
4237      to (or greater than) the size of the destination.  */
4238   if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
4239       && tree_int_cst_equal (objsize, maxread))
4240     {
4241       location_t loc = tree_nonartificial_location (exp);
4242       loc = expansion_point_location_if_in_system_header (loc);
4243 
4244       warning_at (loc, OPT_Wstringop_overflow_,
4245 		  "%K%qD specified bound %E equals destination size",
4246 		  exp, get_callee_fndecl (exp), maxread);
4247 
4248       return false;
4249     }
4250 
4251   if (!srclen
4252       || (maxread && tree_fits_uhwi_p (maxread)
4253 	  && tree_fits_uhwi_p (srclen)
4254 	  && tree_int_cst_lt (maxread, srclen)))
4255     srclen = maxread;
4256 
4257   /* The number of bytes to write is LEN but check_access will also
4258      check SRCLEN if LEN's value isn't known.  */
4259   return check_access (exp, dest, src, /*size=*/NULL_TREE, maxread, srclen,
4260 		       objsize);
4261 }
4262 
4263 /* Similar to expand_builtin_strcat, do some very basic size validation
4264    of a call to the strncat builtin given by EXP.  Return NULL_RTX to have
4265    the built-in expand to a call to the library function.  */
4266 
4267 static rtx
4268 expand_builtin_strncat (tree exp, rtx)
4269 {
4270   if (!validate_arglist (exp,
4271 			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
4272       || !warn_stringop_overflow)
4273     return NULL_RTX;
4274 
4275   tree dest = CALL_EXPR_ARG (exp, 0);
4276   tree src = CALL_EXPR_ARG (exp, 1);
4277   /* The upper bound on the number of bytes to write.  */
4278   tree maxread = CALL_EXPR_ARG (exp, 2);
4279   /* The length of the source sequence.  */
4280   tree slen = c_strlen (src, 1);
4281 
4282   /* Try to determine the range of lengths that the source expression
4283      refers to.  Since the lengths are only used for warning and not
4284      for code generation disable strict mode below.  */
4285   tree maxlen = slen;
4286   if (!maxlen)
4287     {
4288       c_strlen_data lendata = { };
4289       get_range_strlen (src, &lendata, /* eltsize = */ 1);
4290       maxlen = lendata.maxbound;
4291     }
4292 
4293   /* Try to verify that the destination is big enough for the shortest
4294      string.  First try to determine the size of the destination object
4295      into which the source is being copied.  */
4296   tree destsize = compute_objsize (dest, warn_stringop_overflow - 1);
4297 
4298   /* Add one for the terminating nul.  */
4299   tree srclen = (maxlen
4300 		 ? fold_build2 (PLUS_EXPR, size_type_node, maxlen,
4301 				size_one_node)
4302 		 : NULL_TREE);
4303 
4304   /* The strncat function copies at most MAXREAD bytes and always appends
4305      the terminating nul so the specified upper bound should never be equal
4306      to (or greater than) the size of the destination.  */
4307   if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
4308       && tree_int_cst_equal (destsize, maxread))
4309     {
4310       location_t loc = tree_nonartificial_location (exp);
4311       loc = expansion_point_location_if_in_system_header (loc);
4312 
4313       warning_at (loc, OPT_Wstringop_overflow_,
4314 		  "%K%qD specified bound %E equals destination size",
4315 		  exp, get_callee_fndecl (exp), maxread);
4316 
4317       return NULL_RTX;
4318     }
4319 
4320   if (!srclen
4321       || (maxread && tree_fits_uhwi_p (maxread)
4322 	  && tree_fits_uhwi_p (srclen)
4323 	  && tree_int_cst_lt (maxread, srclen)))
4324     srclen = maxread;
4325 
4326   /* The number of bytes to write is SRCLEN.  */
4327   check_access (exp, dest, src, NULL_TREE, maxread, srclen, destsize);
4328 
4329   return NULL_RTX;
4330 }
4331 
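/* Illustrative example (not GCC code) of the bound-equals-size
   diagnostic above: strncat appends up to N bytes plus the terminating
   NUL, so passing the destination size as the bound can always
   overflow by one byte:

     #include <string.h>

     char d[8];

     void cat (const char *s)
     {
       strncat (d, s, sizeof d);  // warning: bound 8 equals destination size
       strncat (d, s, sizeof d - strlen (d) - 1);  // correct bound
     }
*/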
4332 /* Expand expression EXP, which is a call to the strncpy builtin.  Return
4333    NULL_RTX if we failed; the caller should emit a normal call.  */
4334 
4335 static rtx
4336 expand_builtin_strncpy (tree exp, rtx target)
4337 {
4338   location_t loc = EXPR_LOCATION (exp);
4339 
4340   if (validate_arglist (exp,
4341  			POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4342     {
4343       tree dest = CALL_EXPR_ARG (exp, 0);
4344       tree src = CALL_EXPR_ARG (exp, 1);
4345       /* The number of bytes to write (not the maximum).  */
4346       tree len = CALL_EXPR_ARG (exp, 2);
4347       /* The length of the source sequence.  */
4348       tree slen = c_strlen (src, 1);
4349 
4350       if (warn_stringop_overflow)
4351 	{
4352 	  tree destsize = compute_objsize (dest,
4353 					   warn_stringop_overflow - 1);
4354 
4355 	  /* The number of bytes to write is LEN but check_access will also
4356 	     check SLEN if LEN's value isn't known.  */
4357 	  check_access (exp, dest, src, len, /*maxread=*/NULL_TREE, src,
4358 			destsize);
4359 	}
4360 
4361       /* We must be passed a constant len and src parameter.  */
4362       if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
4363 	return NULL_RTX;
4364 
4365       slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
4366 
4367       /* We're required to pad with trailing zeros if the requested
4368 	 len is greater than strlen(s2)+1.  In that case try to
4369 	 use store_by_pieces, if it fails, punt.  */
4370       if (tree_int_cst_lt (slen, len))
4371 	{
4372 	  unsigned int dest_align = get_pointer_alignment (dest);
4373 	  const char *p = c_getstr (src);
4374 	  rtx dest_mem;
4375 
4376 	  if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
4377 	      || !can_store_by_pieces (tree_to_uhwi (len),
4378 				       builtin_strncpy_read_str,
4379 				       CONST_CAST (char *, p),
4380 				       dest_align, false))
4381 	    return NULL_RTX;
4382 
4383 	  dest_mem = get_memory_rtx (dest, len);
4384 	  store_by_pieces (dest_mem, tree_to_uhwi (len),
4385 			   builtin_strncpy_read_str,
4386 			   CONST_CAST (char *, p), dest_align, false,
4387 			   RETURN_BEGIN);
4388 	  dest_mem = force_operand (XEXP (dest_mem, 0), target);
4389 	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
4390 	  return dest_mem;
4391 	}
4392     }
4393   return NULL_RTX;
4394 }
4395 
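/* Illustrative sketch (not GCC code) of the padding rule handled
   above: when LEN exceeds strlen (SRC) + 1, strncpy must zero-fill
   the remainder, which the store_by_pieces path emits directly:

     #include <string.h>

     char d[8];

     void cpy (void)
     {
       strncpy (d, "ab", sizeof d);
       // equivalent effect:  d = { 'a', 'b', 0, 0, 0, 0, 0, 0 }
     }
*/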
4396 /* Callback routine for store_by_pieces.  Read GET_MODE_SIZE (MODE)
4397    bytes from constant string DATA + OFFSET and return it as target
4398    constant.  */
4399 
4400 rtx
4401 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4402 			 scalar_int_mode mode)
4403 {
4404   const char *c = (const char *) data;
4405   char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
4406 
4407   memset (p, *c, GET_MODE_SIZE (mode));
4408 
4409   return c_readstr (p, mode);
4410 }
4411 
4412 /* Callback routine for store_by_pieces.  Return the RTL of a register
4413    containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
4414    char value given in the RTL register data.  For example, if mode is
4415    4 bytes wide, return the RTL for 0x01010101*data.  */
4416 
4417 static rtx
4418 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
4419 			scalar_int_mode mode)
4420 {
4421   rtx target, coeff;
4422   size_t size;
4423   char *p;
4424 
4425   size = GET_MODE_SIZE (mode);
4426   if (size == 1)
4427     return (rtx) data;
4428 
4429   p = XALLOCAVEC (char, size);
4430   memset (p, 1, size);
4431   coeff = c_readstr (p, mode);
4432 
4433   target = convert_to_mode (mode, (rtx) data, 1);
4434   target = expand_mult (mode, target, coeff, NULL_RTX, 1);
4435   return force_reg (mode, target);
4436 }
4437 
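/* Illustrative sketch (not GCC code) of the multiply trick used by
   builtin_memset_gen_str: multiplying a byte value by 0x01010101
   replicates it into every byte of a word:

     #include <stdint.h>

     static uint32_t splat4 (uint8_t c)
     {
       return (uint32_t) c * 0x01010101u;   // e.g. 0xab -> 0xabababab
     }
*/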
4438 /* Expand expression EXP, which is a call to the memset builtin.  Return
4439    NULL_RTX if we failed; the caller should emit a normal call, otherwise
4440    try to get the result in TARGET, if convenient (and in mode MODE if that's
4441    convenient).  */
4442 
4443 static rtx
4444 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
4445 {
4446   if (!validate_arglist (exp,
4447  			 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
4448     return NULL_RTX;
4449 
4450   tree dest = CALL_EXPR_ARG (exp, 0);
4451   tree val = CALL_EXPR_ARG (exp, 1);
4452   tree len = CALL_EXPR_ARG (exp, 2);
4453 
4454   check_memop_access (exp, dest, NULL_TREE, len);
4455 
4456   return expand_builtin_memset_args (dest, val, len, target, mode, exp);
4457 }
4458 
4459 /* Helper function to do the actual work for expand_builtin_memset.  The
4460    arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4461    so that this can also be called without constructing an actual CALL_EXPR.
4462    The other arguments and return value are the same as for
4463    expand_builtin_memset.  */
4464 
4465 static rtx
4466 expand_builtin_memset_args (tree dest, tree val, tree len,
4467 			    rtx target, machine_mode mode, tree orig_exp)
4468 {
4469   tree fndecl, fn;
4470   enum built_in_function fcode;
4471   machine_mode val_mode;
4472   char c;
4473   unsigned int dest_align;
4474   rtx dest_mem, dest_addr, len_rtx;
4475   HOST_WIDE_INT expected_size = -1;
4476   unsigned int expected_align = 0;
4477   unsigned HOST_WIDE_INT min_size;
4478   unsigned HOST_WIDE_INT max_size;
4479   unsigned HOST_WIDE_INT probable_max_size;
4480 
4481   dest_align = get_pointer_alignment (dest);
4482 
4483   /* If DEST is not a pointer type, don't do this operation in-line.  */
4484   if (dest_align == 0)
4485     return NULL_RTX;
4486 
4487   if (currently_expanding_gimple_stmt)
4488     stringop_block_profile (currently_expanding_gimple_stmt,
4489 			    &expected_align, &expected_size);
4490 
4491   if (expected_align < dest_align)
4492     expected_align = dest_align;
4493 
4494   /* If the LEN parameter is zero, return DEST.  */
4495   if (integer_zerop (len))
4496     {
4497       /* Evaluate and ignore VAL in case it has side-effects.  */
4498       expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4499       return expand_expr (dest, target, mode, EXPAND_NORMAL);
4500     }
4501 
4502   /* Stabilize the arguments in case we fail.  */
4503   dest = builtin_save_expr (dest);
4504   val = builtin_save_expr (val);
4505   len = builtin_save_expr (len);
4506 
4507   len_rtx = expand_normal (len);
4508   determine_block_size (len, len_rtx, &min_size, &max_size,
4509 			&probable_max_size);
4510   dest_mem = get_memory_rtx (dest, len);
4511   val_mode = TYPE_MODE (unsigned_char_type_node);
4512 
4513   if (TREE_CODE (val) != INTEGER_CST)
4514     {
4515       rtx val_rtx;
4516 
4517       val_rtx = expand_normal (val);
4518       val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4519 
4520       /* Assume that we can memset by pieces if we can store
4521        * the coefficients by pieces (in the required modes).
4522        * We can't pass builtin_memset_gen_str as that emits RTL.  */
4523       c = 1;
4524       if (tree_fits_uhwi_p (len)
4525 	  && can_store_by_pieces (tree_to_uhwi (len),
4526 				  builtin_memset_read_str, &c, dest_align,
4527 				  true))
4528 	{
4529 	  val_rtx = force_reg (val_mode, val_rtx);
4530 	  store_by_pieces (dest_mem, tree_to_uhwi (len),
4531 			   builtin_memset_gen_str, val_rtx, dest_align,
4532 			   true, RETURN_BEGIN);
4533 	}
4534       else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4535 					dest_align, expected_align,
4536 					expected_size, min_size, max_size,
4537 					probable_max_size))
4538 	goto do_libcall;
4539 
4540       dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4541       dest_mem = convert_memory_address (ptr_mode, dest_mem);
4542       return dest_mem;
4543     }
4544 
4545   if (target_char_cast (val, &c))
4546     goto do_libcall;
4547 
4548   if (c)
4549     {
4550       if (tree_fits_uhwi_p (len)
4551 	  && can_store_by_pieces (tree_to_uhwi (len),
4552 				  builtin_memset_read_str, &c, dest_align,
4553 				  true))
4554 	store_by_pieces (dest_mem, tree_to_uhwi (len),
4555 			 builtin_memset_read_str, &c, dest_align, true,
4556 			 RETURN_BEGIN);
4557       else if (!set_storage_via_setmem (dest_mem, len_rtx,
4558 					gen_int_mode (c, val_mode),
4559 					dest_align, expected_align,
4560 					expected_size, min_size, max_size,
4561 					probable_max_size))
4562 	goto do_libcall;
4563 
4564       dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4565       dest_mem = convert_memory_address (ptr_mode, dest_mem);
4566       return dest_mem;
4567     }
4568 
4569   set_mem_align (dest_mem, dest_align);
4570   dest_addr = clear_storage_hints (dest_mem, len_rtx,
4571 				   CALL_EXPR_TAILCALL (orig_exp)
4572 				   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4573 				   expected_align, expected_size,
4574 				   min_size, max_size,
4575 				   probable_max_size);
4576 
4577   if (dest_addr == 0)
4578     {
4579       dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4580       dest_addr = convert_memory_address (ptr_mode, dest_addr);
4581     }
4582 
4583   return dest_addr;
4584 
4585  do_libcall:
4586   fndecl = get_callee_fndecl (orig_exp);
4587   fcode = DECL_FUNCTION_CODE (fndecl);
4588   if (fcode == BUILT_IN_MEMSET)
4589     fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4590 				dest, val, len);
4591   else if (fcode == BUILT_IN_BZERO)
4592     fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4593 				dest, len);
4594   else
4595     gcc_unreachable ();
4596   gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4597   CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4598   return expand_call (fn, target, target == const0_rtx);
4599 }
4600 
4601 /* Expand expression EXP, which is a call to the bzero builtin.  Return
4602    NULL_RTX if we failed the caller should emit a normal call.  */
4603 
4604 static rtx
expand_builtin_bzero(tree exp)4605 expand_builtin_bzero (tree exp)
4606 {
4607   if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4608     return NULL_RTX;
4609 
4610   tree dest = CALL_EXPR_ARG (exp, 0);
4611   tree size = CALL_EXPR_ARG (exp, 1);
4612 
4613   check_memop_access (exp, dest, NULL_TREE, size);
4614 
4615   /* New argument list transforming bzero(ptr x, int y) to
4616      memset(ptr x, int 0, size_t y).   This is done this way
4617      so that if it isn't expanded inline, we fallback to
4618      calling bzero instead of memset.  */
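
  /* For example, bzero (p, n) is expanded here as if it were
     memset (p, 0, (size_t) n); should that expansion fail, the
     do_libcall path above rebuilds a two-argument call to the
     original bzero FNDECL rather than a call to memset.  */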

  location_t loc = EXPR_LOCATION (exp);

  return expand_builtin_memset_args (dest, integer_zero_node,
				     fold_convert_loc (loc,
						       size_type_node, size),
				     const0_rtx, VOIDmode, exp);
}

/* Try to expand cmpstr operation ICODE with the given operands.
   Return the result rtx on success, otherwise return null.  */

static rtx
expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
	       HOST_WIDE_INT align)
{
  machine_mode insn_mode = insn_data[icode].operand[0].mode;

  if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
    target = NULL_RTX;

  struct expand_operand ops[4];
  create_output_operand (&ops[0], target, insn_mode);
  create_fixed_operand (&ops[1], arg1_rtx);
  create_fixed_operand (&ops[2], arg2_rtx);
  create_integer_operand (&ops[3], align);
  if (maybe_expand_insn (icode, 4, ops))
    return ops[0].value;
  return NULL_RTX;
}

/* Expand expression EXP, which is a call to the memcmp built-in function.
   Return NULL_RTX if we failed and the caller should emit a normal call,
   otherwise try to get the result in TARGET, if convenient.
   RESULT_EQ is true if we can relax the returned value to be either zero
   or nonzero, without caring about the sign.  */

static rtx
expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);
  tree len = CALL_EXPR_ARG (exp, 2);
  enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
  bool no_overflow = true;

  /* Diagnose calls where the specified length exceeds the size of either
     object.  */
  tree size = compute_objsize (arg1, 0);
  no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
			      len, /*maxread=*/NULL_TREE, size,
			      /*objsize=*/NULL_TREE);
  if (no_overflow)
    {
      size = compute_objsize (arg2, 0);
      no_overflow = check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE,
				  len,  /*maxread=*/NULL_TREE, size,
				  /*objsize=*/NULL_TREE);
    }

  /* If the specified length exceeds the size of either object,
     call the function.  */
  if (!no_overflow)
    return NULL_RTX;

  /* Due to the performance benefit, always inline the calls first
     when result_eq is false.  */
  rtx result = NULL_RTX;

  if (!result_eq && fcode != BUILT_IN_BCMP)
    {
      result = inline_expand_builtin_string_cmp (exp, target);
      if (result)
	return result;
    }

  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  location_t loc = EXPR_LOCATION (exp);

  unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
  unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

  /* If we don't have POINTER_TYPE, call the function.  */
  if (arg1_align == 0 || arg2_align == 0)
    return NULL_RTX;

  rtx arg1_rtx = get_memory_rtx (arg1, len);
  rtx arg2_rtx = get_memory_rtx (arg2, len);
  rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));

  /* Set MEM_SIZE as appropriate.  */
  if (CONST_INT_P (len_rtx))
    {
      set_mem_size (arg1_rtx, INTVAL (len_rtx));
      set_mem_size (arg2_rtx, INTVAL (len_rtx));
    }

  by_pieces_constfn constfn = NULL;

  const char *src_str = c_getstr (arg2);
  if (result_eq && src_str == NULL)
    {
      src_str = c_getstr (arg1);
      if (src_str != NULL)
	std::swap (arg1_rtx, arg2_rtx);
    }

  /* If SRC is a string constant and the block comparison would be done
     by pieces, we can avoid loading the string from memory and use only
     the computed constants.  */
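
  /* E.g. for memcmp (buf, "abc", 4), the four constant bytes can be
     produced as immediates via builtin_memcpy_read_str below, so only
     BUF has to be read at run time.  */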
  if (src_str
      && CONST_INT_P (len_rtx)
      && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1)
    constfn = builtin_memcpy_read_str;

  result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
				 TREE_TYPE (len), target,
				 result_eq, constfn,
				 CONST_CAST (char *, src_str));

  if (result)
    {
      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (result) == mode)
	return result;

      if (target != 0)
	{
	  convert_move (target, result, 0);
	  return target;
	}

      return convert_to_mode (mode, result, 0);
    }

  return NULL_RTX;
}

/* Expand expression EXP, which is a call to the strcmp builtin.  Return
   NULL_RTX if we failed, in which case the caller should emit a normal
   call; otherwise try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
{
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Due to the performance benefit, always inline the calls first.  */
  rtx result = NULL_RTX;
  result = inline_expand_builtin_string_cmp (exp, target);
  if (result)
    return result;

  insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
  insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
  if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
    return NULL_RTX;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);

  unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
  unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

  /* If we don't have POINTER_TYPE, call the function.  */
  if (arg1_align == 0 || arg2_align == 0)
    return NULL_RTX;

  /* Stabilize the arguments in case gen_cmpstr(n)si fails.  */
  arg1 = builtin_save_expr (arg1);
  arg2 = builtin_save_expr (arg2);

  rtx arg1_rtx = get_memory_rtx (arg1, NULL);
  rtx arg2_rtx = get_memory_rtx (arg2, NULL);

  /* Try to call cmpstrsi.  */
  if (cmpstr_icode != CODE_FOR_nothing)
    result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
			    MIN (arg1_align, arg2_align));

  /* Try to determine at least one length and call cmpstrnsi.  */
  if (!result && cmpstrn_icode != CODE_FOR_nothing)
    {
      tree len;
      rtx arg3_rtx;

      tree len1 = c_strlen (arg1, 1);
      tree len2 = c_strlen (arg2, 1);

      if (len1)
	len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
      if (len2)
	len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);

      /* If we don't have a constant length for the first, use the length
	 of the second, if we know it.  We don't require a constant for
	 this case; some cost analysis could be done if both are available
	 but neither is constant.  For now, assume they're equally cheap,
	 unless one has side effects.  If both strings have constant lengths,
	 use the smaller.  */
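
      /* For instance, in strcmp (s, "hi") only LEN2 is known:
	 strlen ("hi") + 1 == 3, so cmpstrnsi compares at most the
	 three bytes up to and including the terminating nul.  */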

      if (!len1)
	len = len2;
      else if (!len2)
	len = len1;
      else if (TREE_SIDE_EFFECTS (len1))
	len = len2;
      else if (TREE_SIDE_EFFECTS (len2))
	len = len1;
      else if (TREE_CODE (len1) != INTEGER_CST)
	len = len2;
      else if (TREE_CODE (len2) != INTEGER_CST)
	len = len1;
      else if (tree_int_cst_lt (len1, len2))
	len = len1;
      else
	len = len2;

      /* If both arguments have side effects, we cannot optimize.  */
      if (len && !TREE_SIDE_EFFECTS (len))
	{
	  arg3_rtx = expand_normal (len);
	  result = expand_cmpstrn_or_cmpmem
	    (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
	     arg3_rtx, MIN (arg1_align, arg2_align));
	}
    }

  tree fndecl = get_callee_fndecl (exp);
  if (result)
    {
      /* Check to see if the argument was declared attribute nonstring
	 and if so, issue a warning since at this point it's not known
	 to be nul-terminated.  */
      maybe_warn_nonstring_arg (fndecl, exp);

      /* Return the value in the proper mode for this function.  */
      machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE (result) == mode)
	return result;
      if (target == 0)
	return convert_to_mode (mode, result, 0);
      convert_move (target, result, 0);
      return target;
    }

  /* Expand the library call ourselves using a stabilized argument
     list to avoid re-evaluating the function's arguments twice.  */
  tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
  return expand_call (fn, target, target == const0_rtx);
}

/* Expand expression EXP, which is a call to the strncmp builtin.  Return
   NULL_RTX if we failed, in which case the caller should emit a normal
   call; otherwise try to get the result in TARGET, if convenient.  */

static rtx
expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
			ATTRIBUTE_UNUSED machine_mode mode)
{
  if (!validate_arglist (exp,
			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Due to the performance benefit, always inline the calls first.  */
  rtx result = NULL_RTX;
  result = inline_expand_builtin_string_cmp (exp, target);
  if (result)
    return result;

  /* If c_strlen can determine an expression for one of the string
     lengths, and it doesn't have side effects, then emit cmpstrnsi
     using length MIN(strlen(string)+1, arg3).  */
  insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
  if (cmpstrn_icode == CODE_FOR_nothing)
    return NULL_RTX;

  tree len;

  tree arg1 = CALL_EXPR_ARG (exp, 0);
  tree arg2 = CALL_EXPR_ARG (exp, 1);
  tree arg3 = CALL_EXPR_ARG (exp, 2);

  unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
  unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;

  tree len1 = c_strlen (arg1, 1);
  tree len2 = c_strlen (arg2, 1);

  location_t loc = EXPR_LOCATION (exp);

  if (len1)
    len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
  if (len2)
    len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);

  tree len3 = fold_convert_loc (loc, sizetype, arg3);

  /* If we don't have a constant length for the first, use the length
     of the second, if we know it.  If neither string is constant length,
     use the given length argument.  We don't require a constant for
     this case; some cost analysis could be done if both are available
     but neither is constant.  For now, assume they're equally cheap,
     unless one has side effects.  If both strings have constant lengths,
     use the smaller.  */
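
  /* E.g. for strncmp (s, "hi", 8), LEN2 is strlen ("hi") + 1 == 3
     while LEN3 is 8, so the comparison length becomes MIN (3, 8) == 3;
     bytes past the nul of the constant string cannot affect the
     result.  */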

  if (!len1 && !len2)
    len = len3;
  else if (!len1)
    len = len2;
  else if (!len2)
    len = len1;
  else if (TREE_SIDE_EFFECTS (len1))
    len = len2;
  else if (TREE_SIDE_EFFECTS (len2))
    len = len1;
  else if (TREE_CODE (len1) != INTEGER_CST)
    len = len2;
  else if (TREE_CODE (len2) != INTEGER_CST)
    len = len1;
  else if (tree_int_cst_lt (len1, len2))
    len = len1;
  else
    len = len2;

  /* If we are not using the given length, we must incorporate it here.
     The actual new length parameter will be MIN(len,arg3) in this case.  */
  if (len != len3)
    {
      len = fold_convert_loc (loc, sizetype, len);
      len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
    }
  rtx arg1_rtx = get_memory_rtx (arg1, len);
  rtx arg2_rtx = get_memory_rtx (arg2, len);
  rtx arg3_rtx = expand_normal (len);
  result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
				     arg2_rtx, TREE_TYPE (len), arg3_rtx,
				     MIN (arg1_align, arg2_align));

  tree fndecl = get_callee_fndecl (exp);
  if (result)
    {
      /* Check to see if the argument was declared attribute nonstring
	 and if so, issue a warning since at this point it's not known
	 to be nul-terminated.  */
      maybe_warn_nonstring_arg (fndecl, exp);

      /* Return the value in the proper mode for this function.  */
      mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE (result) == mode)
	return result;
      if (target == 0)
	return convert_to_mode (mode, result, 0);
      convert_move (target, result, 0);
      return target;
    }

  /* Expand the library call ourselves using a stabilized argument
     list to avoid re-evaluating the function's arguments twice.  */
  tree fn = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
  gcc_assert (TREE_CODE (fn) == CALL_EXPR);
  CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
  return expand_call (fn, target, target == const0_rtx);
}

/* Expand a call to __builtin_saveregs, generating the result in TARGET,
   if that's convenient.  */

rtx
expand_builtin_saveregs (void)
{
  rtx val;
  rtx_insn *seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  start_sequence ();

  /* Do whatever the machine needs done in this case.  */
  val = targetm.calls.expand_builtin_saveregs ();

  seq = get_insns ();
  end_sequence ();

  saveregs_value = val;

  /* Put the insns after the NOTE that starts the function.  If this
     is inside a start_sequence, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insn_after (seq, entry_of_function ());
  pop_topmost_sequence ();

  return val;
}

/* Expand a call to __builtin_next_arg.  */

static rtx
expand_builtin_next_arg (void)
{
  /* Checking arguments is already done in fold_builtin_next_arg,
     which must be called before this function.  */
  return expand_binop (ptr_mode, add_optab,
		       crtl->args.internal_arg_pointer,
		       crtl->args.arg_offset_rtx,
		       NULL_RTX, 0, OPTAB_LIB_WIDEN);
}

/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.  */

static tree
stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
{
  tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));

  /* The current way of determining the type of valist is completely
     bogus.  We should have the information on the va builtin instead.  */
  if (!vatype)
    vatype = targetm.fn_abi_va_list (cfun->decl);

  if (TREE_CODE (vatype) == ARRAY_TYPE)
    {
      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);

      /* For this case, the backends will be expecting a pointer to
	 vatype, but it's possible we've actually been given an array
	 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
	 So fix it.  */
      if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	{
	  tree p1 = build_pointer_type (TREE_TYPE (vatype));
	  valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
	}
    }
  else
    {
      tree pt = build_pointer_type (vatype);

      if (! needs_lvalue)
	{
	  if (! TREE_SIDE_EFFECTS (valist))
	    return valist;

	  valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
	  TREE_SIDE_EFFECTS (valist) = 1;
	}

      if (TREE_SIDE_EFFECTS (valist))
	valist = save_expr (valist);
      valist = fold_build2_loc (loc, MEM_REF,
				vatype, valist, build_int_cst (pt, 0));
    }

  return valist;
}

/* The "standard" definition of va_list is void*.  */

tree
std_build_builtin_va_list (void)
{
  return ptr_type_node;
}

/* The "standard" abi va_list is va_list_type_node.  */

tree
std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
{
  return va_list_type_node;
}

/* The "standard" type of va_list is va_list_type_node.  */

tree
std_canonical_va_list_type (tree type)
{
  tree wtype, htype;

  wtype = va_list_type_node;
  htype = type;

  if (TREE_CODE (wtype) == ARRAY_TYPE)
    {
      /* If va_list is an array type, the argument may have decayed
	 to a pointer type, e.g. by being passed to another function.
	 In that case, unwrap both types so that we can compare the
	 underlying records.  */
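
      /* For example, on targets whose ABI defines va_list as a
	 one-element array of structs (as the x86-64 psABI does), a
	 va_list function argument arrives as a pointer to that struct,
	 and both WTYPE and HTYPE are unwrapped one level here.  */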
      if (TREE_CODE (htype) == ARRAY_TYPE
	  || POINTER_TYPE_P (htype))
	{
	  wtype = TREE_TYPE (wtype);
	  htype = TREE_TYPE (htype);
	}
    }
  if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
    return va_list_type_node;

  return NULL_TREE;
}

/* The "standard" implementation of va_start: just assign `nextarg' to
   the variable.  */

void
std_expand_builtin_va_start (tree valist, rtx nextarg)
{
  rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
  convert_move (va_r, nextarg, 0);
}

/* Expand EXP, a call to __builtin_va_start.  */

static rtx
expand_builtin_va_start (tree exp)
{
  rtx nextarg;
  tree valist;
  location_t loc = EXPR_LOCATION (exp);

  if (call_expr_nargs (exp) < 2)
    {
      error_at (loc, "too few arguments to function %<va_start%>");
      return const0_rtx;
    }

  if (fold_builtin_next_arg (exp, true))
    return const0_rtx;

  nextarg = expand_builtin_next_arg ();
  valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);

  if (targetm.expand_builtin_va_start)
    targetm.expand_builtin_va_start (valist, nextarg);
  else
    std_expand_builtin_va_start (valist, nextarg);

  return const0_rtx;
}

/* Expand EXP, a call to __builtin_va_end.  */

static rtx
expand_builtin_va_end (tree exp)
{
  tree valist = CALL_EXPR_ARG (exp, 0);

  /* Evaluate for side effects, if needed.  I hate macros that don't
     do that.  */
  if (TREE_SIDE_EFFECTS (valist))
    expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);

  return const0_rtx;
}

/* Expand EXP, a call to __builtin_va_copy.  We do this as a
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  */

static rtx
expand_builtin_va_copy (tree exp)
{
  tree dst, src, t;
  location_t loc = EXPR_LOCATION (exp);

  dst = CALL_EXPR_ARG (exp, 0);
  src = CALL_EXPR_ARG (exp, 1);

  dst = stabilize_va_list_loc (loc, dst, 1);
  src = stabilize_va_list_loc (loc, src, 0);

  gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);

  if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
    {
      t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      rtx dstb, srcb, size;

      /* Evaluate to pointers.  */
      dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
      srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
      size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
			  NULL_RTX, VOIDmode, EXPAND_NORMAL);

      dstb = convert_memory_address (Pmode, dstb);
      srcb = convert_memory_address (Pmode, srcb);

      /* "Dereference" to BLKmode memories.  */
      dstb = gen_rtx_MEM (BLKmode, dstb);
      set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
      set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
      srcb = gen_rtx_MEM (BLKmode, srcb);
      set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
      set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));

      /* Copy.  */
      emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
    }

  return const0_rtx;
}

/* Expand a call to one of the builtin functions __builtin_frame_address or
   __builtin_return_address.  */

static rtx
expand_builtin_frame_address (tree fndecl, tree exp)
{
  /* The argument must be a nonnegative integer constant.
     It counts the number of frames to scan up the stack.
     The value is either the frame pointer value or the return
     address saved in that frame.  */
  if (call_expr_nargs (exp) == 0)
    /* Warning about missing arg was already issued.  */
    return const0_rtx;
  else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
    {
      error ("invalid argument to %qD", fndecl);
      return const0_rtx;
    }
  else
    {
      /* Number of frames to scan up the stack.  */
      unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));

      rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);

      /* Some ports cannot access arbitrary stack frames.  */
      if (tem == NULL)
	{
	  warning (0, "unsupported argument to %qD", fndecl);
	  return const0_rtx;
	}

      if (count)
	{
	  /* Warn since no effort is made to ensure that any frame
	     beyond the current one exists or can be safely reached.  */
	  warning (OPT_Wframe_address, "calling %qD with "
		   "a nonzero argument is unsafe", fndecl);
	}

      /* For __builtin_frame_address, return what we've got.  */
      if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
	return tem;

      if (!REG_P (tem)
	  && ! CONSTANT_P (tem))
	tem = copy_addr_to_reg (tem);
      return tem;
    }
}

/* Expand EXP, a call to the alloca builtin.  Return NULL_RTX if we
   failed and the caller should emit a normal call.  */

static rtx
expand_builtin_alloca (tree exp)
{
  rtx op0;
  rtx result;
  unsigned int align;
  tree fndecl = get_callee_fndecl (exp);
  HOST_WIDE_INT max_size;
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
  bool valid_arglist
    = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
       ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
			   VOID_TYPE)
       : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
	 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
	 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));

  if (!valid_arglist)
    return NULL_RTX;

  if ((alloca_for_var
       && warn_vla_limit >= HOST_WIDE_INT_MAX
       && warn_alloc_size_limit < warn_vla_limit)
      || (!alloca_for_var
	  && warn_alloca_limit >= HOST_WIDE_INT_MAX
	  && warn_alloc_size_limit < warn_alloca_limit))
    {
      /* -Walloca-larger-than and -Wvla-larger-than settings of
	 less than HOST_WIDE_INT_MAX override the more general
	 -Walloc-size-larger-than so unless either of the former
	 options is smaller than the last one (which would imply
	 that the call was already checked), check the alloca
	 arguments for overflow.  */
      tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
      int idx[] = { 0, -1 };
      maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
    }

  /* Compute the argument.  */
  op0 = expand_normal (CALL_EXPR_ARG (exp, 0));

  /* Compute the alignment.  */
  align = (fcode == BUILT_IN_ALLOCA
	   ? BIGGEST_ALIGNMENT
	   : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));

  /* Compute the maximum size.  */
  max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
	      ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
	      : -1);

  /* Allocate the desired space.  If the allocation stems from the declaration
     of a variable-sized object, it cannot accumulate.  */
  result
    = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
  result = convert_memory_address (ptr_mode, result);

  return result;
}

/* Expand a call to the __asan_allocas_unpoison builtin in EXP.  Add
   virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
   STACK_DYNAMIC_OFFSET value, to the second argument of the call.  See
   the motivation for this in the comment of the
   handle_builtin_stack_restore function.  */

static rtx
expand_asan_emit_allocas_unpoison (tree exp)
{
  tree arg0 = CALL_EXPR_ARG (exp, 0);
  tree arg1 = CALL_EXPR_ARG (exp, 1);
  rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
				 stack_pointer_rtx, NULL_RTX, 0,
				 OPTAB_LIB_WIDEN);
  off = convert_modes (ptr_mode, Pmode, off, 0);
  bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
			     OPTAB_LIB_WIDEN);
  rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
  ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
				 top, ptr_mode, bot, ptr_mode);
  return ret;
}

/* Expand a call to bswap builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
		      rtx subtarget)
{
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  op0 = expand_expr (arg,
		     subtarget && GET_MODE (subtarget) == target_mode
		     ? subtarget : NULL_RTX,
		     target_mode, EXPAND_NORMAL);
  if (GET_MODE (op0) != target_mode)
    op0 = convert_to_mode (target_mode, op0, 1);

  target = expand_unop (target_mode, bswap_optab, op0, target, 1);

  gcc_assert (target);

  return convert_to_mode (target_mode, target, 1);
}

/* Expand a call to a unary builtin in EXP.
   Return NULL_RTX if a normal call should be emitted rather than expanding the
   function in-line.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */

static rtx
expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
		     rtx subtarget, optab op_optab)
{
  rtx op0;

  if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
    return NULL_RTX;

  /* Compute the argument.  */
  op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
		     (subtarget
		      && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
			  == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
		     VOIDmode, EXPAND_NORMAL);
  /* Compute op, into TARGET if possible.
     Set TARGET to wherever the result comes back.  */
  target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
			op_optab, op0, target, op_optab != clrsb_optab);
  gcc_assert (target);

  return convert_to_mode (target_mode, target, 0);
}

/* Expand a call to __builtin_expect.  We just return our argument
   as the builtin_expect semantics should already have been carried
   out by the tree branch prediction pass.  */

static rtx
expand_builtin_expect (tree exp, rtx target)
{
  tree arg;

  if (call_expr_nargs (exp) < 2)
    return const0_rtx;
  arg = CALL_EXPR_ARG (exp, 0);

  target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
  /* When guessing was done, the hints should be already stripped away.  */
  gcc_assert (!flag_guess_branch_prob
	      || optimize == 0 || seen_error ());
  return target;
}

/* Expand a call to __builtin_expect_with_probability.  We just return our
   argument as the builtin_expect semantics should already have been carried
   out by the tree branch prediction pass.  */

static rtx
expand_builtin_expect_with_probability (tree exp, rtx target)
{
  tree arg;

  if (call_expr_nargs (exp) < 3)
    return const0_rtx;
  arg = CALL_EXPR_ARG (exp, 0);

  target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
  /* When guessing was done, the hints should be already stripped away.  */
  gcc_assert (!flag_guess_branch_prob
	      || optimize == 0 || seen_error ());
  return target;
}


/* Expand a call to __builtin_assume_aligned.  We just return our first
   argument as the builtin_assume_aligned semantics should already have
   been carried out by CCP.  */

static rtx
expand_builtin_assume_aligned (tree exp, rtx target)
{
  if (call_expr_nargs (exp) < 2)
    return const0_rtx;
  target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
			EXPAND_NORMAL);
  gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
	      && (call_expr_nargs (exp) < 3
		  || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
  return target;
}

void
expand_builtin_trap (void)
{
  if (targetm.have_trap ())
    {
      rtx_insn *insn = emit_insn (targetm.gen_trap ());
      /* For trap insns when not accumulating outgoing args force
	 REG_ARGS_SIZE note to prevent crossjumping of calls with
	 different args sizes.  */
      if (!ACCUMULATE_OUTGOING_ARGS)
	add_args_size_note (insn, stack_pointer_delta);
    }
  else
    {
      tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
      tree call_expr = build_call_expr (fn, 0);
      expand_call (call_expr, NULL_RTX, false);
    }

  emit_barrier ();
}

/* Expand a call to __builtin_unreachable.  We do nothing except emit
   a barrier saying that control flow will not pass here.

   It is the responsibility of the program being compiled to ensure
   that control flow never reaches __builtin_unreachable.  */
static void
expand_builtin_unreachable (void)
{
  emit_barrier ();
}

/* Expand EXP, a call to fabs, fabsf or fabsl.
   Return NULL_RTX if a normal call should be emitted rather than expanding
   the function inline.  If convenient, the result should be placed
   in TARGET.  SUBTARGET may be used as the target for computing
   the operand.  */

static rtx
expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
{
  machine_mode mode;
  tree arg;
  rtx op0;

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
  mode = TYPE_MODE (TREE_TYPE (arg));
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
  return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
}

/* Expand EXP, a call to copysign, copysignf, or copysignl.
   Return NULL if a normal call should be emitted rather than expanding the
   function inline.  If convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing the operand.  */

static rtx
expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
{
  rtx op0, op1;
  tree arg;

  if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);

  arg = CALL_EXPR_ARG (exp, 1);
  op1 = expand_normal (arg);

  return expand_copysign (op0, op1, target);
}

/* Expand a call to __builtin___clear_cache.  */

static rtx
expand_builtin___clear_cache (tree exp)
{
  if (!targetm.code_for_clear_cache)
    {
#ifdef CLEAR_INSN_CACHE
      /* There is no "clear_cache" insn, and __clear_cache() in libgcc
	 does something.  Just do the default expansion to a call to
	 __clear_cache().  */
      return NULL_RTX;
#else
      /* There is no "clear_cache" insn, and __clear_cache() in libgcc
	 does nothing.  There is no need to call it.  Do nothing.  */
      return const0_rtx;
#endif /* CLEAR_INSN_CACHE */
    }

  /* We have a "clear_cache" insn, and it will handle everything.  */
  tree begin, end;
  rtx begin_rtx, end_rtx;

  /* We must not expand to a library call.  If we did, any
     fallback library function in libgcc that might contain a call to
     __builtin___clear_cache() would recurse infinitely.  */
  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
    {
      error ("both arguments to %<__builtin___clear_cache%> must be pointers");
      return const0_rtx;
    }

  if (targetm.have_clear_cache ())
    {
      struct expand_operand ops[2];

      begin = CALL_EXPR_ARG (exp, 0);
      begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);

      end = CALL_EXPR_ARG (exp, 1);
      end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);

      create_address_operand (&ops[0], begin_rtx);
      create_address_operand (&ops[1], end_rtx);
      if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
	return const0_rtx;
    }
  return const0_rtx;
}

/* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT.  */

static rtx
round_trampoline_addr (rtx tramp)
{
  rtx temp, addend, mask;

  /* If we don't need too much alignment, we'll have been guaranteed
     proper alignment by get_trampoline_type.  */
  if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
    return tramp;

  /* Round address up to desired boundary.  */
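  /* E.g. with a TRAMPOLINE_ALIGNMENT of 128 bits on a byte-addressed
     target, ADDEND is 15 and MASK is -16, so the two operations below
     compute (TRAMP + 15) & -16: the first 16-byte boundary at or
     above TRAMP.  */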
  temp = gen_reg_rtx (Pmode);
  addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
  mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);

  temp  = expand_simple_binop (Pmode, PLUS, tramp, addend,
			       temp, 0, OPTAB_LIB_WIDEN);
  tramp = expand_simple_binop (Pmode, AND, temp, mask,
			       temp, 0, OPTAB_LIB_WIDEN);

  return tramp;
}

static rtx
expand_builtin_init_trampoline (tree exp, bool onstack)
{
  tree t_tramp, t_func, t_chain;
  rtx m_tramp, r_tramp, r_chain, tmp;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
			 POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  t_tramp = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_tramp = expand_normal (t_tramp);
  m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
  MEM_NOTRAP_P (m_tramp) = 1;

  /* If ONSTACK, the TRAMP argument should be the address of a field
     within the local function's FRAME decl.  Either way, let's see if
     we can fill in the MEM_ATTRs for this memory.  */
  if (TREE_CODE (t_tramp) == ADDR_EXPR)
    set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);

  /* Creator of a heap trampoline is responsible for making sure the
     address is aligned to at least STACK_BOUNDARY.  Normally malloc
     will ensure this anyhow.  */
  tmp = round_trampoline_addr (r_tramp);
  if (tmp != r_tramp)
    {
      m_tramp = change_address (m_tramp, BLKmode, tmp);
      set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
      set_mem_size (m_tramp, TRAMPOLINE_SIZE);
    }

  /* The FUNC argument should be the address of the nested function.
     Extract the actual function decl to pass to the hook.  */
  gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
  t_func = TREE_OPERAND (t_func, 0);
  gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);

  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the trampoline.  */
  targetm.calls.trampoline_init (m_tramp, t_func, r_chain);

  if (onstack)
    {
      trampolines_created = 1;

      if (targetm.calls.custom_function_descriptors != 0)
	warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
		    "trampoline generated for nested function %qD", t_func);
    }

  return const0_rtx;
}

static rtx
expand_builtin_adjust_trampoline (tree exp)
{
  rtx tramp;

  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
  tramp = round_trampoline_addr (tramp);
  if (targetm.calls.trampoline_adjust_address)
    tramp = targetm.calls.trampoline_adjust_address (tramp);

  return tramp;
}

/* Expand a call to the builtin descriptor initialization routine.
   A descriptor is made up of a pair of pointers: to the static
   chain and to the code entry, in this order.  */

static rtx
expand_builtin_init_descriptor (tree exp)
{
  tree t_descr, t_func, t_chain;
  rtx m_descr, r_descr, r_func, r_chain;

  if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
			 VOID_TYPE))
    return NULL_RTX;

  t_descr = CALL_EXPR_ARG (exp, 0);
  t_func = CALL_EXPR_ARG (exp, 1);
  t_chain = CALL_EXPR_ARG (exp, 2);

  r_descr = expand_normal (t_descr);
  m_descr = gen_rtx_MEM (BLKmode, r_descr);
  MEM_NOTRAP_P (m_descr) = 1;

  r_func = expand_normal (t_func);
  r_chain = expand_normal (t_chain);

  /* Generate insns to initialize the descriptor.  */
  emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
  emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
				     POINTER_SIZE / BITS_PER_UNIT), r_func);

  return const0_rtx;
}

/* Expand a call to the builtin descriptor adjustment routine.  */

static rtx
expand_builtin_adjust_descriptor (tree exp)
{
  rtx tramp;

  if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
    return NULL_RTX;

  tramp = expand_normal (CALL_EXPR_ARG (exp, 0));

  /* Unalign the descriptor to allow runtime identification.  */
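  /* For instance, with targetm.calls.custom_function_descriptors set
     to 1, descriptor addresses become odd while code entry points
     stay aligned, so an indirect-call helper can tell the two apart
     at run time by testing the low bit.  */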
  tramp = plus_constant (ptr_mode, tramp,
			 targetm.calls.custom_function_descriptors);

  return force_operand (tramp, NULL_RTX);
}

/* Expand the call EXP to the built-in signbit, signbitf or signbitl
   function.  The function first checks whether the back end provides
   an insn to implement signbit for the respective mode.  If not, it
   checks whether the floating point format of the value is such that
   the sign bit can be extracted.  If that is not the case, error out.
   EXP is the expression that is a call to the builtin function; if
   convenient, the result should be placed in TARGET.  */
static rtx
expand_builtin_signbit (tree exp, rtx target)
{
  const struct real_format *fmt;
  scalar_float_mode fmode;
  scalar_int_mode rmode, imode;
  tree arg;
  int word, bitpos;
  enum insn_code icode;
  rtx temp;
  location_t loc = EXPR_LOCATION (exp);

  if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
    return NULL_RTX;

  arg = CALL_EXPR_ARG (exp, 0);
  fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
  rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
  fmt = REAL_MODE_FORMAT (fmode);

  arg = builtin_save_expr (arg);

  /* Expand the argument yielding an RTX expression.  */
  temp = expand_normal (arg);

  /* Check if the back end provides an insn that handles signbit for the
     argument's mode.  */
  icode = optab_handler (signbit_optab, fmode);
  if (icode != CODE_FOR_nothing)
    {
      rtx_insn *last = get_last_insn ();
      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
      if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
	return target;
      delete_insns_since (last);
    }

  /* For floating point formats without a sign bit, implement signbit
     as "ARG < 0.0".  */
  bitpos = fmt->signbit_ro;
  if (bitpos < 0)
  {
    /* But we can't do this if the format supports signed zero.  */
    gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));

    arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
		       build_real (TREE_TYPE (arg), dconst0));
    return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
  }

  if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
    {
      imode = int_mode_for_mode (fmode).require ();
      temp = gen_lowpart (imode, temp);
    }
  else
    {
      imode = word_mode;
      /* Handle targets with different FP word orders.  */
      if (FLOAT_WORDS_BIG_ENDIAN)
	word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
      else
	word = bitpos / BITS_PER_WORD;
      temp = operand_subword_force (temp, word, fmode);
      bitpos = bitpos % BITS_PER_WORD;
    }

  /* Force the intermediate word_mode (or narrower) result into a
     register.  This avoids attempting to create paradoxical SUBREGs
     of floating point modes below.  */
  temp = force_reg (imode, temp);

  /* If the bitpos is within the "result mode" lowpart, the operation
     can be implemented with a single bitwise AND.  Otherwise, we need
     a right shift and an AND.  */
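  /* Illustration: IEEE single precision keeps its sign in bit 31, so
     with a 32-bit RMODE the mask below is 1 << 31 and a single AND
     suffices.  On a 64-bit target, IEEE double keeps its sign in bit
     63, outside a 32-bit RMODE, so the shift-then-AND path is taken.  */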

  if (bitpos < GET_MODE_BITSIZE (rmode))
    {
      wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));

      if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
	temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp,
			   immed_wide_int_const (mask, rmode),
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }
  else
    {
      /* Perform a logical right shift to place the signbit in the least
	 significant bit, then truncate the result to the desired mode
	 and mask just this bit.  */
      temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
      temp = gen_lowpart (rmode, temp);
      temp = expand_binop (rmode, and_optab, temp, const1_rtx,
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

  return temp;
}

/* Expand fork or exec calls.  TARGET is the desired target of the
   call.  EXP is the call.  FN is the identifier of the actual
   function.  IGNORE is nonzero if the value is to be ignored.  */

static rtx
expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
{
  tree id, decl;
  tree call;

  /* If we are not profiling, just call the function.  */
  if (!profile_arc_flag)
    return NULL_RTX;

  /* Otherwise call the wrapper.  This should be equivalent for the rest of
     the compiler, so the code does not diverge, and the wrapper may run the
     code necessary for keeping the profiling sane.  */

  switch (DECL_FUNCTION_CODE (fn))
    {
    case BUILT_IN_FORK:
      id = get_identifier ("__gcov_fork");
      break;

    case BUILT_IN_EXECL:
      id = get_identifier ("__gcov_execl");
      break;

    case BUILT_IN_EXECV:
      id = get_identifier ("__gcov_execv");
      break;

    case BUILT_IN_EXECLP:
      id = get_identifier ("__gcov_execlp");
      break;

    case BUILT_IN_EXECLE:
      id = get_identifier ("__gcov_execle");
      break;

    case BUILT_IN_EXECVP:
      id = get_identifier ("__gcov_execvp");
      break;

    case BUILT_IN_EXECVE:
      id = get_identifier ("__gcov_execve");
      break;

    default:
      gcc_unreachable ();
    }

  decl = build_decl (DECL_SOURCE_LOCATION (fn),
		     FUNCTION_DECL, id, TREE_TYPE (fn));
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  TREE_NOTHROW (decl) = 1;
  DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
  DECL_VISIBILITY_SPECIFIED (decl) = 1;
  call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
  return expand_call (call, target, ignore);
}



/* Reconstitute a mode for a __sync intrinsic operation.  Since the type of
   the pointer in these functions is void*, the tree optimizers may remove
   casts.  The mode computed in expand_builtin isn't reliable either, due
   to __sync_bool_compare_and_swap.

   FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
   group of builtins.  This gives us log2 of the mode size.  */
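
/* For example, __sync_fetch_and_add_4 lies two entries past its _1
   variant, so FCODE_DIFF is 2 and the mode requested below holds
   BITS_PER_UNIT << 2 == 32 bits, i.e. SImode on typical targets.  */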

static inline machine_mode
get_builtin_sync_mode (int fcode_diff)
{
  /* The size is not negotiable, so ask not to get BLKmode in return
     if the target indicates that a smaller size would be better.  */
  return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
}

/* Expand the memory expression LOC and return the appropriate memory operand
   for the builtin_sync operations.  */

static rtx
get_builtin_sync_mem (tree loc, machine_mode mode)
{
  rtx addr, mem;
  int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
				    ? TREE_TYPE (TREE_TYPE (loc))
				    : TREE_TYPE (loc));
  scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);

  addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
  addr = convert_memory_address (addr_mode, addr);

  /* Note that we explicitly do not want any alias information for this
     memory, so that we kill all other live memories.  Otherwise we don't
     satisfy the full barrier semantics of the intrinsic.  */
  mem = gen_rtx_MEM (mode, addr);

  set_mem_addr_space (mem, addr_space);

  mem = validize_mem (mem);

  /* The alignment must be at least that of the mode.  */
  set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
			   get_pointer_alignment (loc)));
  set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
  MEM_VOLATILE_P (mem) = 1;

  return mem;
}

/* Make sure an argument is in the right mode.
   EXP is the tree argument.
   MODE is the mode it should be in.  */

static rtx
expand_expr_force_mode (tree exp, machine_mode mode)
{
  rtx val;
  machine_mode old_mode;

  val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
  /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
     of CONST_INTs, where we know the old_mode only from the call argument.  */

  old_mode = GET_MODE (val);
  if (old_mode == VOIDmode)
    old_mode = TYPE_MODE (TREE_TYPE (exp));
  val = convert_modes (mode, old_mode, val, 1);
  return val;
}


/* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
   EXP is the CALL_EXPR.  CODE is the rtx code
   that corresponds to the arithmetic or logical operation from the name;
   an exception here is that NOT actually means NAND.  TARGET is an optional
   place for us to store the results; AFTER is true if this is the
   fetch_and_xxx form.  */
6031 
6032 static rtx
6033 expand_builtin_sync_operation (machine_mode mode, tree exp,
6034 			       enum rtx_code code, bool after,
6035 			       rtx target)
6036 {
6037   rtx val, mem;
6038   location_t loc = EXPR_LOCATION (exp);
6039 
6040   if (code == NOT && warn_sync_nand)
6041     {
6042       tree fndecl = get_callee_fndecl (exp);
6043       enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6044 
6045       static bool warned_f_a_n, warned_n_a_f;
6046 
6047       switch (fcode)
6048 	{
6049 	case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6050 	case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6051 	case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6052 	case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6053 	case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6054 	  if (warned_f_a_n)
6055 	    break;
6056 
6057 	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
6058 	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6059 	  warned_f_a_n = true;
6060 	  break;
6061 
6062 	case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6063 	case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6064 	case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6065 	case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6066 	case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6067 	  if (warned_n_a_f)
6068 	    break;
6069 
6070 	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
6071 	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
6072 	  warned_n_a_f = true;
6073 	  break;
6074 
6075 	default:
6076 	  gcc_unreachable ();
6077 	}
6078     }
6079 
6080   /* Expand the operands.  */
6081   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6082   val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6083 
6084   return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
6085 				 after);
6086 }
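/* Illustrative sketch, not part of GCC: the GCC 4.4 semantic change the
   warning above refers to.  */
#if 0
static int
example_fetch_and_nand (int *p, int v)
{
  int old = *p;
  *p = ~(*p & v);	/* GCC >= 4.4; releases before 4.4 did *p = ~*p & v.  */
  return old;		/* The fetch_and_ form returns the prior value.  */
}
#endif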
6087 
6088 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
6089    intrinsics. EXP is the CALL_EXPR.  IS_BOOL is
6090    true if this is the boolean form.  TARGET is a place for us to store the
6091    results; this is NOT optional if IS_BOOL is true.  */
6092 
6093 static rtx
6094 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
6095 				 bool is_bool, rtx target)
6096 {
6097   rtx old_val, new_val, mem;
6098   rtx *pbool, *poval;
6099 
6100   /* Expand the operands.  */
6101   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6102   old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6103   new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6104 
6105   pbool = poval = NULL;
6106   if (target != const0_rtx)
6107     {
6108       if (is_bool)
6109 	pbool = &target;
6110       else
6111 	poval = &target;
6112     }
6113   if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
6114 				       false, MEMMODEL_SYNC_SEQ_CST,
6115 				       MEMMODEL_SYNC_SEQ_CST))
6116     return NULL_RTX;
6117 
6118   return target;
6119 }
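/* Illustrative sketch, not part of GCC: the two user-visible forms expanded
   above.  The _bool_ form reports success; the _val_ form returns the prior
   contents whether or not the swap happened.  */
#if 0
static void
example_compare_and_swap (long *p)
{
  int swapped = __sync_bool_compare_and_swap (p, 0L, 1L);
  long previous = __sync_val_compare_and_swap (p, 1L, 2L);
  (void) swapped;
  (void) previous;
}
#endif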
6120 
6121 /* Expand the __sync_lock_test_and_set intrinsic.  Note that the most
6122    general form is actually an atomic exchange, and some targets only
6123    support a reduced form with the second argument being a constant 1.
6124    EXP is the CALL_EXPR; TARGET is an optional place for us to store
6125    the results.  */
6126 
6127 static rtx
6128 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
6129 				       rtx target)
6130 {
6131   rtx val, mem;
6132 
6133   /* Expand the operands.  */
6134   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6135   val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6136 
6137   return expand_sync_lock_test_and_set (target, mem, val);
6138 }
6139 
6140 /* Expand the __sync_lock_release intrinsic.  EXP is the CALL_EXPR.  */
6141 
6142 static void
6143 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
6144 {
6145   rtx mem;
6146 
6147   /* Expand the operands.  */
6148   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6149 
6150   expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
6151 }
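/* Illustrative sketch, not part of GCC: the classic spinlock built from the
   two builtins above; test_and_set acquires, lock_release (the atomic store
   of zero expanded above) releases.  */
#if 0
static volatile char example_lock;

static void
example_spinlock_critical_section (void)
{
  while (__sync_lock_test_and_set (&example_lock, 1))
    ;	/* Spin until the previously stored value was 0.  */
  /* ...critical section...  */
  __sync_lock_release (&example_lock);
}
#endif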
6152 
6153 /* Given an integer representing an ``enum memmodel'', verify its
6154    correctness and return the memory model enum.  */
6155 
6156 static enum memmodel
6157 get_memmodel (tree exp)
6158 {
6159   rtx op;
6160   unsigned HOST_WIDE_INT val;
6161   location_t loc
6162     = expansion_point_location_if_in_system_header (input_location);
6163 
6164   /* If the parameter is not a constant, it's a run time value so we'll just
6165      convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking.  */
6166   if (TREE_CODE (exp) != INTEGER_CST)
6167     return MEMMODEL_SEQ_CST;
6168 
6169   op = expand_normal (exp);
6170 
6171   val = INTVAL (op);
6172   if (targetm.memmodel_check)
6173     val = targetm.memmodel_check (val);
6174   else if (val & ~MEMMODEL_MASK)
6175     {
6176       warning_at (loc, OPT_Winvalid_memory_model,
6177 		  "unknown architecture specifier in memory model to builtin");
6178       return MEMMODEL_SEQ_CST;
6179     }
6180 
6181   /* Should never see a user explicit SYNC memory model, so >= LAST works.  */
6182   if (memmodel_base (val) >= MEMMODEL_LAST)
6183     {
6184       warning_at (loc, OPT_Winvalid_memory_model,
6185 		  "invalid memory model argument to builtin");
6186       return MEMMODEL_SEQ_CST;
6187     }
6188 
6189   /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
6190      be conservative and promote consume to acquire.  */
6191   if (val == MEMMODEL_CONSUME)
6192     val = MEMMODEL_ACQUIRE;
6193 
6194   return (enum memmodel) val;
6195 }
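/* Illustrative sketch, not part of GCC: how the checks above behave at the
   source level.  A non-constant model argument is executed as seq_cst, and
   consume is promoted to acquire (Bugzilla 59448).  */
#if 0
static int
example_memmodel_handling (int *p, int runtime_model)
{
  int a = __atomic_load_n (p, runtime_model);	 /* Treated as seq_cst.  */
  int b = __atomic_load_n (p, __ATOMIC_CONSUME); /* Promoted to acquire.  */
  return a + b;
}
#endif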
6196 
6197 /* Expand the __atomic_exchange intrinsic:
6198    	TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
6199    EXP is the CALL_EXPR.
6200    TARGET is an optional place for us to store the results.  */
6201 
6202 static rtx
6203 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
6204 {
6205   rtx val, mem;
6206   enum memmodel model;
6207 
6208   model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6209 
6210   if (!flag_inline_atomics)
6211     return NULL_RTX;
6212 
6213   /* Expand the operands.  */
6214   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6215   val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6216 
6217   return expand_atomic_exchange (target, mem, val, model);
6218 }
6219 
6220 /* Expand the __atomic_compare_exchange intrinsic:
6221    	bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
6222 					TYPE desired, BOOL weak,
6223 					enum memmodel success,
6224 					enum memmodel failure)
6225    EXP is the CALL_EXPR.
6226    TARGET is an optional place for us to store the results.  */
6227 
6228 static rtx
6229 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
6230 					rtx target)
6231 {
6232   rtx expect, desired, mem, oldval;
6233   rtx_code_label *label;
6234   enum memmodel success, failure;
6235   tree weak;
6236   bool is_weak;
6237   location_t loc
6238     = expansion_point_location_if_in_system_header (input_location);
6239 
6240   success = get_memmodel (CALL_EXPR_ARG (exp, 4));
6241   failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
6242 
6243   if (failure > success)
6244     {
6245       warning_at (loc, OPT_Winvalid_memory_model,
6246 		  "failure memory model cannot be stronger than success "
6247 		  "memory model for %<__atomic_compare_exchange%>");
6248       success = MEMMODEL_SEQ_CST;
6249     }
6250 
6251   if (is_mm_release (failure) || is_mm_acq_rel (failure))
6252     {
6253       warning_at (loc, OPT_Winvalid_memory_model,
6254 		  "invalid failure memory model for "
6255 		  "%<__atomic_compare_exchange%>");
6256       failure = MEMMODEL_SEQ_CST;
6257       success = MEMMODEL_SEQ_CST;
6258     }
6259 
6260 
6261   if (!flag_inline_atomics)
6262     return NULL_RTX;
6263 
6264   /* Expand the operands.  */
6265   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6266 
6267   expect = expand_normal (CALL_EXPR_ARG (exp, 1));
6268   expect = convert_memory_address (Pmode, expect);
6269   expect = gen_rtx_MEM (mode, expect);
6270   desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
6271 
6272   weak = CALL_EXPR_ARG (exp, 3);
6273   is_weak = false;
6274   if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
6275     is_weak = true;
6276 
6277   if (target == const0_rtx)
6278     target = NULL;
6279 
6280   /* Lest the rtl backend create a race condition with an improper store
6281      to memory, always create a new pseudo for OLDVAL.  */
6282   oldval = NULL;
6283 
6284   if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
6285 				       is_weak, success, failure))
6286     return NULL_RTX;
6287 
6288   /* Conditionally store back to EXPECT, lest we create a race condition
6289      with an improper store to memory.  */
6290   /* ??? With a rearrangement of atomics at the gimple level, we can handle
6291      the normal case where EXPECT is totally private, i.e. a register.  At
6292      which point the store can be unconditional.  */
6293   label = gen_label_rtx ();
6294   emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
6295 			   GET_MODE (target), 1, label);
6296   emit_move_insn (expect, oldval);
6297   emit_label (label);
6298 
6299   return target;
6300 }
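/* Illustrative sketch, not part of GCC: the semantics implemented above,
   including the conditional write-back to EXPECT on failure.  */
#if 0
static void
example_compare_exchange (int *object, int expected_init, int desired)
{
  int expected = expected_init;
  if (!__atomic_compare_exchange_n (object, &expected, desired,
				    /*weak=*/0, __ATOMIC_SEQ_CST,
				    __ATOMIC_SEQ_CST))
    {
      /* Only on failure was EXPECTED overwritten; it now holds the value
	 that was actually seen in *OBJECT.  */
    }
}
#endif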
6301 
6302 /* Helper function for expand_ifn_atomic_compare_exchange - expand
6303    internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
6304    call.  The weak parameter must be dropped to match the expected parameter
6305    list and the expected argument changed from value to pointer to memory
6306    slot.  */
6307 
6308 static void
6309 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
6310 {
6311   unsigned int z;
6312   vec<tree, va_gc> *vec;
6313 
6314   vec_alloc (vec, 5);
6315   vec->quick_push (gimple_call_arg (call, 0));
6316   tree expected = gimple_call_arg (call, 1);
6317   rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
6318 				      TREE_TYPE (expected));
6319   rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
6320   if (expd != x)
6321     emit_move_insn (x, expd);
6322   tree v = make_tree (TREE_TYPE (expected), x);
6323   vec->quick_push (build1 (ADDR_EXPR,
6324 			   build_pointer_type (TREE_TYPE (expected)), v));
6325   vec->quick_push (gimple_call_arg (call, 2));
6326   /* Skip the boolean weak parameter.  */
6327   for (z = 4; z < 6; z++)
6328     vec->quick_push (gimple_call_arg (call, z));
6329   /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}.  */
6330   unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
6331   gcc_assert (bytes_log2 < 5);
6332   built_in_function fncode
6333     = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
6334 			   + bytes_log2);
6335   tree fndecl = builtin_decl_explicit (fncode);
6336   tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
6337 		    fndecl);
6338   tree exp = build_call_vec (boolean_type_node, fn, vec);
6339   tree lhs = gimple_call_lhs (call);
6340   rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
6341   if (lhs)
6342     {
6343       rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6344       if (GET_MODE (boolret) != mode)
6345 	boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6346       x = force_reg (mode, x);
6347       write_complex_part (target, boolret, true);
6348       write_complex_part (target, x, false);
6349     }
6350 }
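/* Illustrative sketch, not part of GCC: the shape of the library call built
   above for a 4-byte access.  The weak flag is gone and the expected value
   has been spilled to a stack temporary whose address is passed instead.  */
#if 0
static _Bool
example_rewritten_call (int *object, int expected, int desired,
			int success_model, int failure_model)
{
  int expected_tmp = expected;	/* The stack temporary X above.  */
  return __atomic_compare_exchange_4 (object, &expected_tmp, desired,
				      success_model, failure_model);
}
#endif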
6351 
6352 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function.  */
6353 
6354 void
6355 expand_ifn_atomic_compare_exchange (gcall *call)
6356 {
6357   int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
6358   gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
6359   machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
6360   rtx expect, desired, mem, oldval, boolret;
6361   enum memmodel success, failure;
6362   tree lhs;
6363   bool is_weak;
6364   location_t loc
6365     = expansion_point_location_if_in_system_header (gimple_location (call));
6366 
6367   success = get_memmodel (gimple_call_arg (call, 4));
6368   failure = get_memmodel (gimple_call_arg (call, 5));
6369 
6370   if (failure > success)
6371     {
6372       warning_at (loc, OPT_Winvalid_memory_model,
6373 		  "failure memory model cannot be stronger than success "
6374 		  "memory model for %<__atomic_compare_exchange%>");
6375       success = MEMMODEL_SEQ_CST;
6376     }
6377 
6378   if (is_mm_release (failure) || is_mm_acq_rel (failure))
6379     {
6380       warning_at (loc, OPT_Winvalid_memory_model,
6381 		  "invalid failure memory model for "
6382 		  "%<__atomic_compare_exchange%>");
6383       failure = MEMMODEL_SEQ_CST;
6384       success = MEMMODEL_SEQ_CST;
6385     }
6386 
6387   if (!flag_inline_atomics)
6388     {
6389       expand_ifn_atomic_compare_exchange_into_call (call, mode);
6390       return;
6391     }
6392 
6393   /* Expand the operands.  */
6394   mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
6395 
6396   expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
6397   desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
6398 
6399   is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6400 
6401   boolret = NULL;
6402   oldval = NULL;
6403 
6404   if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6405 				       is_weak, success, failure))
6406     {
6407       expand_ifn_atomic_compare_exchange_into_call (call, mode);
6408       return;
6409     }
6410 
6411   lhs = gimple_call_lhs (call);
6412   if (lhs)
6413     {
6414       rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6415       if (GET_MODE (boolret) != mode)
6416 	boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6417       write_complex_part (target, boolret, true);
6418       write_complex_part (target, oldval, false);
6419     }
6420 }
6421 
6422 /* Expand the __atomic_load intrinsic:
6423    	TYPE __atomic_load (TYPE *object, enum memmodel)
6424    EXP is the CALL_EXPR.
6425    TARGET is an optional place for us to store the results.  */
6426 
6427 static rtx
6428 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6429 {
6430   rtx mem;
6431   enum memmodel model;
6432 
6433   model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6434   if (is_mm_release (model) || is_mm_acq_rel (model))
6435     {
6436       location_t loc
6437 	= expansion_point_location_if_in_system_header (input_location);
6438       warning_at (loc, OPT_Winvalid_memory_model,
6439 		  "invalid memory model for %<__atomic_load%>");
6440       model = MEMMODEL_SEQ_CST;
6441     }
6442 
6443   if (!flag_inline_atomics)
6444     return NULL_RTX;
6445 
6446   /* Expand the operand.  */
6447   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6448 
6449   return expand_atomic_load (target, mem, model);
6450 }
6451 
6452 
6453 /* Expand the __atomic_store intrinsic:
6454    	void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6455    EXP is the CALL_EXPR.
6456    TARGET is an optional place for us to store the results.  */
6457 
6458 static rtx
6459 expand_builtin_atomic_store (machine_mode mode, tree exp)
6460 {
6461   rtx mem, val;
6462   enum memmodel model;
6463 
6464   model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6465   if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6466 	|| is_mm_release (model)))
6467     {
6468       location_t loc
6469 	= expansion_point_location_if_in_system_header (input_location);
6470       warning_at (loc, OPT_Winvalid_memory_model,
6471 		  "invalid memory model for %<__atomic_store%>");
6472       model = MEMMODEL_SEQ_CST;
6473     }
6474 
6475   if (!flag_inline_atomics)
6476     return NULL_RTX;
6477 
6478   /* Expand the operands.  */
6479   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6480   val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6481 
6482   return expand_atomic_store (mem, val, model, false);
6483 }
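/* Illustrative sketch, not part of GCC: model arguments the two checks above
   reject.  Each one draws -Winvalid-memory-model and is executed as seq_cst
   instead.  */
#if 0
static void
example_invalid_load_store_models (int *p)
{
  int v = __atomic_load_n (p, __ATOMIC_RELEASE);  /* Invalid for a load.  */
  __atomic_store_n (p, v, __ATOMIC_ACQUIRE);	  /* Invalid for a store.  */
}
#endif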
6484 
6485 /* Expand the __atomic_fetch_XXX intrinsic:
6486    	TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6487    EXP is the CALL_EXPR.
6488    TARGET is an optional place for us to store the results.
6489    CODE is the rtx code for the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (NAND).
6490    FETCH_AFTER is true if returning the result of the operation.
6491    FETCH_AFTER is false if returning the value before the operation.
6492    IGNORE is true if the result is not used.
6493    EXT_CALL is the correct builtin for an external call if this cannot be
6494    resolved to an instruction sequence.  */
6495 
6496 static rtx
6497 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6498 				enum rtx_code code, bool fetch_after,
6499 				bool ignore, enum built_in_function ext_call)
6500 {
6501   rtx val, mem, ret;
6502   enum memmodel model;
6503   tree fndecl;
6504   tree addr;
6505 
6506   model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6507 
6508   /* Expand the operands.  */
6509   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6510   val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6511 
6512   /* Only try generating instructions if inlining is turned on.  */
6513   if (flag_inline_atomics)
6514     {
6515       ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6516       if (ret)
6517 	return ret;
6518     }
6519 
6520   /* Return if a different routine isn't needed for the library call.  */
6521   if (ext_call == BUILT_IN_NONE)
6522     return NULL_RTX;
6523 
6524   /* Change the call to the specified function.  */
6525   fndecl = get_callee_fndecl (exp);
6526   addr = CALL_EXPR_FN (exp);
6527   STRIP_NOPS (addr);
6528 
6529   gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6530   TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6531 
6532   /* If we will emit code after the call, the call cannot be a tail call.
6533      If it is emitted as a tail call, a barrier is emitted after it, and
6534      then all trailing code is removed.  */
6535   if (!ignore)
6536     CALL_EXPR_TAILCALL (exp) = 0;
6537 
6538   /* Expand the call here so we can emit trailing code.  */
6539   ret = expand_call (exp, target, ignore);
6540 
6541   /* Replace the original function just in case it matters.  */
6542   TREE_OPERAND (addr, 0) = fndecl;
6543 
6544   /* Then issue the arithmetic correction to return the right result.  */
6545   if (!ignore)
6546     {
6547       if (code == NOT)
6548 	{
6549 	  ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6550 				     OPTAB_LIB_WIDEN);
6551 	  ret = expand_simple_unop (mode, NOT, ret, target, true);
6552 	}
6553       else
6554 	ret = expand_simple_binop (mode, code, ret, val, target, true,
6555 				   OPTAB_LIB_WIDEN);
6556     }
6557   return ret;
6558 }
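/* Illustrative sketch, not part of GCC: the trailing arithmetic correction
   above.  When the expansion falls back to the fetch_and_OP library call but
   the caller wanted the OP_and_fetch result, it is recomputed from the old
   value; NAND needs the AND-then-NOT pair emitted above.  */
#if 0
static int
example_nand_fetch_correction (int old_value, int val)
{
  /* __atomic_nand_fetch reconstructed from a __atomic_fetch_nand result.  */
  return ~(old_value & val);
}
#endif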
6559 
6560 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function.  */
6561 
6562 void
6563 expand_ifn_atomic_bit_test_and (gcall *call)
6564 {
6565   tree ptr = gimple_call_arg (call, 0);
6566   tree bit = gimple_call_arg (call, 1);
6567   tree flag = gimple_call_arg (call, 2);
6568   tree lhs = gimple_call_lhs (call);
6569   enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6570   machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6571   enum rtx_code code;
6572   optab optab;
6573   struct expand_operand ops[5];
6574 
6575   gcc_assert (flag_inline_atomics);
6576 
6577   if (gimple_call_num_args (call) == 4)
6578     model = get_memmodel (gimple_call_arg (call, 3));
6579 
6580   rtx mem = get_builtin_sync_mem (ptr, mode);
6581   rtx val = expand_expr_force_mode (bit, mode);
6582 
6583   switch (gimple_call_internal_fn (call))
6584     {
6585     case IFN_ATOMIC_BIT_TEST_AND_SET:
6586       code = IOR;
6587       optab = atomic_bit_test_and_set_optab;
6588       break;
6589     case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6590       code = XOR;
6591       optab = atomic_bit_test_and_complement_optab;
6592       break;
6593     case IFN_ATOMIC_BIT_TEST_AND_RESET:
6594       code = AND;
6595       optab = atomic_bit_test_and_reset_optab;
6596       break;
6597     default:
6598       gcc_unreachable ();
6599     }
6600 
6601   if (lhs == NULL_TREE)
6602     {
6603       val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6604 				 val, NULL_RTX, true, OPTAB_DIRECT);
6605       if (code == AND)
6606 	val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6607       expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
6608       return;
6609     }
6610 
6611   rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6612   enum insn_code icode = direct_optab_handler (optab, mode);
6613   gcc_assert (icode != CODE_FOR_nothing);
6614   create_output_operand (&ops[0], target, mode);
6615   create_fixed_operand (&ops[1], mem);
6616   create_convert_operand_to (&ops[2], val, mode, true);
6617   create_integer_operand (&ops[3], model);
6618   create_integer_operand (&ops[4], integer_onep (flag));
6619   if (maybe_expand_insn (icode, 5, ops))
6620     return;
6621 
6622   rtx bitval = val;
6623   val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6624 			     val, NULL_RTX, true, OPTAB_DIRECT);
6625   rtx maskval = val;
6626   if (code == AND)
6627     val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6628   rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6629 				       code, model, false);
6630   if (integer_onep (flag))
6631     {
6632       result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6633 				    NULL_RTX, true, OPTAB_DIRECT);
6634       result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6635 				    true, OPTAB_DIRECT);
6636     }
6637   else
6638     result = expand_simple_binop (mode, AND, result, maskval, target, true,
6639 				  OPTAB_DIRECT);
6640   if (result != target)
6641     emit_move_insn (target, result);
6642 }
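/* Illustrative sketch, not part of GCC: the kind of source pattern folded to
   IFN_ATOMIC_BIT_TEST_AND_SET earlier in the middle end.  The FLAG operand
   handled above selects whether the result is reduced to a single bit, as
   here, or left masked.  */
#if 0
static int
example_bit_test_and_set (unsigned int *p, unsigned int bit)
{
  unsigned int mask = 1u << bit;
  return (__atomic_fetch_or (p, mask, __ATOMIC_SEQ_CST) & mask) != 0;
}
#endif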
6643 
6644 /* Expand an atomic clear operation.
6645 	void _atomic_clear (BOOL *obj, enum memmodel)
6646    EXP is the call expression.  */
6647 
6648 static rtx
6649 expand_builtin_atomic_clear (tree exp)
6650 {
6651   machine_mode mode;
6652   rtx mem, ret;
6653   enum memmodel model;
6654 
6655   mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6656   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6657   model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6658 
6659   if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6660     {
6661       location_t loc
6662 	= expansion_point_location_if_in_system_header (input_location);
6663       warning_at (loc, OPT_Winvalid_memory_model,
6664 		  "invalid memory model for %<__atomic_store%>");
6665       model = MEMMODEL_SEQ_CST;
6666     }
6667 
6668   /* Try issuing an __atomic_store; expand_atomic_store itself falls back
6669      to __sync_lock_release if need be.  The only way this can still fail
6670      is if the bool type is larger than a word size.  Unlikely, but handle
6671      it anyway for completeness.  Assume a single threaded model since
6672      there is no atomic support in this case, and no barriers are required.  */
6673   ret = expand_atomic_store (mem, const0_rtx, model, true);
6674   if (!ret)
6675     emit_move_insn (mem, const0_rtx);
6676   return const0_rtx;
6677 }
6678 
6679 /* Expand an atomic test_and_set operation.
6680 	bool _atomic_test_and_set (BOOL *obj, enum memmodel)
6681    EXP is the call expression.  */
6682 
6683 static rtx
6684 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6685 {
6686   rtx mem;
6687   enum memmodel model;
6688   machine_mode mode;
6689 
6690   mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6691   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6692   model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6693 
6694   return expand_atomic_test_and_set (target, mem, model);
6695 }
6696 
6697 
6698 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6699    this architecture.  If ARG1 is NULL, use typical alignment for size ARG0.  */
6700 
6701 static tree
6702 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6703 {
6704   int size;
6705   machine_mode mode;
6706   unsigned int mode_align, type_align;
6707 
6708   if (TREE_CODE (arg0) != INTEGER_CST)
6709     return NULL_TREE;
6710 
6711   /* We need a corresponding integer mode for the access to be lock-free.  */
6712   size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6713   if (!int_mode_for_size (size, 0).exists (&mode))
6714     return boolean_false_node;
6715 
6716   mode_align = GET_MODE_ALIGNMENT (mode);
6717 
6718   if (TREE_CODE (arg1) == INTEGER_CST)
6719     {
6720       unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6721 
6722       /* Either this argument is null, or it's a fake pointer encoding
6723          the alignment of the object.  */
6724       val = least_bit_hwi (val);
6725       val *= BITS_PER_UNIT;
6726 
6727       if (val == 0 || mode_align < val)
6728         type_align = mode_align;
6729       else
6730         type_align = val;
6731     }
6732   else
6733     {
6734       tree ttype = TREE_TYPE (arg1);
6735 
6736       /* This function is usually invoked and folded immediately by the front
6737 	 end before anything else has a chance to look at it.  The pointer
6738 	 parameter at this point is usually cast to a void *, so check for that
6739 	 and look past the cast.  */
6740       if (CONVERT_EXPR_P (arg1)
6741 	  && POINTER_TYPE_P (ttype)
6742 	  && VOID_TYPE_P (TREE_TYPE (ttype))
6743 	  && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6744 	arg1 = TREE_OPERAND (arg1, 0);
6745 
6746       ttype = TREE_TYPE (arg1);
6747       gcc_assert (POINTER_TYPE_P (ttype));
6748 
6749       /* Get the underlying type of the object.  */
6750       ttype = TREE_TYPE (ttype);
6751       type_align = TYPE_ALIGN (ttype);
6752     }
6753 
6754   /* If the object has smaller alignment, the lock free routines cannot
6755      be used.  */
6756   if (type_align < mode_align)
6757     return boolean_false_node;
6758 
6759   /* Check if a compare_and_swap pattern exists for the mode which represents
6760      the required size.  The pattern is not allowed to fail, so the existence
6761      of the pattern indicates support is present.  Also require that an
6762      atomic load exists for the required size.  */
6763   if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6764     return boolean_true_node;
6765   else
6766     return boolean_false_node;
6767 }
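/* Illustrative sketch, not part of GCC: the two encodings of the second
   argument handled above.  A null pointer means "typical alignment for this
   size"; a constant fake pointer encodes the object's alignment in its
   least significant set bit.  */
#if 0
static int
example_always_lock_free (void)
{
  int a = __atomic_always_lock_free (sizeof (int), 0);
  int b = __atomic_always_lock_free (sizeof (long), (void *) 8);  /* 8-byte aligned.  */
  return a && b;
}
#endif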
6768 
6769 /* Return true if the parameters to call EXP represent an object which will
6770    always generate lock free instructions.  The first argument represents the
6771    size of the object, and the second parameter is a pointer to the object
6772    itself.  If NULL is passed for the object, then the result is based on
6773    typical alignment for an object of the specified size.  Otherwise return
6774    false.  */
6775 
6776 static rtx
6777 expand_builtin_atomic_always_lock_free (tree exp)
6778 {
6779   tree size;
6780   tree arg0 = CALL_EXPR_ARG (exp, 0);
6781   tree arg1 = CALL_EXPR_ARG (exp, 1);
6782 
6783   if (TREE_CODE (arg0) != INTEGER_CST)
6784     {
6785       error ("non-constant argument 1 to __atomic_always_lock_free");
6786       return const0_rtx;
6787     }
6788 
6789   size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6790   if (size == boolean_true_node)
6791     return const1_rtx;
6792   return const0_rtx;
6793 }
6794 
6795 /* Return boolean_true_node if it can be determined that object ARG1 of
6796    size ARG0 is lock free on this architecture, otherwise NULL_TREE.  */
6797 
6798 static tree
6799 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6800 {
6801   if (!flag_inline_atomics)
6802     return NULL_TREE;
6803 
6804   /* If it isn't always lock free, don't generate a result.  */
6805   if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6806     return boolean_true_node;
6807 
6808   return NULL_TREE;
6809 }
6810 
6811 /* Return const1_rtx if the parameters to call EXP represent an object which
6812    will always generate lock free instructions.  The first argument represents
6813    the size of the object, and the second parameter is a pointer to the object
6814    itself.  If NULL is passed for the object, then the result is based on
6815    typical alignment for an object of the specified size.  Otherwise return
6816    NULL_RTX.  */
6817 
6818 static rtx
6819 expand_builtin_atomic_is_lock_free (tree exp)
6820 {
6821   tree size;
6822   tree arg0 = CALL_EXPR_ARG (exp, 0);
6823   tree arg1 = CALL_EXPR_ARG (exp, 1);
6824 
6825   if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6826     {
6827       error ("non-integer argument 1 to __atomic_is_lock_free");
6828       return NULL_RTX;
6829     }
6830 
6831   if (!flag_inline_atomics)
6832     return NULL_RTX;
6833 
6834   /* If the value is known at compile time, return the RTX for it.  */
6835   size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6836   if (size == boolean_true_node)
6837     return const1_rtx;
6838 
6839   return NULL_RTX;
6840 }
6841 
6842 /* Expand the __atomic_thread_fence intrinsic:
6843    	void __atomic_thread_fence (enum memmodel)
6844    EXP is the CALL_EXPR.  */
6845 
6846 static void
6847 expand_builtin_atomic_thread_fence (tree exp)
6848 {
6849   enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6850   expand_mem_thread_fence (model);
6851 }
6852 
6853 /* Expand the __atomic_signal_fence intrinsic:
6854    	void __atomic_signal_fence (enum memmodel)
6855    EXP is the CALL_EXPR.  */
6856 
6857 static void
6858 expand_builtin_atomic_signal_fence (tree exp)
6859 {
6860   enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6861   expand_mem_signal_fence (model);
6862 }
6863 
6864 /* Expand the __sync_synchronize intrinsic.  */
6865 
6866 static void
6867 expand_builtin_sync_synchronize (void)
6868 {
6869   expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
6870 }
6871 
6872 static rtx
6873 expand_builtin_thread_pointer (tree exp, rtx target)
6874 {
6875   enum insn_code icode;
6876   if (!validate_arglist (exp, VOID_TYPE))
6877     return const0_rtx;
6878   icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6879   if (icode != CODE_FOR_nothing)
6880     {
6881       struct expand_operand op;
6882       /* If the target is not suitable then create a new target.  */
6883       if (target == NULL_RTX
6884 	  || !REG_P (target)
6885 	  || GET_MODE (target) != Pmode)
6886 	target = gen_reg_rtx (Pmode);
6887       create_output_operand (&op, target, Pmode);
6888       expand_insn (icode, 1, &op);
6889       return target;
6890     }
6891   error ("%<__builtin_thread_pointer%> is not supported on this target");
6892   return const0_rtx;
6893 }
6894 
6895 static void
6896 expand_builtin_set_thread_pointer (tree exp)
6897 {
6898   enum insn_code icode;
6899   if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6900     return;
6901   icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6902   if (icode != CODE_FOR_nothing)
6903     {
6904       struct expand_operand op;
6905       rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6906 			     Pmode, EXPAND_NORMAL);
6907       create_input_operand (&op, val, Pmode);
6908       expand_insn (icode, 1, &op);
6909       return;
6910     }
6911   error ("%<__builtin_set_thread_pointer%> is not supported on this target");
6912 }
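/* Illustrative sketch, not part of GCC: the user-level builtins the two
   expanders above implement on targets that provide the optabs.  */
#if 0
static void *
example_thread_pointer_roundtrip (void *tp)
{
  __builtin_set_thread_pointer (tp);
  return __builtin_thread_pointer ();
}
#endif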
6913 
6914 
6915 /* Emit code to restore the stack pointer from the value saved in VAR.  */
6916 
6917 static void
6918 expand_stack_restore (tree var)
6919 {
6920   rtx_insn *prev;
6921   rtx sa = expand_normal (var);
6922 
6923   sa = convert_memory_address (Pmode, sa);
6924 
6925   prev = get_last_insn ();
6926   emit_stack_restore (SAVE_BLOCK, sa);
6927 
6928   record_new_stack_level ();
6929 
6930   fixup_args_size_notes (prev, get_last_insn (), 0);
6931 }
6932 
6933 /* Emit code to save the current value of the stack pointer.  */
6934 
6935 static rtx
6936 expand_stack_save (void)
6937 {
6938   rtx ret = NULL_RTX;
6939 
6940   emit_stack_save (SAVE_BLOCK, &ret);
6941   return ret;
6942 }
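/* Illustrative sketch, not part of GCC: the internal pairing of these two
   builtins.  Gimplification brackets a variable-length-array scope with
   __builtin_stack_save and __builtin_stack_restore so the VLA's storage is
   reclaimed when the scope is left.  */
#if 0
static void
example_vla_scope (int n)
{
  {				/* p = __builtin_stack_save ();  */
    char vla[n];
    vla[0] = 0;
  }				/* __builtin_stack_restore (p);  */
}
#endif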
6943 
6944 /* Emit code to get the OpenACC gang, worker or vector id or size.  */
6945 
6946 static rtx
6947 expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
6948 {
6949   const char *name;
6950   rtx fallback_retval;
6951   rtx_insn *(*gen_fn) (rtx, rtx);
6952   switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
6953     {
6954     case BUILT_IN_GOACC_PARLEVEL_ID:
6955       name = "__builtin_goacc_parlevel_id";
6956       fallback_retval = const0_rtx;
6957       gen_fn = targetm.gen_oacc_dim_pos;
6958       break;
6959     case BUILT_IN_GOACC_PARLEVEL_SIZE:
6960       name = "__builtin_goacc_parlevel_size";
6961       fallback_retval = const1_rtx;
6962       gen_fn = targetm.gen_oacc_dim_size;
6963       break;
6964     default:
6965       gcc_unreachable ();
6966     }
6967 
6968   if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
6969     {
6970       error ("%qs only supported in OpenACC code", name);
6971       return const0_rtx;
6972     }
6973 
6974   tree arg = CALL_EXPR_ARG (exp, 0);
6975   if (TREE_CODE (arg) != INTEGER_CST)
6976     {
6977       error ("non-constant argument 0 to %qs", name);
6978       return const0_rtx;
6979     }
6980 
6981   int dim = TREE_INT_CST_LOW (arg);
6982   switch (dim)
6983     {
6984     case GOMP_DIM_GANG:
6985     case GOMP_DIM_WORKER:
6986     case GOMP_DIM_VECTOR:
6987       break;
6988     default:
6989       error ("illegal argument 0 to %qs", name);
6990       return const0_rtx;
6991     }
6992 
6993   if (ignore)
6994     return target;
6995 
6996   if (target == NULL_RTX)
6997     target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6998 
6999   if (!targetm.have_oacc_dim_size ())
7000     {
7001       emit_move_insn (target, fallback_retval);
7002       return target;
7003     }
7004 
7005   rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
7006   emit_insn (gen_fn (reg, GEN_INT (dim)));
7007   if (reg != target)
7008     emit_move_insn (target, reg);
7009 
7010   return target;
7011 }
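/* Illustrative sketch, not part of GCC: querying OpenACC partitioning from
   offloaded code.  GOMP_DIM_GANG and GOMP_DIM_VECTOR are the constants from
   gomp-constants.h validated above.  */
#if 0
static int
example_goacc_query (void)
{
  int pos = __builtin_goacc_parlevel_id (GOMP_DIM_GANG);
  int width = __builtin_goacc_parlevel_size (GOMP_DIM_VECTOR);
  return pos * width;
}
#endif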
7012 
7013 /* Expand a string compare operation using a sequence of char comparisons
7014    to get rid of the calling overhead, with result going to TARGET if
7015    that's convenient.
7016 
7017    VAR_STR is the variable string source;
7018    CONST_STR is the constant string source;
7019    LENGTH is the number of chars to compare;
7020    CONST_STR_N indicates which source string is the constant string;
7021    IS_MEMCMP indicates whether it's a memcmp or strcmp.
7022 
7023    The expansion is (assuming const_str_n is 2, i.e. arg2 is a constant string):
7024 
7025    target = (int) (unsigned char) var_str[0]
7026 	    - (int) (unsigned char) const_str[0];
7027    if (target != 0)
7028      goto ne_label;
7029      ...
7030    target = (int) (unsigned char) var_str[length - 2]
7031 	    - (int) (unsigned char) const_str[length - 2];
7032    if (target != 0)
7033      goto ne_label;
7034    target = (int) (unsigned char) var_str[length - 1]
7035 	    - (int) (unsigned char) const_str[length - 1];
7036    ne_label:
7037   */
7038 
7039 static rtx
7040 inline_string_cmp (rtx target, tree var_str, const char *const_str,
7041 		   unsigned HOST_WIDE_INT length,
7042 		   int const_str_n, machine_mode mode)
7043 {
7044   HOST_WIDE_INT offset = 0;
7045   rtx var_rtx_array
7046     = get_memory_rtx (var_str, build_int_cst (unsigned_type_node, length));
7047   rtx var_rtx = NULL_RTX;
7048   rtx const_rtx = NULL_RTX;
7049   rtx result = target ? target : gen_reg_rtx (mode);
7050   rtx_code_label *ne_label = gen_label_rtx ();
7051   tree unit_type_node = unsigned_char_type_node;
7052   scalar_int_mode unit_mode
7053     = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);
7054 
7055   start_sequence ();
7056 
7057   for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
7058     {
7059       var_rtx
7060 	= adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
7061       const_rtx = c_readstr (const_str + offset, unit_mode);
7062       rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
7063       rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
7064 
7065       op0 = convert_modes (mode, unit_mode, op0, 1);
7066       op1 = convert_modes (mode, unit_mode, op1, 1);
7067       result = expand_simple_binop (mode, MINUS, op0, op1,
7068 				    result, 1, OPTAB_WIDEN);
7069       if (i < length - 1)
7070 	emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
7071 	    			 mode, true, ne_label);
7072       offset += GET_MODE_SIZE (unit_mode);
7073     }
7074 
7075   emit_label (ne_label);
7076   rtx_insn *insns = get_insns ();
7077   end_sequence ();
7078   emit_insn (insns);
7079 
7080   return result;
7081 }
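/* Illustrative sketch, not part of GCC: the sequence generated above for a
   call like strcmp (s, "hi"), i.e. CONST_STR_N == 2 and LENGTH == 3, written
   out as C.  */
#if 0
static int
example_inlined_strcmp (const char *s)
{
  int result = (int) (unsigned char) s[0] - (int) (unsigned char) 'h';
  if (result != 0)
    goto ne;
  result = (int) (unsigned char) s[1] - (int) (unsigned char) 'i';
  if (result != 0)
    goto ne;
  result = (int) (unsigned char) s[2] - (int) (unsigned char) '\0';
 ne:
  return result;
}
#endif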
7082 
7083 /* Inline expansion of a call to str(n)cmp or memcmp, with the result
7084    going to TARGET if that's convenient.
7085    If the call is not inlined, return NULL_RTX.  */
7086 static rtx
7087 inline_expand_builtin_string_cmp (tree exp, rtx target)
7088 {
7089   tree fndecl = get_callee_fndecl (exp);
7090   enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7091   unsigned HOST_WIDE_INT length = 0;
7092   bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
7093 
7094   /* Do NOT apply this inlining expansion when optimizing for size or
7095      optimization level below 2.  */
7096   if (optimize < 2 || optimize_insn_for_size_p ())
7097     return NULL_RTX;
7098 
7099   gcc_checking_assert (fcode == BUILT_IN_STRCMP
7100 		       || fcode == BUILT_IN_STRNCMP
7101 		       || fcode == BUILT_IN_MEMCMP);
7102 
7103   /* On a target where the type of the call (int) has the same or narrower
7104      precision than unsigned char, give up on the inline expansion.  */
7105   if (TYPE_PRECISION (unsigned_char_type_node)
7106       >= TYPE_PRECISION (TREE_TYPE (exp)))
7107     return NULL_RTX;
7108 
7109   tree arg1 = CALL_EXPR_ARG (exp, 0);
7110   tree arg2 = CALL_EXPR_ARG (exp, 1);
7111   tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
7112 
7113   unsigned HOST_WIDE_INT len1 = 0;
7114   unsigned HOST_WIDE_INT len2 = 0;
7115   unsigned HOST_WIDE_INT len3 = 0;
7116 
7117   const char *src_str1 = c_getstr (arg1, &len1);
7118   const char *src_str2 = c_getstr (arg2, &len2);
7119 
7120   /* If neither string is a constant string, the call does not qualify.  */
7121   if (!src_str1 && !src_str2)
7122     return NULL_RTX;
7123 
7124   /* For strncmp, if the length is not a constant, the call does not qualify.  */
7125   if (is_ncmp)
7126     {
7127       if (!tree_fits_uhwi_p (len3_tree))
7128 	return NULL_RTX;
7129       else
7130 	len3 = tree_to_uhwi (len3_tree);
7131     }
7132 
7133   if (src_str1 != NULL)
7134     len1 = strnlen (src_str1, len1) + 1;
7135 
7136   if (src_str2 != NULL)
7137     len2 = strnlen (src_str2, len2) + 1;
7138 
7139   int const_str_n = 0;
7140   if (!len1)
7141     const_str_n = 2;
7142   else if (!len2)
7143     const_str_n = 1;
7144   else if (len2 > len1)
7145     const_str_n = 1;
7146   else
7147     const_str_n = 2;
7148 
7149   gcc_checking_assert (const_str_n > 0);
7150   length = (const_str_n == 1) ? len1 : len2;
7151 
7152   if (is_ncmp && len3 < length)
7153     length = len3;
7154 
7155   /* If the length of the comparison is larger than the threshold,
7156      do nothing.  */
7157   if (length > (unsigned HOST_WIDE_INT)
7158 	       PARAM_VALUE (BUILTIN_STRING_CMP_INLINE_LENGTH))
7159     return NULL_RTX;
7160 
7161   machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
7162 
7163   /* Now, start inline expansion of the call.  */
7164   return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
7165 			    (const_str_n == 1) ? src_str1 : src_str2, length,
7166 			    const_str_n, mode);
7167 }
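/* Illustrative worked example, not part of GCC: for strncmp (s, "abc", 5),
   src_str2 is "abc", so const_str_n is 2 and len2 is strnlen ("abc", 3) + 1
   == 4; len3 is 5, so the inlined comparison covers MIN (4, 5) == 4 bytes,
   including the terminating NUL.  */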
7168 
7169 /* Expand a call to __builtin_speculation_safe_value_<N>.  MODE
7170    represents the size of the first argument to that call, or VOIDmode
7171    if the argument is a pointer.  IGNORE will be true if the result
7172    isn't used.  */
7173 static rtx
7174 expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
7175 			       bool ignore)
7176 {
7177   rtx val, failsafe;
7178   unsigned nargs = call_expr_nargs (exp);
7179 
7180   tree arg0 = CALL_EXPR_ARG (exp, 0);
7181 
7182   if (mode == VOIDmode)
7183     {
7184       mode = TYPE_MODE (TREE_TYPE (arg0));
7185       gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
7186     }
7187 
7188   val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
7189 
7190   /* An optional second argument can be used as a failsafe value on
7191      some machines.  If it isn't present, then the failsafe value is
7192      assumed to be 0.  */
7193   if (nargs > 1)
7194     {
7195       tree arg1 = CALL_EXPR_ARG (exp, 1);
7196       failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
7197     }
7198   else
7199     failsafe = const0_rtx;
7200 
7201   /* If the result isn't used, the behavior is undefined.  It would be
7202      nice to emit a warning here, but path splitting means this might
7203      happen with legitimate code.  So simply drop the builtin
7204      expansion in that case; we've handled any side-effects above.  */
7205   if (ignore)
7206     return const0_rtx;
7207 
7208   /* If we don't have a suitable target, create one to hold the result.  */
7209   if (target == NULL || GET_MODE (target) != mode)
7210     target = gen_reg_rtx (mode);
7211 
7212   if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
7213     val = convert_modes (mode, VOIDmode, val, false);
7214 
7215   return targetm.speculation_safe_value (mode, target, val, failsafe);
7216 }
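/* Illustrative sketch, not part of GCC: the documented use of the builtin
   expanded above, blocking speculative use of an out-of-range index.  */
#if 0
static int example_array[64];

static int
example_speculation_safe_load (unsigned int i, unsigned int bound)
{
  if (i < bound)
    return example_array[__builtin_speculation_safe_value (i)];
  return 0;
}
#endif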
7217 
7218 /* Expand an expression EXP that calls a built-in function,
7219    with result going to TARGET if that's convenient
7220    (and in mode MODE if that's convenient).
7221    SUBTARGET may be used as the target for computing one of EXP's operands.
7222    IGNORE is nonzero if the value is to be ignored.  */
7223 
7224 rtx
7225 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
7226 		int ignore)
7227 {
7228   tree fndecl = get_callee_fndecl (exp);
7229   enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7230   machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
7231   int flags;
7232 
7233   if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
7234     return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
7235 
7236   /* When ASan is enabled, we don't want to expand some memory/string
7237      builtins and rely on libsanitizer's hooks.  This allows us to avoid
7238      redundant checks and be sure that possible overflow will be detected
7239      by ASan.  */
7240 
7241   if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
7242     return expand_call (exp, target, ignore);
7243 
7244   /* When not optimizing, generate calls to library functions for a certain
7245      set of builtins.  */
7246   if (!optimize
7247       && !called_as_built_in (fndecl)
7248       && fcode != BUILT_IN_FORK
7249       && fcode != BUILT_IN_EXECL
7250       && fcode != BUILT_IN_EXECV
7251       && fcode != BUILT_IN_EXECLP
7252       && fcode != BUILT_IN_EXECLE
7253       && fcode != BUILT_IN_EXECVP
7254       && fcode != BUILT_IN_EXECVE
7255       && !ALLOCA_FUNCTION_CODE_P (fcode)
7256       && fcode != BUILT_IN_FREE)
7257     return expand_call (exp, target, ignore);
7258 
7259   /* The built-in function expanders test for target == const0_rtx
7260      to determine whether the function's result will be ignored.  */
7261   if (ignore)
7262     target = const0_rtx;
7263 
7264   /* If the result of a pure or const built-in function is ignored, and
7265      none of its arguments are volatile, we can avoid expanding the
7266      built-in call and just evaluate the arguments for side-effects.  */
7267   if (target == const0_rtx
7268       && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
7269       && !(flags & ECF_LOOPING_CONST_OR_PURE))
7270     {
7271       bool volatilep = false;
7272       tree arg;
7273       call_expr_arg_iterator iter;
7274 
7275       FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7276 	if (TREE_THIS_VOLATILE (arg))
7277 	  {
7278 	    volatilep = true;
7279 	    break;
7280 	  }
7281 
7282       if (! volatilep)
7283 	{
7284 	  FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
7285 	    expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
7286 	  return const0_rtx;
7287 	}
7288     }
7289 
7290   switch (fcode)
7291     {
7292     CASE_FLT_FN (BUILT_IN_FABS):
7293     CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
7294     case BUILT_IN_FABSD32:
7295     case BUILT_IN_FABSD64:
7296     case BUILT_IN_FABSD128:
7297       target = expand_builtin_fabs (exp, target, subtarget);
7298       if (target)
7299 	return target;
7300       break;
7301 
7302     CASE_FLT_FN (BUILT_IN_COPYSIGN):
7303     CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
7304       target = expand_builtin_copysign (exp, target, subtarget);
7305       if (target)
7306 	return target;
7307       break;
7308 
7309       /* Just do a normal library call if we were unable to fold
7310 	 the values.  */
7311     CASE_FLT_FN (BUILT_IN_CABS):
7312       break;
7313 
7314     CASE_FLT_FN (BUILT_IN_FMA):
7315     CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
7316       target = expand_builtin_mathfn_ternary (exp, target, subtarget);
7317       if (target)
7318 	return target;
7319       break;
7320 
7321     CASE_FLT_FN (BUILT_IN_ILOGB):
7322       if (! flag_unsafe_math_optimizations)
7323 	break;
7324       gcc_fallthrough ();
7325     CASE_FLT_FN (BUILT_IN_ISINF):
7326     CASE_FLT_FN (BUILT_IN_FINITE):
7327     case BUILT_IN_ISFINITE:
7328     case BUILT_IN_ISNORMAL:
7329       target = expand_builtin_interclass_mathfn (exp, target);
7330       if (target)
7331 	return target;
7332       break;
7333 
7334     CASE_FLT_FN (BUILT_IN_ICEIL):
7335     CASE_FLT_FN (BUILT_IN_LCEIL):
7336     CASE_FLT_FN (BUILT_IN_LLCEIL):
7337     CASE_FLT_FN (BUILT_IN_LFLOOR):
7338     CASE_FLT_FN (BUILT_IN_IFLOOR):
7339     CASE_FLT_FN (BUILT_IN_LLFLOOR):
7340       target = expand_builtin_int_roundingfn (exp, target);
7341       if (target)
7342 	return target;
7343       break;
7344 
7345     CASE_FLT_FN (BUILT_IN_IRINT):
7346     CASE_FLT_FN (BUILT_IN_LRINT):
7347     CASE_FLT_FN (BUILT_IN_LLRINT):
7348     CASE_FLT_FN (BUILT_IN_IROUND):
7349     CASE_FLT_FN (BUILT_IN_LROUND):
7350     CASE_FLT_FN (BUILT_IN_LLROUND):
7351       target = expand_builtin_int_roundingfn_2 (exp, target);
7352       if (target)
7353 	return target;
7354       break;
7355 
7356     CASE_FLT_FN (BUILT_IN_POWI):
7357       target = expand_builtin_powi (exp, target);
7358       if (target)
7359 	return target;
7360       break;
7361 
7362     CASE_FLT_FN (BUILT_IN_CEXPI):
7363       target = expand_builtin_cexpi (exp, target);
7364       gcc_assert (target);
7365       return target;
7366 
7367     CASE_FLT_FN (BUILT_IN_SIN):
7368     CASE_FLT_FN (BUILT_IN_COS):
7369       if (! flag_unsafe_math_optimizations)
7370 	break;
7371       target = expand_builtin_mathfn_3 (exp, target, subtarget);
7372       if (target)
7373 	return target;
7374       break;
7375 
7376     CASE_FLT_FN (BUILT_IN_SINCOS):
7377       if (! flag_unsafe_math_optimizations)
7378 	break;
7379       target = expand_builtin_sincos (exp);
7380       if (target)
7381 	return target;
7382       break;
7383 
7384     case BUILT_IN_APPLY_ARGS:
7385       return expand_builtin_apply_args ();
7386 
7387       /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7388 	 FUNCTION with a copy of the parameters described by
7389 	 ARGUMENTS, and ARGSIZE.  It returns a block of memory
7390 	 allocated on the stack into which is stored all the registers
7391 	 that might possibly be used for returning the result of a
7392 	 function.  ARGUMENTS is the value returned by
7393 	 __builtin_apply_args.  ARGSIZE is the number of bytes of
7394 	 arguments that must be copied.  ??? How should this value be
7395 	 computed?  We'll also need a safe worst case value for varargs
7396 	 functions.  */
7397     case BUILT_IN_APPLY:
7398       if (!validate_arglist (exp, POINTER_TYPE,
7399 			     POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
7400 	  && !validate_arglist (exp, REFERENCE_TYPE,
7401 				POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7402 	return const0_rtx;
7403       else
7404 	{
7405 	  rtx ops[3];
7406 
7407 	  ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
7408 	  ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
7409 	  ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
7410 
7411 	  return expand_builtin_apply (ops[0], ops[1], ops[2]);
7412 	}
7413 
7414       /* __builtin_return (RESULT) causes the function to return the
7415 	 value described by RESULT.  RESULT is address of the block of
7416 	 memory returned by __builtin_apply.  */
7417     case BUILT_IN_RETURN:
7418       if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7419 	expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
7420       return const0_rtx;
7421 
7422     case BUILT_IN_SAVEREGS:
7423       return expand_builtin_saveregs ();
7424 
7425     case BUILT_IN_VA_ARG_PACK:
7426       /* All valid uses of __builtin_va_arg_pack () are removed during
7427 	 inlining.  */
7428       error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
7429       return const0_rtx;
7430 
7431     case BUILT_IN_VA_ARG_PACK_LEN:
7432       /* All valid uses of __builtin_va_arg_pack_len () are removed during
7433 	 inlining.  */
7434       error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
7435       return const0_rtx;
7436 
7437       /* Return the address of the first anonymous stack arg.  */
7438     case BUILT_IN_NEXT_ARG:
7439       if (fold_builtin_next_arg (exp, false))
7440 	return const0_rtx;
7441       return expand_builtin_next_arg ();
7442 
7443     case BUILT_IN_CLEAR_CACHE:
7444       target = expand_builtin___clear_cache (exp);
7445       if (target)
7446         return target;
7447       break;
7448 
7449     case BUILT_IN_CLASSIFY_TYPE:
7450       return expand_builtin_classify_type (exp);
7451 
7452     case BUILT_IN_CONSTANT_P:
7453       return const0_rtx;
7454 
7455     case BUILT_IN_FRAME_ADDRESS:
7456     case BUILT_IN_RETURN_ADDRESS:
7457       return expand_builtin_frame_address (fndecl, exp);
7458 
7459     /* Returns the address of the area where the structure is returned.
7460        0 otherwise.  */
7461     case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
7462       if (call_expr_nargs (exp) != 0
7463 	  || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
7464 	  || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
7465 	return const0_rtx;
7466       else
7467 	return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
7468 
7469     CASE_BUILT_IN_ALLOCA:
7470       target = expand_builtin_alloca (exp);
7471       if (target)
7472 	return target;
7473       break;
7474 
7475     case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
7476       return expand_asan_emit_allocas_unpoison (exp);
7477 
7478     case BUILT_IN_STACK_SAVE:
7479       return expand_stack_save ();
7480 
7481     case BUILT_IN_STACK_RESTORE:
7482       expand_stack_restore (CALL_EXPR_ARG (exp, 0));
7483       return const0_rtx;
7484 
7485     case BUILT_IN_BSWAP16:
7486     case BUILT_IN_BSWAP32:
7487     case BUILT_IN_BSWAP64:
7488       target = expand_builtin_bswap (target_mode, exp, target, subtarget);
7489       if (target)
7490 	return target;
7491       break;
7492 
7493     CASE_INT_FN (BUILT_IN_FFS):
7494       target = expand_builtin_unop (target_mode, exp, target,
7495 				    subtarget, ffs_optab);
7496       if (target)
7497 	return target;
7498       break;
7499 
7500     CASE_INT_FN (BUILT_IN_CLZ):
7501       target = expand_builtin_unop (target_mode, exp, target,
7502 				    subtarget, clz_optab);
7503       if (target)
7504 	return target;
7505       break;
7506 
7507     CASE_INT_FN (BUILT_IN_CTZ):
7508       target = expand_builtin_unop (target_mode, exp, target,
7509 				    subtarget, ctz_optab);
7510       if (target)
7511 	return target;
7512       break;
7513 
7514     CASE_INT_FN (BUILT_IN_CLRSB):
7515       target = expand_builtin_unop (target_mode, exp, target,
7516 				    subtarget, clrsb_optab);
7517       if (target)
7518 	return target;
7519       break;
7520 
7521     CASE_INT_FN (BUILT_IN_POPCOUNT):
7522       target = expand_builtin_unop (target_mode, exp, target,
7523 				    subtarget, popcount_optab);
7524       if (target)
7525 	return target;
7526       break;
7527 
7528     CASE_INT_FN (BUILT_IN_PARITY):
7529       target = expand_builtin_unop (target_mode, exp, target,
7530 				    subtarget, parity_optab);
7531       if (target)
7532 	return target;
7533       break;
7534 
7535     case BUILT_IN_STRLEN:
7536       target = expand_builtin_strlen (exp, target, target_mode);
7537       if (target)
7538 	return target;
7539       break;
7540 
7541     case BUILT_IN_STRNLEN:
7542       target = expand_builtin_strnlen (exp, target, target_mode);
7543       if (target)
7544 	return target;
7545       break;
7546 
7547     case BUILT_IN_STRCAT:
7548       target = expand_builtin_strcat (exp, target);
7549       if (target)
7550 	return target;
7551       break;
7552 
7553     case BUILT_IN_STRCPY:
7554       target = expand_builtin_strcpy (exp, target);
7555       if (target)
7556 	return target;
7557       break;
7558 
7559     case BUILT_IN_STRNCAT:
7560       target = expand_builtin_strncat (exp, target);
7561       if (target)
7562 	return target;
7563       break;
7564 
7565     case BUILT_IN_STRNCPY:
7566       target = expand_builtin_strncpy (exp, target);
7567       if (target)
7568 	return target;
7569       break;
7570 
7571     case BUILT_IN_STPCPY:
7572       target = expand_builtin_stpcpy (exp, target, mode);
7573       if (target)
7574 	return target;
7575       break;
7576 
7577     case BUILT_IN_STPNCPY:
7578       target = expand_builtin_stpncpy (exp, target);
7579       if (target)
7580 	return target;
7581       break;
7582 
7583     case BUILT_IN_MEMCHR:
7584       target = expand_builtin_memchr (exp, target);
7585       if (target)
7586 	return target;
7587       break;
7588 
7589     case BUILT_IN_MEMCPY:
7590       target = expand_builtin_memcpy (exp, target);
7591       if (target)
7592 	return target;
7593       break;
7594 
7595     case BUILT_IN_MEMMOVE:
7596       target = expand_builtin_memmove (exp, target);
7597       if (target)
7598 	return target;
7599       break;
7600 
7601     case BUILT_IN_MEMPCPY:
7602       target = expand_builtin_mempcpy (exp, target);
7603       if (target)
7604 	return target;
7605       break;
7606 
7607     case BUILT_IN_MEMSET:
7608       target = expand_builtin_memset (exp, target, mode);
7609       if (target)
7610 	return target;
7611       break;
7612 
7613     case BUILT_IN_BZERO:
7614       target = expand_builtin_bzero (exp);
7615       if (target)
7616 	return target;
7617       break;
7618 
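    /* BUILT_IN_STRCMP_EQ is generated by the strlen pass when the
       strcmp result is used only in equality comparisons with zero;
       it carries the length to compare as a third argument, which is
       what makes the memcmp-style expansion below valid.  */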
7619     /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7620        back to a BUILT_IN_STRCMP.  Remember to delete the 3rd parameter
7621        when changing it to a strcmp call.  */
7622     case BUILT_IN_STRCMP_EQ:
7623       target = expand_builtin_memcmp (exp, target, true);
7624       if (target)
7625 	return target;
7626 
7627       /* Change this call back to a BUILT_IN_STRCMP.  */
7628       TREE_OPERAND (exp, 1)
7629 	= build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
7630 
7631       /* Delete the last parameter.  */
7632       unsigned int i;
7633       vec<tree, va_gc> *arg_vec;
7634       vec_alloc (arg_vec, 2);
7635       for (i = 0; i < 2; i++)
7636 	arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
7637       exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
7638       /* FALLTHROUGH */
7639 
7640     case BUILT_IN_STRCMP:
7641       target = expand_builtin_strcmp (exp, target);
7642       if (target)
7643 	return target;
7644       break;
7645 
7646     /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7647        back to a BUILT_IN_STRNCMP.  */
7648     case BUILT_IN_STRNCMP_EQ:
7649       target = expand_builtin_memcmp (exp, target, true);
7650       if (target)
7651 	return target;
7652 
7653       /* Change it back to a BUILT_IN_STRNCMP.  */
7654       TREE_OPERAND (exp, 1)
7655 	= build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
7656       /* FALLTHROUGH */
7657 
7658     case BUILT_IN_STRNCMP:
7659       target = expand_builtin_strncmp (exp, target, mode);
7660       if (target)
7661 	return target;
7662       break;
7663 
7664     case BUILT_IN_BCMP:
7665     case BUILT_IN_MEMCMP:
7666     case BUILT_IN_MEMCMP_EQ:
7667       target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
7668       if (target)
7669 	return target;
7670       if (fcode == BUILT_IN_MEMCMP_EQ)
7671 	{
7672 	  tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
7673 	  TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
7674 	}
7675       break;
7676 
7677     case BUILT_IN_SETJMP:
7678       /* This should have been lowered to the builtins below.  */
7679       gcc_unreachable ();
7680 
7681     case BUILT_IN_SETJMP_SETUP:
7682       /* __builtin_setjmp_setup is passed a pointer to an array of five words
7683           and the receiver label.  */
7684       if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7685 	{
7686 	  rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7687 				      VOIDmode, EXPAND_NORMAL);
7688 	  tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
7689 	  rtx_insn *label_r = label_rtx (label);
7690 
7691 	  /* This is copied from the handling of non-local gotos.  */
7692 	  expand_builtin_setjmp_setup (buf_addr, label_r);
7693 	  nonlocal_goto_handler_labels
7694 	    = gen_rtx_INSN_LIST (VOIDmode, label_r,
7695 				 nonlocal_goto_handler_labels);
7696 	  /* ??? Do not let expand_label treat us as such since we would
7697 	     not want to be both on the list of non-local labels and on
7698 	     the list of forced labels.  */
7699 	  FORCED_LABEL (label) = 0;
7700 	  return const0_rtx;
7701 	}
7702       break;
7703 
7704     case BUILT_IN_SETJMP_RECEIVER:
7705        /* __builtin_setjmp_receiver is passed the receiver label.  */
7706       if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7707 	{
7708 	  tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
7709 	  rtx_insn *label_r = label_rtx (label);
7710 
7711 	  expand_builtin_setjmp_receiver (label_r);
7712 	  return const0_rtx;
7713 	}
7714       break;
7715 
7716       /* __builtin_longjmp is passed a pointer to an array of five words.
7717 	 It's similar to the C library longjmp function but works with
7718 	 __builtin_setjmp above.  */
7719     case BUILT_IN_LONGJMP:
7720       if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7721 	{
7722 	  rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7723 				      VOIDmode, EXPAND_NORMAL);
7724 	  rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
7725 
7726 	  if (value != const1_rtx)
7727 	    {
7728 	      error ("%<__builtin_longjmp%> second argument must be 1");
7729 	      return const0_rtx;
7730 	    }
7731 
7732 	  expand_builtin_longjmp (buf_addr, value);
7733 	  return const0_rtx;
7734 	}
7735       break;
7736 
7737     case BUILT_IN_NONLOCAL_GOTO:
7738       target = expand_builtin_nonlocal_goto (exp);
7739       if (target)
7740 	return target;
7741       break;
7742 
7743       /* This updates the setjmp buffer that is its argument with the value
7744 	 of the current stack pointer.  */
7745     case BUILT_IN_UPDATE_SETJMP_BUF:
7746       if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7747 	{
7748 	  rtx buf_addr
7749 	    = expand_normal (CALL_EXPR_ARG (exp, 0));
7750 
7751 	  expand_builtin_update_setjmp_buf (buf_addr);
7752 	  return const0_rtx;
7753 	}
7754       break;
7755 
7756     case BUILT_IN_TRAP:
7757       expand_builtin_trap ();
7758       return const0_rtx;
7759 
7760     case BUILT_IN_UNREACHABLE:
7761       expand_builtin_unreachable ();
7762       return const0_rtx;
7763 
7764     CASE_FLT_FN (BUILT_IN_SIGNBIT):
7765     case BUILT_IN_SIGNBITD32:
7766     case BUILT_IN_SIGNBITD64:
7767     case BUILT_IN_SIGNBITD128:
7768       target = expand_builtin_signbit (exp, target);
7769       if (target)
7770 	return target;
7771       break;
7772 
7773       /* Various hooks for the DWARF 2 __throw routine.  */
7774     case BUILT_IN_UNWIND_INIT:
7775       expand_builtin_unwind_init ();
7776       return const0_rtx;
7777     case BUILT_IN_DWARF_CFA:
7778       return virtual_cfa_rtx;
7779 #ifdef DWARF2_UNWIND_INFO
7780     case BUILT_IN_DWARF_SP_COLUMN:
7781       return expand_builtin_dwarf_sp_column ();
7782     case BUILT_IN_INIT_DWARF_REG_SIZES:
7783       expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
7784       return const0_rtx;
7785 #endif
7786     case BUILT_IN_FROB_RETURN_ADDR:
7787       return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
7788     case BUILT_IN_EXTRACT_RETURN_ADDR:
7789       return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
7790     case BUILT_IN_EH_RETURN:
7791       expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7792 				CALL_EXPR_ARG (exp, 1));
7793       return const0_rtx;
7794     case BUILT_IN_EH_RETURN_DATA_REGNO:
7795       return expand_builtin_eh_return_data_regno (exp);
7796     case BUILT_IN_EXTEND_POINTER:
7797       return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
7798     case BUILT_IN_EH_POINTER:
7799       return expand_builtin_eh_pointer (exp);
7800     case BUILT_IN_EH_FILTER:
7801       return expand_builtin_eh_filter (exp);
7802     case BUILT_IN_EH_COPY_VALUES:
7803       return expand_builtin_eh_copy_values (exp);
7804 
7805     case BUILT_IN_VA_START:
7806       return expand_builtin_va_start (exp);
7807     case BUILT_IN_VA_END:
7808       return expand_builtin_va_end (exp);
7809     case BUILT_IN_VA_COPY:
7810       return expand_builtin_va_copy (exp);
7811     case BUILT_IN_EXPECT:
7812       return expand_builtin_expect (exp, target);
7813     case BUILT_IN_EXPECT_WITH_PROBABILITY:
7814       return expand_builtin_expect_with_probability (exp, target);
7815     case BUILT_IN_ASSUME_ALIGNED:
7816       return expand_builtin_assume_aligned (exp, target);
7817     case BUILT_IN_PREFETCH:
7818       expand_builtin_prefetch (exp);
7819       return const0_rtx;
7820 
7821     case BUILT_IN_INIT_TRAMPOLINE:
7822       return expand_builtin_init_trampoline (exp, true);
7823     case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7824       return expand_builtin_init_trampoline (exp, false);
7825     case BUILT_IN_ADJUST_TRAMPOLINE:
7826       return expand_builtin_adjust_trampoline (exp);
7827 
7828     case BUILT_IN_INIT_DESCRIPTOR:
7829       return expand_builtin_init_descriptor (exp);
7830     case BUILT_IN_ADJUST_DESCRIPTOR:
7831       return expand_builtin_adjust_descriptor (exp);
7832 
7833     case BUILT_IN_FORK:
7834     case BUILT_IN_EXECL:
7835     case BUILT_IN_EXECV:
7836     case BUILT_IN_EXECLP:
7837     case BUILT_IN_EXECLE:
7838     case BUILT_IN_EXECVP:
7839     case BUILT_IN_EXECVE:
7840       target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7841       if (target)
7842 	return target;
7843       break;
7844 
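    /* The __sync/__atomic cases below rely on the _1, _2, _4, _8 and
       _16 function codes being consecutive: subtracting the _1 code
       from FCODE gives 0..4, which get_builtin_sync_mode maps to the
       integer mode that is 1, 2, 4, 8 or 16 bytes wide.  */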
7845     case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7846     case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7847     case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7848     case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7849     case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7850       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
7851       target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
7852       if (target)
7853 	return target;
7854       break;
7855 
7856     case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7857     case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7858     case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7859     case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7860     case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7861       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
7862       target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
7863       if (target)
7864 	return target;
7865       break;
7866 
7867     case BUILT_IN_SYNC_FETCH_AND_OR_1:
7868     case BUILT_IN_SYNC_FETCH_AND_OR_2:
7869     case BUILT_IN_SYNC_FETCH_AND_OR_4:
7870     case BUILT_IN_SYNC_FETCH_AND_OR_8:
7871     case BUILT_IN_SYNC_FETCH_AND_OR_16:
7872       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
7873       target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
7874       if (target)
7875 	return target;
7876       break;
7877 
7878     case BUILT_IN_SYNC_FETCH_AND_AND_1:
7879     case BUILT_IN_SYNC_FETCH_AND_AND_2:
7880     case BUILT_IN_SYNC_FETCH_AND_AND_4:
7881     case BUILT_IN_SYNC_FETCH_AND_AND_8:
7882     case BUILT_IN_SYNC_FETCH_AND_AND_16:
7883       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
7884       target = expand_builtin_sync_operation (mode, exp, AND, false, target);
7885       if (target)
7886 	return target;
7887       break;
7888 
7889     case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7890     case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7891     case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7892     case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7893     case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7894       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
7895       target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
7896       if (target)
7897 	return target;
7898       break;
7899 
7900     case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7901     case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7902     case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7903     case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7904     case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7905       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
7906       target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
7907       if (target)
7908 	return target;
7909       break;
7910 
7911     case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7912     case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7913     case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7914     case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7915     case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7916       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
7917       target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
7918       if (target)
7919 	return target;
7920       break;
7921 
7922     case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7923     case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7924     case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7925     case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7926     case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7927       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
7928       target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
7929       if (target)
7930 	return target;
7931       break;
7932 
7933     case BUILT_IN_SYNC_OR_AND_FETCH_1:
7934     case BUILT_IN_SYNC_OR_AND_FETCH_2:
7935     case BUILT_IN_SYNC_OR_AND_FETCH_4:
7936     case BUILT_IN_SYNC_OR_AND_FETCH_8:
7937     case BUILT_IN_SYNC_OR_AND_FETCH_16:
7938       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
7939       target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
7940       if (target)
7941 	return target;
7942       break;
7943 
7944     case BUILT_IN_SYNC_AND_AND_FETCH_1:
7945     case BUILT_IN_SYNC_AND_AND_FETCH_2:
7946     case BUILT_IN_SYNC_AND_AND_FETCH_4:
7947     case BUILT_IN_SYNC_AND_AND_FETCH_8:
7948     case BUILT_IN_SYNC_AND_AND_FETCH_16:
7949       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
7950       target = expand_builtin_sync_operation (mode, exp, AND, true, target);
7951       if (target)
7952 	return target;
7953       break;
7954 
7955     case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7956     case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7957     case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7958     case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7959     case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7960       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
7961       target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
7962       if (target)
7963 	return target;
7964       break;
7965 
7966     case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7967     case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7968     case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7969     case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7970     case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7971       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
7972       target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
7973       if (target)
7974 	return target;
7975       break;
7976 
7977     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7978     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7979     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7980     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7981     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
7982       if (mode == VOIDmode)
7983 	mode = TYPE_MODE (boolean_type_node);
7984       if (!target || !register_operand (target, mode))
7985 	target = gen_reg_rtx (mode);
7986 
7987       mode = get_builtin_sync_mode
7988 				(fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
7989       target = expand_builtin_compare_and_swap (mode, exp, true, target);
7990       if (target)
7991 	return target;
7992       break;
7993 
7994     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7995     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7996     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7997     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7998     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7999       mode = get_builtin_sync_mode
8000 				(fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
8001       target = expand_builtin_compare_and_swap (mode, exp, false, target);
8002       if (target)
8003 	return target;
8004       break;
8005 
8006     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
8007     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
8008     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
8009     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
8010     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
8011       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
8012       target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
8013       if (target)
8014 	return target;
8015       break;
8016 
8017     case BUILT_IN_SYNC_LOCK_RELEASE_1:
8018     case BUILT_IN_SYNC_LOCK_RELEASE_2:
8019     case BUILT_IN_SYNC_LOCK_RELEASE_4:
8020     case BUILT_IN_SYNC_LOCK_RELEASE_8:
8021     case BUILT_IN_SYNC_LOCK_RELEASE_16:
8022       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
8023       expand_builtin_sync_lock_release (mode, exp);
8024       return const0_rtx;
8025 
8026     case BUILT_IN_SYNC_SYNCHRONIZE:
8027       expand_builtin_sync_synchronize ();
8028       return const0_rtx;
8029 
8030     case BUILT_IN_ATOMIC_EXCHANGE_1:
8031     case BUILT_IN_ATOMIC_EXCHANGE_2:
8032     case BUILT_IN_ATOMIC_EXCHANGE_4:
8033     case BUILT_IN_ATOMIC_EXCHANGE_8:
8034     case BUILT_IN_ATOMIC_EXCHANGE_16:
8035       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
8036       target = expand_builtin_atomic_exchange (mode, exp, target);
8037       if (target)
8038 	return target;
8039       break;
8040 
8041     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
8042     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
8043     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
8044     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
8045     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
8046       {
8047 	unsigned int nargs, z;
8048 	vec<tree, va_gc> *vec;
8049 
8050 	mode =
8051 	    get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
8052 	target = expand_builtin_atomic_compare_exchange (mode, exp, target);
8053 	if (target)
8054 	  return target;
8055 
8056 	/* If this is turned into an external library call, the weak parameter
8057 	   must be dropped to match the expected parameter list.  */
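	/* E.g. the six-argument builtin form (ptr, expected, desired,
	   weak, success_order, failure_order) becomes the five-argument
	   library routine __atomic_compare_exchange_N (ptr, expected,
	   desired, success_order, failure_order).  */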
8058 	nargs = call_expr_nargs (exp);
8059 	vec_alloc (vec, nargs - 1);
8060 	for (z = 0; z < 3; z++)
8061 	  vec->quick_push (CALL_EXPR_ARG (exp, z));
8062 	/* Skip the boolean weak parameter.  */
8063 	for (z = 4; z < 6; z++)
8064 	  vec->quick_push (CALL_EXPR_ARG (exp, z));
8065 	exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
8066 	break;
8067       }
8068 
8069     case BUILT_IN_ATOMIC_LOAD_1:
8070     case BUILT_IN_ATOMIC_LOAD_2:
8071     case BUILT_IN_ATOMIC_LOAD_4:
8072     case BUILT_IN_ATOMIC_LOAD_8:
8073     case BUILT_IN_ATOMIC_LOAD_16:
8074       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
8075       target = expand_builtin_atomic_load (mode, exp, target);
8076       if (target)
8077 	return target;
8078       break;
8079 
8080     case BUILT_IN_ATOMIC_STORE_1:
8081     case BUILT_IN_ATOMIC_STORE_2:
8082     case BUILT_IN_ATOMIC_STORE_4:
8083     case BUILT_IN_ATOMIC_STORE_8:
8084     case BUILT_IN_ATOMIC_STORE_16:
8085       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
8086       target = expand_builtin_atomic_store (mode, exp);
8087       if (target)
8088 	return const0_rtx;
8089       break;
8090 
8091     case BUILT_IN_ATOMIC_ADD_FETCH_1:
8092     case BUILT_IN_ATOMIC_ADD_FETCH_2:
8093     case BUILT_IN_ATOMIC_ADD_FETCH_4:
8094     case BUILT_IN_ATOMIC_ADD_FETCH_8:
8095     case BUILT_IN_ATOMIC_ADD_FETCH_16:
8096       {
8097 	enum built_in_function lib;
8098 	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
8099 	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
8100 				       (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
8101 	target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
8102 						 ignore, lib);
8103 	if (target)
8104 	  return target;
8105 	break;
8106       }
8107     case BUILT_IN_ATOMIC_SUB_FETCH_1:
8108     case BUILT_IN_ATOMIC_SUB_FETCH_2:
8109     case BUILT_IN_ATOMIC_SUB_FETCH_4:
8110     case BUILT_IN_ATOMIC_SUB_FETCH_8:
8111     case BUILT_IN_ATOMIC_SUB_FETCH_16:
8112       {
8113 	enum built_in_function lib;
8114 	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
8115 	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
8116 				       (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
8117 	target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
8118 						 ignore, lib);
8119 	if (target)
8120 	  return target;
8121 	break;
8122       }
8123     case BUILT_IN_ATOMIC_AND_FETCH_1:
8124     case BUILT_IN_ATOMIC_AND_FETCH_2:
8125     case BUILT_IN_ATOMIC_AND_FETCH_4:
8126     case BUILT_IN_ATOMIC_AND_FETCH_8:
8127     case BUILT_IN_ATOMIC_AND_FETCH_16:
8128       {
8129 	enum built_in_function lib;
8130 	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
8131 	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
8132 				       (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
8133 	target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
8134 						 ignore, lib);
8135 	if (target)
8136 	  return target;
8137 	break;
8138       }
8139     case BUILT_IN_ATOMIC_NAND_FETCH_1:
8140     case BUILT_IN_ATOMIC_NAND_FETCH_2:
8141     case BUILT_IN_ATOMIC_NAND_FETCH_4:
8142     case BUILT_IN_ATOMIC_NAND_FETCH_8:
8143     case BUILT_IN_ATOMIC_NAND_FETCH_16:
8144       {
8145 	enum built_in_function lib;
8146 	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
8147 	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
8148 				       (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
8149 	target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
8150 						 ignore, lib);
8151 	if (target)
8152 	  return target;
8153 	break;
8154       }
8155     case BUILT_IN_ATOMIC_XOR_FETCH_1:
8156     case BUILT_IN_ATOMIC_XOR_FETCH_2:
8157     case BUILT_IN_ATOMIC_XOR_FETCH_4:
8158     case BUILT_IN_ATOMIC_XOR_FETCH_8:
8159     case BUILT_IN_ATOMIC_XOR_FETCH_16:
8160       {
8161 	enum built_in_function lib;
8162 	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
8163 	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
8164 				       (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
8165 	target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
8166 						 ignore, lib);
8167 	if (target)
8168 	  return target;
8169 	break;
8170       }
8171     case BUILT_IN_ATOMIC_OR_FETCH_1:
8172     case BUILT_IN_ATOMIC_OR_FETCH_2:
8173     case BUILT_IN_ATOMIC_OR_FETCH_4:
8174     case BUILT_IN_ATOMIC_OR_FETCH_8:
8175     case BUILT_IN_ATOMIC_OR_FETCH_16:
8176       {
8177 	enum built_in_function lib;
8178 	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
8179 	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
8180 				       (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
8181 	target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
8182 						 ignore, lib);
8183 	if (target)
8184 	  return target;
8185 	break;
8186       }
8187     case BUILT_IN_ATOMIC_FETCH_ADD_1:
8188     case BUILT_IN_ATOMIC_FETCH_ADD_2:
8189     case BUILT_IN_ATOMIC_FETCH_ADD_4:
8190     case BUILT_IN_ATOMIC_FETCH_ADD_8:
8191     case BUILT_IN_ATOMIC_FETCH_ADD_16:
8192       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
8193       target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
8194 					       ignore, BUILT_IN_NONE);
8195       if (target)
8196 	return target;
8197       break;
8198 
8199     case BUILT_IN_ATOMIC_FETCH_SUB_1:
8200     case BUILT_IN_ATOMIC_FETCH_SUB_2:
8201     case BUILT_IN_ATOMIC_FETCH_SUB_4:
8202     case BUILT_IN_ATOMIC_FETCH_SUB_8:
8203     case BUILT_IN_ATOMIC_FETCH_SUB_16:
8204       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
8205       target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
8206 					       ignore, BUILT_IN_NONE);
8207       if (target)
8208 	return target;
8209       break;
8210 
8211     case BUILT_IN_ATOMIC_FETCH_AND_1:
8212     case BUILT_IN_ATOMIC_FETCH_AND_2:
8213     case BUILT_IN_ATOMIC_FETCH_AND_4:
8214     case BUILT_IN_ATOMIC_FETCH_AND_8:
8215     case BUILT_IN_ATOMIC_FETCH_AND_16:
8216       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
8217       target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
8218 					       ignore, BUILT_IN_NONE);
8219       if (target)
8220 	return target;
8221       break;
8222 
8223     case BUILT_IN_ATOMIC_FETCH_NAND_1:
8224     case BUILT_IN_ATOMIC_FETCH_NAND_2:
8225     case BUILT_IN_ATOMIC_FETCH_NAND_4:
8226     case BUILT_IN_ATOMIC_FETCH_NAND_8:
8227     case BUILT_IN_ATOMIC_FETCH_NAND_16:
8228       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
8229       target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
8230 					       ignore, BUILT_IN_NONE);
8231       if (target)
8232 	return target;
8233       break;
8234 
8235     case BUILT_IN_ATOMIC_FETCH_XOR_1:
8236     case BUILT_IN_ATOMIC_FETCH_XOR_2:
8237     case BUILT_IN_ATOMIC_FETCH_XOR_4:
8238     case BUILT_IN_ATOMIC_FETCH_XOR_8:
8239     case BUILT_IN_ATOMIC_FETCH_XOR_16:
8240       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
8241       target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
8242 					       ignore, BUILT_IN_NONE);
8243       if (target)
8244 	return target;
8245       break;
8246 
8247     case BUILT_IN_ATOMIC_FETCH_OR_1:
8248     case BUILT_IN_ATOMIC_FETCH_OR_2:
8249     case BUILT_IN_ATOMIC_FETCH_OR_4:
8250     case BUILT_IN_ATOMIC_FETCH_OR_8:
8251     case BUILT_IN_ATOMIC_FETCH_OR_16:
8252       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
8253       target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
8254 					       ignore, BUILT_IN_NONE);
8255       if (target)
8256 	return target;
8257       break;
8258 
8259     case BUILT_IN_ATOMIC_TEST_AND_SET:
8260       return expand_builtin_atomic_test_and_set (exp, target);
8261 
8262     case BUILT_IN_ATOMIC_CLEAR:
8263       return expand_builtin_atomic_clear (exp);
8264 
8265     case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8266       return expand_builtin_atomic_always_lock_free (exp);
8267 
8268     case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8269       target = expand_builtin_atomic_is_lock_free (exp);
8270       if (target)
8271         return target;
8272       break;
8273 
8274     case BUILT_IN_ATOMIC_THREAD_FENCE:
8275       expand_builtin_atomic_thread_fence (exp);
8276       return const0_rtx;
8277 
8278     case BUILT_IN_ATOMIC_SIGNAL_FENCE:
8279       expand_builtin_atomic_signal_fence (exp);
8280       return const0_rtx;
8281 
8282     case BUILT_IN_OBJECT_SIZE:
8283       return expand_builtin_object_size (exp);
8284 
8285     case BUILT_IN_MEMCPY_CHK:
8286     case BUILT_IN_MEMPCPY_CHK:
8287     case BUILT_IN_MEMMOVE_CHK:
8288     case BUILT_IN_MEMSET_CHK:
8289       target = expand_builtin_memory_chk (exp, target, mode, fcode);
8290       if (target)
8291 	return target;
8292       break;
8293 
8294     case BUILT_IN_STRCPY_CHK:
8295     case BUILT_IN_STPCPY_CHK:
8296     case BUILT_IN_STRNCPY_CHK:
8297     case BUILT_IN_STPNCPY_CHK:
8298     case BUILT_IN_STRCAT_CHK:
8299     case BUILT_IN_STRNCAT_CHK:
8300     case BUILT_IN_SNPRINTF_CHK:
8301     case BUILT_IN_VSNPRINTF_CHK:
8302       maybe_emit_chk_warning (exp, fcode);
8303       break;
8304 
8305     case BUILT_IN_SPRINTF_CHK:
8306     case BUILT_IN_VSPRINTF_CHK:
8307       maybe_emit_sprintf_chk_warning (exp, fcode);
8308       break;
8309 
8310     case BUILT_IN_FREE:
8311       if (warn_free_nonheap_object)
8312 	maybe_emit_free_warning (exp);
8313       break;
8314 
8315     case BUILT_IN_THREAD_POINTER:
8316       return expand_builtin_thread_pointer (exp, target);
8317 
8318     case BUILT_IN_SET_THREAD_POINTER:
8319       expand_builtin_set_thread_pointer (exp);
8320       return const0_rtx;
8321 
8322     case BUILT_IN_ACC_ON_DEVICE:
8323       /* Do a library call, since we failed to expand the builtin
8324 	 when folding.  */
8325       break;
8326 
8327     case BUILT_IN_GOACC_PARLEVEL_ID:
8328     case BUILT_IN_GOACC_PARLEVEL_SIZE:
8329       return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
8330 
8331     case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
8332       return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
8333 
8334     case BUILT_IN_SPECULATION_SAFE_VALUE_1:
8335     case BUILT_IN_SPECULATION_SAFE_VALUE_2:
8336     case BUILT_IN_SPECULATION_SAFE_VALUE_4:
8337     case BUILT_IN_SPECULATION_SAFE_VALUE_8:
8338     case BUILT_IN_SPECULATION_SAFE_VALUE_16:
8339       mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
8340       return expand_speculation_safe_value (mode, exp, target, ignore);
8341 
8342     default:	/* Just do a library call for any unknown builtin.  */
8343       break;
8344     }
8345 
8346   /* The switch statement above can drop through to cause the function
8347      to be called normally.  */
8348   return expand_call (exp, target, ignore);
8349 }
8350 
8351 /* Determine whether a tree node represents a call to a built-in
8352    function.  If the tree T is a call to a built-in function with
8353    the right number of arguments of the appropriate types, return
8354    the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
8355    Otherwise the return value is END_BUILTINS.  */
8356 
8357 enum built_in_function
8358 builtin_mathfn_code (const_tree t)
8359 {
8360   const_tree fndecl, arg, parmlist;
8361   const_tree argtype, parmtype;
8362   const_call_expr_arg_iterator iter;
8363 
8364   if (TREE_CODE (t) != CALL_EXPR)
8365     return END_BUILTINS;
8366 
8367   fndecl = get_callee_fndecl (t);
8368   if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
8369     return END_BUILTINS;
8370 
8371   parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
8372   init_const_call_expr_arg_iterator (t, &iter);
8373   for (; parmlist; parmlist = TREE_CHAIN (parmlist))
8374     {
8375       /* If a function doesn't take a variable number of arguments,
8376 	 the last element in the list will have type `void'.  */
8377       parmtype = TREE_VALUE (parmlist);
8378       if (VOID_TYPE_P (parmtype))
8379 	{
8380 	  if (more_const_call_expr_args_p (&iter))
8381 	    return END_BUILTINS;
8382 	  return DECL_FUNCTION_CODE (fndecl);
8383 	}
8384 
8385       if (! more_const_call_expr_args_p (&iter))
8386 	return END_BUILTINS;
8387 
8388       arg = next_const_call_expr_arg (&iter);
8389       argtype = TREE_TYPE (arg);
8390 
8391       if (SCALAR_FLOAT_TYPE_P (parmtype))
8392 	{
8393 	  if (! SCALAR_FLOAT_TYPE_P (argtype))
8394 	    return END_BUILTINS;
8395 	}
8396       else if (COMPLEX_FLOAT_TYPE_P (parmtype))
8397 	{
8398 	  if (! COMPLEX_FLOAT_TYPE_P (argtype))
8399 	    return END_BUILTINS;
8400 	}
8401       else if (POINTER_TYPE_P (parmtype))
8402 	{
8403 	  if (! POINTER_TYPE_P (argtype))
8404 	    return END_BUILTINS;
8405 	}
8406       else if (INTEGRAL_TYPE_P (parmtype))
8407 	{
8408 	  if (! INTEGRAL_TYPE_P (argtype))
8409 	    return END_BUILTINS;
8410 	}
8411       else
8412 	return END_BUILTINS;
8413     }
8414 
8415   /* Variable-length argument list.  */
8416   return DECL_FUNCTION_CODE (fndecl);
8417 }
8418 
8419 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
8420    evaluate to a constant.  */
8421 
8422 static tree
8423 fold_builtin_constant_p (tree arg)
8424 {
8425   /* We return 1 for a numeric type that's known to be a constant
8426      value at compile-time or for an aggregate type that's a
8427      literal constant.  */
8428   STRIP_NOPS (arg);
8429 
8430   /* If we know this is a constant, return the constant one.  */
8431   if (CONSTANT_CLASS_P (arg)
8432       || (TREE_CODE (arg) == CONSTRUCTOR
8433 	  && TREE_CONSTANT (arg)))
8434     return integer_one_node;
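  /* The address of a string literal is likewise a compile-time
     constant, whether written as "abc" or as &"abc"[0].  */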
8435   if (TREE_CODE (arg) == ADDR_EXPR)
8436     {
8437        tree op = TREE_OPERAND (arg, 0);
8438        if (TREE_CODE (op) == STRING_CST
8439 	   || (TREE_CODE (op) == ARRAY_REF
8440 	       && integer_zerop (TREE_OPERAND (op, 1))
8441 	       && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
8442 	 return integer_one_node;
8443     }
8444 
8445   /* If this expression has side effects, show we don't know it to be a
8446      constant.  Likewise if it's a pointer or aggregate type, since in
8447      those cases we only want literals; those are optimized only when
8448      generating RTL, not later.
8449      And finally, if we are compiling an initializer, not code, we
8450      need to return a definite result now; there's not going to be any
8451      more optimization done.  */
8452   if (TREE_SIDE_EFFECTS (arg)
8453       || AGGREGATE_TYPE_P (TREE_TYPE (arg))
8454       || POINTER_TYPE_P (TREE_TYPE (arg))
8455       || cfun == 0
8456       || folding_initializer
8457       || force_folding_builtin_constant_p)
8458     return integer_zero_node;
8459 
8460   return NULL_TREE;
8461 }
8462 
8463 /* Create builtin_expect or builtin_expect_with_probability
8464    with PRED and EXPECTED as its arguments and return it as a truthvalue.
8465    The Fortran FE can also produce builtin_expect with PREDICTOR as the
8466    third argument; builtin_expect_with_probability instead uses its third
8467    argument as the PROBABILITY value.  */
8468 
8469 static tree
8470 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
8471 				tree predictor, tree probability)
8472 {
8473   tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
8474 
8475   fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
8476 			      : BUILT_IN_EXPECT_WITH_PROBABILITY);
8477   arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
8478   ret_type = TREE_TYPE (TREE_TYPE (fn));
8479   pred_type = TREE_VALUE (arg_types);
8480   expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
8481 
8482   pred = fold_convert_loc (loc, pred_type, pred);
8483   expected = fold_convert_loc (loc, expected_type, expected);
8484 
8485   if (probability)
8486     call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
8487   else
8488     call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
8489 				     predictor);
8490 
8491   return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
8492 		 build_int_cst (ret_type, 0));
8493 }
8494 
8495 /* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3.  Return
8496    NULL_TREE if no simplification is possible.  */
8497 
8498 tree
8499 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
8500 		     tree arg3)
8501 {
8502   tree inner, fndecl, inner_arg0;
8503   enum tree_code code;
8504 
8505   /* Distribute the expected value over short-circuiting operators.
8506      See through the cast from truthvalue_type_node to long.  */
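  /* E.g. __builtin_expect (a && b, 1) is rewritten further below as
     (__builtin_expect (a, 1) != 0) && (__builtin_expect (b, 1) != 0),
     so each operand of the short-circuit carries the hint.  */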
8507   inner_arg0 = arg0;
8508   while (CONVERT_EXPR_P (inner_arg0)
8509 	 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
8510 	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
8511     inner_arg0 = TREE_OPERAND (inner_arg0, 0);
8512 
8513   /* If this is a builtin_expect within a builtin_expect, keep the
8514      inner one.  See through a comparison against a constant.  It
8515      might have been added to create a truthvalue.  */
8516   inner = inner_arg0;
8517 
8518   if (COMPARISON_CLASS_P (inner)
8519       && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
8520     inner = TREE_OPERAND (inner, 0);
8521 
8522   if (TREE_CODE (inner) == CALL_EXPR
8523       && (fndecl = get_callee_fndecl (inner))
8524       && (fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
8525 	  || fndecl_built_in_p (fndecl, BUILT_IN_EXPECT_WITH_PROBABILITY)))
8526     return arg0;
8527 
8528   inner = inner_arg0;
8529   code = TREE_CODE (inner);
8530   if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
8531     {
8532       tree op0 = TREE_OPERAND (inner, 0);
8533       tree op1 = TREE_OPERAND (inner, 1);
8534       arg1 = save_expr (arg1);
8535 
8536       op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
8537       op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
8538       inner = build2 (code, TREE_TYPE (inner), op0, op1);
8539 
8540       return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
8541     }
8542 
8543   /* If the argument isn't invariant then there's nothing else we can do.  */
8544   if (!TREE_CONSTANT (inner_arg0))
8545     return NULL_TREE;
8546 
8547   /* If we expect that a comparison against the argument will fold to
8548      a constant, return the constant.  In practice, this means a true
8549      constant or the address of a non-weak symbol.  */
8550   inner = inner_arg0;
8551   STRIP_NOPS (inner);
8552   if (TREE_CODE (inner) == ADDR_EXPR)
8553     {
8554       do
8555 	{
8556 	  inner = TREE_OPERAND (inner, 0);
8557 	}
8558       while (TREE_CODE (inner) == COMPONENT_REF
8559 	     || TREE_CODE (inner) == ARRAY_REF);
8560       if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
8561 	return NULL_TREE;
8562     }
8563 
8564   /* Otherwise, ARG0 already has the proper type for the return value.  */
8565   return arg0;
8566 }
8567 
8568 /* Fold a call to __builtin_classify_type with argument ARG.  */
8569 
8570 static tree
8571 fold_builtin_classify_type (tree arg)
8572 {
8573   if (arg == 0)
8574     return build_int_cst (integer_type_node, no_type_class);
8575 
8576   return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
8577 }
8578 
8579 /* Fold a call to __builtin_strlen with argument ARG.  */
8580 
8581 static tree
8582 fold_builtin_strlen (location_t loc, tree type, tree arg)
8583 {
8584   if (!validate_arg (arg, POINTER_TYPE))
8585     return NULL_TREE;
8586   else
8587     {
8588       c_strlen_data lendata = { };
8589       tree len = c_strlen (arg, 0, &lendata);
8590 
8591       if (len)
8592 	return fold_convert_loc (loc, type, len);
8593 
8594       if (!lendata.decl)
8595 	c_strlen (arg, 1, &lendata);
8596 
8597       if (lendata.decl)
8598 	{
8599 	  if (EXPR_HAS_LOCATION (arg))
8600 	    loc = EXPR_LOCATION (arg);
8601 	  else if (loc == UNKNOWN_LOCATION)
8602 	    loc = input_location;
8603 	  warn_string_no_nul (loc, "strlen", arg, lendata.decl);
8604 	}
8605 
8606       return NULL_TREE;
8607     }
8608 }
8609 
8610 /* Fold a call to __builtin_inf or __builtin_huge_val.  */
8611 
8612 static tree
8613 fold_builtin_inf (location_t loc, tree type, int warn)
8614 {
8615   REAL_VALUE_TYPE real;
8616 
8617   /* __builtin_inff is intended to be usable to define INFINITY on all
8618      targets.  If an infinity is not available, INFINITY expands "to a
8619      positive constant of type float that overflows at translation
8620      time", footnote "In this case, using INFINITY will violate the
8621      constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
8622      Thus we pedwarn to ensure this constraint violation is
8623      diagnosed.  */
8624   if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
8625     pedwarn (loc, 0, "target format does not support infinity");
8626 
8627   real_inf (&real);
8628   return build_real (type, real);
8629 }
8630 
8631 /* Fold function call to builtin sincos, sincosf, or sincosl.  Return
8632    NULL_TREE if no simplification can be made.  */
8633 
8634 static tree
8635 fold_builtin_sincos (location_t loc,
8636 		     tree arg0, tree arg1, tree arg2)
8637 {
8638   tree type;
8639   tree fndecl, call = NULL_TREE;
8640 
8641   if (!validate_arg (arg0, REAL_TYPE)
8642       || !validate_arg (arg1, POINTER_TYPE)
8643       || !validate_arg (arg2, POINTER_TYPE))
8644     return NULL_TREE;
8645 
8646   type = TREE_TYPE (arg0);
8647 
8648   /* Calculate the result when the argument is a constant.  */
8649   built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
8650   if (fn == END_BUILTINS)
8651     return NULL_TREE;
8652 
8653   /* Canonicalize sincos to cexpi.  */
8654   if (TREE_CODE (arg0) == REAL_CST)
8655     {
8656       tree complex_type = build_complex_type (type);
8657       call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
8658     }
8659   if (!call)
8660     {
8661       if (!targetm.libc_has_function (function_c99_math_complex)
8662 	  || !builtin_decl_implicit_p (fn))
8663 	return NULL_TREE;
8664       fndecl = builtin_decl_explicit (fn);
8665       call = build_call_expr_loc (loc, fndecl, 1, arg0);
8666       call = builtin_save_expr (call);
8667     }
8668 
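  /* Store the two halves of the cexpi result through the pointers:
     (*arg1 = __imag cexpi (arg0), *arg2 = __real cexpi (arg0)),
     i.e. the sine into *arg1 and the cosine into *arg2.  */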
8669   tree ptype = build_pointer_type (type);
8670   arg1 = fold_convert (ptype, arg1);
8671   arg2 = fold_convert (ptype, arg2);
8672   return build2 (COMPOUND_EXPR, void_type_node,
8673 		 build2 (MODIFY_EXPR, void_type_node,
8674 			 build_fold_indirect_ref_loc (loc, arg1),
8675 			 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
8676 		 build2 (MODIFY_EXPR, void_type_node,
8677 			 build_fold_indirect_ref_loc (loc, arg2),
8678 			 fold_build1_loc (loc, REALPART_EXPR, type, call)));
8679 }
8680 
8681 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8682    Return NULL_TREE if no simplification can be made.  */
8683 
8684 static tree
8685 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8686 {
8687   if (!validate_arg (arg1, POINTER_TYPE)
8688       || !validate_arg (arg2, POINTER_TYPE)
8689       || !validate_arg (len, INTEGER_TYPE))
8690     return NULL_TREE;
8691 
8692   /* If the LEN parameter is zero, return zero.  */
8693   if (integer_zerop (len))
8694     return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8695 			      arg1, arg2);
8696 
8697   /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
8698   if (operand_equal_p (arg1, arg2, 0))
8699     return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8700 
8701   /* If the len parameter is one, return an expression corresponding to
8702      (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
8703   if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8704     {
8705       tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8706       tree cst_uchar_ptr_node
8707 	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8708 
8709       tree ind1
8710 	= fold_convert_loc (loc, integer_type_node,
8711 			    build1 (INDIRECT_REF, cst_uchar_node,
8712 				    fold_convert_loc (loc,
8713 						      cst_uchar_ptr_node,
8714 						      arg1)));
8715       tree ind2
8716 	= fold_convert_loc (loc, integer_type_node,
8717 			    build1 (INDIRECT_REF, cst_uchar_node,
8718 				    fold_convert_loc (loc,
8719 						      cst_uchar_ptr_node,
8720 						      arg2)));
8721       return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8722     }
8723 
8724   return NULL_TREE;
8725 }
8726 
8727 /* Fold a call to builtin isascii with argument ARG.  */
8728 
8729 static tree
8730 fold_builtin_isascii (location_t loc, tree arg)
8731 {
8732   if (!validate_arg (arg, INTEGER_TYPE))
8733     return NULL_TREE;
8734   else
8735     {
8736       /* Transform isascii(c) -> ((c & ~0x7f) == 0).  */
8737       arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8738 			 build_int_cst (integer_type_node,
8739 					~ (unsigned HOST_WIDE_INT) 0x7f));
8740       return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8741 			      arg, integer_zero_node);
8742     }
8743 }
8744 
8745 /* Fold a call to builtin toascii with argument ARG.  */
8746 
8747 static tree
8748 fold_builtin_toascii (location_t loc, tree arg)
8749 {
8750   if (!validate_arg (arg, INTEGER_TYPE))
8751     return NULL_TREE;
8752 
8753   /* Transform toascii(c) -> (c & 0x7f).  */
8754   return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8755 			  build_int_cst (integer_type_node, 0x7f));
8756 }
8757 
8758 /* Fold a call to builtin isdigit with argument ARG.  */
8759 
8760 static tree
8761 fold_builtin_isdigit (location_t loc, tree arg)
8762 {
8763   if (!validate_arg (arg, INTEGER_TYPE))
8764     return NULL_TREE;
8765   else
8766     {
8767       /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9.  */
8768       /* According to the C standard, isdigit is unaffected by locale.
8769 	 However, it definitely is affected by the target character set.  */
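      /* The conversion to unsigned lets a single comparison do both
	 bounds checks: if c is below '0', (unsigned) c - '0' wraps
	 around to a large value and the <= 9 test fails.  */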
8770       unsigned HOST_WIDE_INT target_digit0
8771 	= lang_hooks.to_target_charset ('0');
8772 
8773       if (target_digit0 == 0)
8774 	return NULL_TREE;
8775 
8776       arg = fold_convert_loc (loc, unsigned_type_node, arg);
8777       arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8778 			 build_int_cst (unsigned_type_node, target_digit0));
8779       return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8780 			  build_int_cst (unsigned_type_node, 9));
8781     }
8782 }
8783 
8784 /* Fold a call to fabs, fabsf or fabsl with argument ARG.  */
8785 
8786 static tree
8787 fold_builtin_fabs (location_t loc, tree arg, tree type)
8788 {
8789   if (!validate_arg (arg, REAL_TYPE))
8790     return NULL_TREE;
8791 
8792   arg = fold_convert_loc (loc, type, arg);
8793   return fold_build1_loc (loc, ABS_EXPR, type, arg);
8794 }
8795 
8796 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG.  */
8797 
8798 static tree
8799 fold_builtin_abs (location_t loc, tree arg, tree type)
8800 {
8801   if (!validate_arg (arg, INTEGER_TYPE))
8802     return NULL_TREE;
8803 
8804   arg = fold_convert_loc (loc, type, arg);
8805   return fold_build1_loc (loc, ABS_EXPR, type, arg);
8806 }
8807 
8808 /* Fold a call to builtin carg(a+bi) -> atan2(b,a).  */
8809 
8810 static tree
8811 fold_builtin_carg (location_t loc, tree arg, tree type)
8812 {
8813   if (validate_arg (arg, COMPLEX_TYPE)
8814       && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8815     {
8816       tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8817 
8818       if (atan2_fn)
8819         {
8820 	  tree new_arg = builtin_save_expr (arg);
8821 	  tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8822 	  tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8823 	  return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8824 	}
8825     }
8826 
8827   return NULL_TREE;
8828 }
8829 
8830 /* Fold a call to builtin frexp, we can assume the base is 2.  */
8831 
8832 static tree
8833 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8834 {
8835   if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8836     return NULL_TREE;
8837 
8838   STRIP_NOPS (arg0);
8839 
8840   if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8841     return NULL_TREE;
8842 
8843   arg1 = build_fold_indirect_ref_loc (loc, arg1);
8844 
8845   /* Proceed if a valid pointer type was passed in.  */
8846   if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8847     {
8848       const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8849       tree frac, exp;
8850 
8851       switch (value->cl)
8852       {
8853       case rvc_zero:
8854 	/* For +-0, return (*exp = 0, +-0).  */
8855 	exp = integer_zero_node;
8856 	frac = arg0;
8857 	break;
8858       case rvc_nan:
8859       case rvc_inf:
8860 	/* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
8861 	return omit_one_operand_loc (loc, rettype, arg0, arg1);
8862       case rvc_normal:
8863 	{
8864 	  /* Since the frexp function always expects base 2, and in
8865 	     GCC normalized significands are already in the range
8866 	     [0.5, 1.0), we have exactly what frexp wants.  */
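	  /* E.g. 8.0 is stored as 0.5 * 2**4, so the fold yields
	     (*exp = 4, 0.5) with no arithmetic needed.  */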
8867 	  REAL_VALUE_TYPE frac_rvt = *value;
8868 	  SET_REAL_EXP (&frac_rvt, 0);
8869 	  frac = build_real (rettype, frac_rvt);
8870 	  exp = build_int_cst (integer_type_node, REAL_EXP (value));
8871 	}
8872 	break;
8873       default:
8874 	gcc_unreachable ();
8875       }
8876 
8877       /* Create the COMPOUND_EXPR (*arg1 = exp, frac).  */
8878       arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8879       TREE_SIDE_EFFECTS (arg1) = 1;
8880       return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8881     }
8882 
8883   return NULL_TREE;
8884 }
8885 
8886 /* Fold a call to builtin modf.  */
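/* E.g. a constant call modf (-3.5, &iptr) folds to (*iptr = -3.0, -0.5),
   and the sign handling below yields a -0.0 fraction for negative
   arguments that are already integral.  */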
8887 
8888 static tree
8889 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8890 {
8891   if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8892     return NULL_TREE;
8893 
8894   STRIP_NOPS (arg0);
8895 
8896   if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8897     return NULL_TREE;
8898 
8899   arg1 = build_fold_indirect_ref_loc (loc, arg1);
8900 
8901   /* Proceed if a valid pointer type was passed in.  */
8902   if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8903     {
8904       const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8905       REAL_VALUE_TYPE trunc, frac;
8906 
8907       switch (value->cl)
8908       {
8909       case rvc_nan:
8910       case rvc_zero:
8911 	/* For +-NaN or +-0, return (*arg1 = arg0, arg0).  */
8912 	trunc = frac = *value;
8913 	break;
8914       case rvc_inf:
8915 	/* For +-Inf, return (*arg1 = arg0, +-0).  */
8916 	frac = dconst0;
8917 	frac.sign = value->sign;
8918 	trunc = *value;
8919 	break;
8920       case rvc_normal:
8921 	/* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)).  */
8922 	real_trunc (&trunc, VOIDmode, value);
8923 	real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8924 	/* If the original number was negative and already
8925 	   integral, then the fractional part is -0.0.  */
8926 	if (value->sign && frac.cl == rvc_zero)
8927 	  frac.sign = value->sign;
8928 	break;
8929       }
8930 
8931       /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8932       arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8933 			  build_real (rettype, trunc));
8934       TREE_SIDE_EFFECTS (arg1) = 1;
8935       return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8936 			  build_real (rettype, frac));
8937     }
8938 
8939   return NULL_TREE;
8940 }
8941 
8942 /* Given a location LOC, an interclass builtin function decl FNDECL
8943    and its single argument ARG, return a folded expression computing
8944    the same, or NULL_TREE if we either couldn't or didn't want to fold
8945    (the latter happens if there's an RTL instruction available).  */
8946 
8947 static tree
8948 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8949 {
8950   machine_mode mode;
8951 
8952   if (!validate_arg (arg, REAL_TYPE))
8953     return NULL_TREE;
8954 
8955   if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8956     return NULL_TREE;
8957 
8958   mode = TYPE_MODE (TREE_TYPE (arg));
8959 
8960   bool is_ibm_extended = MODE_COMPOSITE_P (mode);
8961 
8962   /* If there is no optab, try generic code.  */
8963   switch (DECL_FUNCTION_CODE (fndecl))
8964     {
8965       tree result;
8966 
8967     CASE_FLT_FN (BUILT_IN_ISINF):
8968       {
8969 	/* isinf(x) -> isgreater(fabs(x),DBL_MAX).  */
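	/* isgreater is an unordered comparison, so quiet NaN operands
	   do not raise FE_INVALID here.  */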
8970 	tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8971 	tree type = TREE_TYPE (arg);
8972 	REAL_VALUE_TYPE r;
8973 	char buf[128];
8974 
8975 	if (is_ibm_extended)
8976 	  {
8977 	    /* NaN and Inf are encoded in the high-order double value
8978 	       only.  The low-order value is not significant.  */
8979 	    type = double_type_node;
8980 	    mode = DFmode;
8981 	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8982 	  }
8983 	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
8984 	real_from_string (&r, buf);
8985 	result = build_call_expr (isgr_fn, 2,
8986 				  fold_build1_loc (loc, ABS_EXPR, type, arg),
8987 				  build_real (type, r));
8988 	return result;
8989       }
8990     CASE_FLT_FN (BUILT_IN_FINITE):
8991     case BUILT_IN_ISFINITE:
8992       {
8993 	/* isfinite(x) -> islessequal(fabs(x),DBL_MAX).  */
8994 	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8995 	tree type = TREE_TYPE (arg);
8996 	REAL_VALUE_TYPE r;
8997 	char buf[128];
8998 
8999 	if (is_ibm_extended)
9000 	  {
9001 	    /* NaN and Inf are encoded in the high-order double value
9002 	       only.  The low-order value is not significant.  */
9003 	    type = double_type_node;
9004 	    mode = DFmode;
9005 	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9006 	  }
9007 	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9008 	real_from_string (&r, buf);
9009 	result = build_call_expr (isle_fn, 2,
9010 				  fold_build1_loc (loc, ABS_EXPR, type, arg),
9011 				  build_real (type, r));
9012 	/*result = fold_build2_loc (loc, UNGT_EXPR,
9013 				  TREE_TYPE (TREE_TYPE (fndecl)),
9014 				  fold_build1_loc (loc, ABS_EXPR, type, arg),
9015 				  build_real (type, r));
9016 	result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
9017 				  TREE_TYPE (TREE_TYPE (fndecl)),
9018 				  result);*/
9019 	return result;
9020       }
9021     case BUILT_IN_ISNORMAL:
9022       {
9023 	/* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
9024 	   islessequal(fabs(x),DBL_MAX).  */
9025 	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
9026 	tree type = TREE_TYPE (arg);
9027 	tree orig_arg, max_exp, min_exp;
9028 	machine_mode orig_mode = mode;
9029 	REAL_VALUE_TYPE rmax, rmin;
9030 	char buf[128];
9031 
9032 	orig_arg = arg = builtin_save_expr (arg);
9033 	if (is_ibm_extended)
9034 	  {
9035 	    /* Use double to test the normal range of IBM extended
9036 	       precision.  Emin for IBM extended precision is
9037 	       different to emin for IEEE double, being 53 higher
9038 	       since the low double exponent is at least 53 lower
9039 	       than the high double exponent.  */
9040 	    type = double_type_node;
9041 	    mode = DFmode;
9042 	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
9043 	  }
9044 	arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
9045 
9046 	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
9047 	real_from_string (&rmax, buf);
9048 	sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
9049 	real_from_string (&rmin, buf);
9050 	max_exp = build_real (type, rmax);
9051 	min_exp = build_real (type, rmin);
9052 
9053 	max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
9054 	if (is_ibm_extended)
9055 	  {
9056 	    /* Testing the high end of the range is done just using
9057 	       the high double, using the same test as isfinite().
9058 	       For the subnormal end of the range we first test the
9059 	       high double, then if its magnitude is equal to the
9060 	       limit of 0x1p-969, we test whether the low double is
9061 	       non-zero and opposite sign to the high double.  */
9062 	    tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
9063 	    tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
9064 	    tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
9065 	    tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
9066 				       arg, min_exp);
9067 	    tree as_complex = build1 (VIEW_CONVERT_EXPR,
9068 				      complex_double_type_node, orig_arg);
9069 	    tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
9070 	    tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
9071 	    tree zero = build_real (type, dconst0);
9072 	    tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
9073 	    tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
9074 	    tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
9075 	    tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
9076 				      fold_build3 (COND_EXPR,
9077 						   integer_type_node,
9078 						   hilt, logt, lolt));
9079 	    eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
9080 				  eq_min, ok_lo);
9081 	    min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
9082 				   gt_min, eq_min);
9083 	  }
9084 	else
9085 	  {
9086 	    tree const isge_fn
9087 	      = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
9088 	    min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
9089 	  }
9090 	result = fold_build2 (BIT_AND_EXPR, integer_type_node,
9091 			      max_exp, min_exp);
9092 	return result;
9093       }
9094     default:
9095       break;
9096     }
9097 
9098   return NULL_TREE;
9099 }
9100 
9101 /* Fold a call to __builtin_isnan, __builtin_isinf, __builtin_isinf_sign
9102    or __builtin_isfinite.  ARG is the argument for the call.  */
9103 
9104 static tree
9105 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
9106 {
9107   tree type = TREE_TYPE (TREE_TYPE (fndecl));
9108 
9109   if (!validate_arg (arg, REAL_TYPE))
9110     return NULL_TREE;
9111 
9112   switch (builtin_index)
9113     {
9114     case BUILT_IN_ISINF:
9115       if (!HONOR_INFINITIES (arg))
9116 	return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9117 
9118       return NULL_TREE;
9119 
9120     case BUILT_IN_ISINF_SIGN:
9121       {
9122 	/* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
9123 	/* In a boolean context, GCC will fold the inner COND_EXPR to
9124 	   1.  So e.g. "if (isinf_sign(x))" would be folded to just
9125 	   "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
9126 	tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
9127 	tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
9128 	tree tmp = NULL_TREE;
9129 
9130 	arg = builtin_save_expr (arg);
9131 
9132 	if (signbit_fn && isinf_fn)
9133 	  {
9134 	    tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
9135 	    tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
9136 
9137 	    signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9138 					signbit_call, integer_zero_node);
9139 	    isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
9140 				      isinf_call, integer_zero_node);
9141 
9142 	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
9143 			       integer_minus_one_node, integer_one_node);
9144 	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9145 			       isinf_call, tmp,
9146 			       integer_zero_node);
9147 	  }
9148 
9149 	return tmp;
9150       }
9151 
9152     case BUILT_IN_ISFINITE:
9153       if (!HONOR_NANS (arg)
9154 	  && !HONOR_INFINITIES (arg))
9155 	return omit_one_operand_loc (loc, type, integer_one_node, arg);
9156 
9157       return NULL_TREE;
9158 
9159     case BUILT_IN_ISNAN:
9160       if (!HONOR_NANS (arg))
9161 	return omit_one_operand_loc (loc, type, integer_zero_node, arg);
9162 
9163       {
9164 	bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
9165 	if (is_ibm_extended)
9166 	  {
9167 	    /* NaN and Inf are encoded in the high-order double value
9168 	       only.  The low-order value is not significant.  */
9169 	    arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
9170 	  }
9171       }
9172       arg = builtin_save_expr (arg);
9173       return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
9174 
9175     default:
9176       gcc_unreachable ();
9177     }
9178 }
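
/* As an illustration: when NaNs are honored, the BUILT_IN_ISNAN case
   above folds __builtin_isnan (x) into the self-comparison

     UNORDERED_EXPR <SAVE_EXPR <x>, SAVE_EXPR <x>>

   which is true exactly when x is a NaN (a sketch of the resulting
   tree, not a source-level rewrite).  */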
9179 
9180 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
9181    This builtin will generate code to return the appropriate floating
9182    point classification depending on the value of the floating point
9183    number passed in.  The possible return values must be supplied as
9184    int arguments to the call in the following order: FP_NAN, FP_INFINITE,
9185    FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipsis is for exactly
9186    one floating point argument which is "type generic".  */
9187 
9188 static tree
9189 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
9190 {
9191   tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
9192     arg, type, res, tmp;
9193   machine_mode mode;
9194   REAL_VALUE_TYPE r;
9195   char buf[128];
9196 
9197   /* Verify the required arguments in the original call.  */
9198   if (nargs != 6
9199       || !validate_arg (args[0], INTEGER_TYPE)
9200       || !validate_arg (args[1], INTEGER_TYPE)
9201       || !validate_arg (args[2], INTEGER_TYPE)
9202       || !validate_arg (args[3], INTEGER_TYPE)
9203       || !validate_arg (args[4], INTEGER_TYPE)
9204       || !validate_arg (args[5], REAL_TYPE))
9205     return NULL_TREE;
9206 
9207   fp_nan = args[0];
9208   fp_infinite = args[1];
9209   fp_normal = args[2];
9210   fp_subnormal = args[3];
9211   fp_zero = args[4];
9212   arg = args[5];
9213   type = TREE_TYPE (arg);
9214   mode = TYPE_MODE (type);
9215   arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
9216 
9217   /* fpclassify(x) ->
9218        isnan(x) ? FP_NAN :
9219          (fabs(x) == Inf ? FP_INFINITE :
9220 	   (fabs(x) >= DBL_MIN ? FP_NORMAL :
9221 	     (x == 0 ? FP_ZERO : FP_SUBNORMAL))).  */
9222 
9223   tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9224 		     build_real (type, dconst0));
9225   res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
9226 		     tmp, fp_zero, fp_subnormal);
9227 
9228   sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
9229   real_from_string (&r, buf);
9230   tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
9231 		     arg, build_real (type, r));
9232   res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
9233 
9234   if (HONOR_INFINITIES (mode))
9235     {
9236       real_inf (&r);
9237       tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
9238 			 build_real (type, r));
9239       res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
9240 			 fp_infinite, res);
9241     }
9242 
9243   if (HONOR_NANS (mode))
9244     {
9245       tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
9246       res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
9247     }
9248 
9249   return res;
9250 }
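
/* In C notation the tree built above behaves like (a sketch, assuming
   an IEEE double argument with NaNs and infinities honored):

     isnan (x)               ? FP_NAN
     : fabs (x) == INFINITY  ? FP_INFINITE
     : fabs (x) >= 0x1p-1022 ? FP_NORMAL
     : x == 0.0              ? FP_ZERO
     :                         FP_SUBNORMAL  */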
9251 
9252 /* Fold a call to an unordered comparison function such as
9253    __builtin_isgreater().  FNDECL is the FUNCTION_DECL for the function
9254    being called and ARG0 and ARG1 are the arguments for the call.
9255    UNORDERED_CODE and ORDERED_CODE are comparison codes that give
9256    the opposite of the desired result.  UNORDERED_CODE is used
9257    for modes that can hold NaNs and ORDERED_CODE is used for
9258    the rest.  */
9259 
9260 static tree
9261 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
9262 			    enum tree_code unordered_code,
9263 			    enum tree_code ordered_code)
9264 {
9265   tree type = TREE_TYPE (TREE_TYPE (fndecl));
9266   enum tree_code code;
9267   tree type0, type1;
9268   enum tree_code code0, code1;
9269   tree cmp_type = NULL_TREE;
9270 
9271   type0 = TREE_TYPE (arg0);
9272   type1 = TREE_TYPE (arg1);
9273 
9274   code0 = TREE_CODE (type0);
9275   code1 = TREE_CODE (type1);
9276 
9277   if (code0 == REAL_TYPE && code1 == REAL_TYPE)
9278     /* Choose the wider of two real types.  */
9279     cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
9280       ? type0 : type1;
9281   else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
9282     cmp_type = type0;
9283   else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
9284     cmp_type = type1;
9285 
9286   arg0 = fold_convert_loc (loc, cmp_type, arg0);
9287   arg1 = fold_convert_loc (loc, cmp_type, arg1);
9288 
9289   if (unordered_code == UNORDERED_EXPR)
9290     {
9291       if (!HONOR_NANS (arg0))
9292 	return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
9293       return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
9294     }
9295 
9296   code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
9297   return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
9298 		      fold_build2_loc (loc, code, type, arg0, arg1));
9299 }
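
/* For example, __builtin_isgreater (x, y) reaches this function with
   UNLE_EXPR/LE_EXPR, so with NaNs honored it folds to

     TRUTH_NOT_EXPR <UNLE_EXPR <x, y>>

   i.e. "neither unordered nor x <= y", which is true exactly when
   both operands are ordered and x > y, without raising an exception
   on quiet NaNs (a sketch of the resulting tree).  */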
9300 
9301 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
9302    arithmetic if it can never overflow, or into internal functions that
9303    return both the result of the arithmetic and an overflowed boolean
9304    flag in a complex integer result, or some other check for overflow.
9305    Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
9306    checking part of that.  */
9307 
9308 static tree
9309 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
9310 			     tree arg0, tree arg1, tree arg2)
9311 {
9312   enum internal_fn ifn = IFN_LAST;
9313   /* The code of the expression corresponding to the built-in.  */
9314   enum tree_code opcode = ERROR_MARK;
9315   bool ovf_only = false;
9316 
9317   switch (fcode)
9318     {
9319     case BUILT_IN_ADD_OVERFLOW_P:
9320       ovf_only = true;
9321       /* FALLTHRU */
9322     case BUILT_IN_ADD_OVERFLOW:
9323     case BUILT_IN_SADD_OVERFLOW:
9324     case BUILT_IN_SADDL_OVERFLOW:
9325     case BUILT_IN_SADDLL_OVERFLOW:
9326     case BUILT_IN_UADD_OVERFLOW:
9327     case BUILT_IN_UADDL_OVERFLOW:
9328     case BUILT_IN_UADDLL_OVERFLOW:
9329       opcode = PLUS_EXPR;
9330       ifn = IFN_ADD_OVERFLOW;
9331       break;
9332     case BUILT_IN_SUB_OVERFLOW_P:
9333       ovf_only = true;
9334       /* FALLTHRU */
9335     case BUILT_IN_SUB_OVERFLOW:
9336     case BUILT_IN_SSUB_OVERFLOW:
9337     case BUILT_IN_SSUBL_OVERFLOW:
9338     case BUILT_IN_SSUBLL_OVERFLOW:
9339     case BUILT_IN_USUB_OVERFLOW:
9340     case BUILT_IN_USUBL_OVERFLOW:
9341     case BUILT_IN_USUBLL_OVERFLOW:
9342       opcode = MINUS_EXPR;
9343       ifn = IFN_SUB_OVERFLOW;
9344       break;
9345     case BUILT_IN_MUL_OVERFLOW_P:
9346       ovf_only = true;
9347       /* FALLTHRU */
9348     case BUILT_IN_MUL_OVERFLOW:
9349     case BUILT_IN_SMUL_OVERFLOW:
9350     case BUILT_IN_SMULL_OVERFLOW:
9351     case BUILT_IN_SMULLL_OVERFLOW:
9352     case BUILT_IN_UMUL_OVERFLOW:
9353     case BUILT_IN_UMULL_OVERFLOW:
9354     case BUILT_IN_UMULLL_OVERFLOW:
9355       opcode = MULT_EXPR;
9356       ifn = IFN_MUL_OVERFLOW;
9357       break;
9358     default:
9359       gcc_unreachable ();
9360     }
9361 
9362   /* For the "generic" overloads, the first two arguments can have different
9363      types and the last argument determines the target type to use to check
9364      for overflow.  The arguments of the other overloads all have the same
9365      type.  */
9366   tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
9367 
9368   /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
9369      arguments are constant, attempt to fold the built-in call into a constant
9370      expression indicating whether or not it detected an overflow.  */
9371   if (ovf_only
9372       && TREE_CODE (arg0) == INTEGER_CST
9373       && TREE_CODE (arg1) == INTEGER_CST)
9374     /* Perform the computation in the target type and check for overflow.  */
9375     return omit_one_operand_loc (loc, boolean_type_node,
9376 				 arith_overflowed_p (opcode, type, arg0, arg1)
9377 				 ? boolean_true_node : boolean_false_node,
9378 				 arg2);
9379 
9380   tree intres, ovfres;
9381   if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9382     {
9383       intres = fold_binary_loc (loc, opcode, type,
9384 				fold_convert_loc (loc, type, arg0),
9385 				fold_convert_loc (loc, type, arg1));
9386       if (TREE_OVERFLOW (intres))
9387 	intres = drop_tree_overflow (intres);
9388       ovfres = (arith_overflowed_p (opcode, type, arg0, arg1)
9389 		? boolean_true_node : boolean_false_node);
9390     }
9391   else
9392     {
9393       tree ctype = build_complex_type (type);
9394       tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
9395 						arg0, arg1);
9396       tree tgt = save_expr (call);
9397       intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9398       ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9399       ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9400     }
9401 
9402   if (ovf_only)
9403     return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
9404 
9405   tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9406   tree store
9407     = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9408   return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
9409 }
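
/* As an illustration, in C-like notation (a sketch, not the exact
   trees): for non-constant operands

     __builtin_add_overflow (a, b, &r)

   folds to roughly

     tmp = .ADD_OVERFLOW (a, b);	<-- complex-int internal call
     r = REALPART_EXPR <tmp>;		<-- the wrapped result
     (_Bool) IMAGPART_EXPR <tmp>	<-- the overflow flag

   while e.g. __builtin_add_overflow_p with two INTEGER_CST arguments
   folds directly to a boolean constant.  */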
9410 
9411 /* Fold a call to __builtin_FILE to a constant string.  */
9412 
9413 static inline tree
9414 fold_builtin_FILE (location_t loc)
9415 {
9416   if (const char *fname = LOCATION_FILE (loc))
9417     {
9418       /* The documentation says this builtin is equivalent to the preprocessor
9419 	 __FILE__ macro so it appears appropriate to use the same file prefix
9420 	 mappings.  */
9421       fname = remap_macro_filename (fname);
9422 	      return build_string_literal (strlen (fname) + 1, fname);
9423     }
9424 
9425   return build_string_literal (1, "");
9426 }
9427 
9428 /* Fold a call to __builtin_FUNCTION to a constant string.  */
9429 
9430 static inline tree
9431 fold_builtin_FUNCTION ()
9432 {
9433   const char *name = "";
9434 
9435   if (current_function_decl)
9436     name = lang_hooks.decl_printable_name (current_function_decl, 0);
9437 
9438   return build_string_literal (strlen (name) + 1, name);
9439 }
9440 
9441 /* Fold a call to __builtin_LINE to an integer constant.  */
9442 
9443 static inline tree
9444 fold_builtin_LINE (location_t loc, tree type)
9445 {
9446   return build_int_cst (type, LOCATION_LINE (loc));
9447 }
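
/* So a call to __builtin_LINE () appearing on line 42 folds to the
   integer constant 42, and __builtin_FILE () folds to the (possibly
   prefix-remapped) file name as a string literal, mirroring the
   __LINE__ and __FILE__ macros.  */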
9448 
9449 /* Fold a call to built-in function FNDECL with 0 arguments.
9450    This function returns NULL_TREE if no simplification was possible.  */
9451 
9452 static tree
9453 fold_builtin_0 (location_t loc, tree fndecl)
9454 {
9455   tree type = TREE_TYPE (TREE_TYPE (fndecl));
9456   enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9457   switch (fcode)
9458     {
9459     case BUILT_IN_FILE:
9460       return fold_builtin_FILE (loc);
9461 
9462     case BUILT_IN_FUNCTION:
9463       return fold_builtin_FUNCTION ();
9464 
9465     case BUILT_IN_LINE:
9466       return fold_builtin_LINE (loc, type);
9467 
9468     CASE_FLT_FN (BUILT_IN_INF):
9469     CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
9470     case BUILT_IN_INFD32:
9471     case BUILT_IN_INFD64:
9472     case BUILT_IN_INFD128:
9473       return fold_builtin_inf (loc, type, true);
9474 
9475     CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9476     CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
9477       return fold_builtin_inf (loc, type, false);
9478 
9479     case BUILT_IN_CLASSIFY_TYPE:
9480       return fold_builtin_classify_type (NULL_TREE);
9481 
9482     default:
9483       break;
9484     }
9485   return NULL_TREE;
9486 }
9487 
9488 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9489    This function returns NULL_TREE if no simplification was possible.  */
9490 
9491 static tree
9492 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
9493 {
9494   tree type = TREE_TYPE (TREE_TYPE (fndecl));
9495   enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9496 
9497   if (TREE_CODE (arg0) == ERROR_MARK)
9498     return NULL_TREE;
9499 
9500   if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
9501     return ret;
9502 
9503   switch (fcode)
9504     {
9505     case BUILT_IN_CONSTANT_P:
9506       {
9507 	tree val = fold_builtin_constant_p (arg0);
9508 
9509 	/* Gimplification will pull the CALL_EXPR for the builtin out of
9510 	   an if condition.  When not optimizing, we'll not CSE it back.
9511 	   To avoid link-error type regressions, return false now.  */
9512 	if (!val && !optimize)
9513 	  val = integer_zero_node;
9514 
9515 	return val;
9516       }
9517 
9518     case BUILT_IN_CLASSIFY_TYPE:
9519       return fold_builtin_classify_type (arg0);
9520 
9521     case BUILT_IN_STRLEN:
9522       return fold_builtin_strlen (loc, type, arg0);
9523 
9524     CASE_FLT_FN (BUILT_IN_FABS):
9525     CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
9526     case BUILT_IN_FABSD32:
9527     case BUILT_IN_FABSD64:
9528     case BUILT_IN_FABSD128:
9529       return fold_builtin_fabs (loc, arg0, type);
9530 
9531     case BUILT_IN_ABS:
9532     case BUILT_IN_LABS:
9533     case BUILT_IN_LLABS:
9534     case BUILT_IN_IMAXABS:
9535       return fold_builtin_abs (loc, arg0, type);
9536 
9537     CASE_FLT_FN (BUILT_IN_CONJ):
9538       if (validate_arg (arg0, COMPLEX_TYPE)
9539 	&& TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9540 	return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9541     break;
9542 
9543     CASE_FLT_FN (BUILT_IN_CREAL):
9544       if (validate_arg (arg0, COMPLEX_TYPE)
9545 	&& TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9546 	return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9547     break;
9548 
9549     CASE_FLT_FN (BUILT_IN_CIMAG):
9550       if (validate_arg (arg0, COMPLEX_TYPE)
9551 	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9552 	return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9553     break;
9554 
9555     CASE_FLT_FN (BUILT_IN_CARG):
9556       return fold_builtin_carg (loc, arg0, type);
9557 
9558     case BUILT_IN_ISASCII:
9559       return fold_builtin_isascii (loc, arg0);
9560 
9561     case BUILT_IN_TOASCII:
9562       return fold_builtin_toascii (loc, arg0);
9563 
9564     case BUILT_IN_ISDIGIT:
9565       return fold_builtin_isdigit (loc, arg0);
9566 
9567     CASE_FLT_FN (BUILT_IN_FINITE):
9568     case BUILT_IN_FINITED32:
9569     case BUILT_IN_FINITED64:
9570     case BUILT_IN_FINITED128:
9571     case BUILT_IN_ISFINITE:
9572       {
9573 	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9574 	if (ret)
9575 	  return ret;
9576 	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9577       }
9578 
9579     CASE_FLT_FN (BUILT_IN_ISINF):
9580     case BUILT_IN_ISINFD32:
9581     case BUILT_IN_ISINFD64:
9582     case BUILT_IN_ISINFD128:
9583       {
9584 	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9585 	if (ret)
9586 	  return ret;
9587 	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9588       }
9589 
9590     case BUILT_IN_ISNORMAL:
9591       return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9592 
9593     case BUILT_IN_ISINF_SIGN:
9594       return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9595 
9596     CASE_FLT_FN (BUILT_IN_ISNAN):
9597     case BUILT_IN_ISNAND32:
9598     case BUILT_IN_ISNAND64:
9599     case BUILT_IN_ISNAND128:
9600       return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
9601 
9602     case BUILT_IN_FREE:
9603       if (integer_zerop (arg0))
9604 	return build_empty_stmt (loc);
9605       break;
9606 
9607     default:
9608       break;
9609     }
9610 
9611   return NULL_TREE;
9612 
9613 }
9614 
9615 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
9616    This function returns NULL_TREE if no simplification was possible.  */
9617 
9618 static tree
9619 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
9620 {
9621   tree type = TREE_TYPE (TREE_TYPE (fndecl));
9622   enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9623 
9624   if (TREE_CODE (arg0) == ERROR_MARK
9625       || TREE_CODE (arg1) == ERROR_MARK)
9626     return NULL_TREE;
9627 
9628   if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
9629     return ret;
9630 
9631   switch (fcode)
9632     {
9633     CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9634     CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9635       if (validate_arg (arg0, REAL_TYPE)
9636 	  && validate_arg (arg1, POINTER_TYPE))
9637 	return do_mpfr_lgamma_r (arg0, arg1, type);
9638     break;
9639 
9640     CASE_FLT_FN (BUILT_IN_FREXP):
9641       return fold_builtin_frexp (loc, arg0, arg1, type);
9642 
9643     CASE_FLT_FN (BUILT_IN_MODF):
9644       return fold_builtin_modf (loc, arg0, arg1, type);
9645 
9646     case BUILT_IN_STRSPN:
9647       return fold_builtin_strspn (loc, arg0, arg1);
9648 
9649     case BUILT_IN_STRCSPN:
9650       return fold_builtin_strcspn (loc, arg0, arg1);
9651 
9652     case BUILT_IN_STRPBRK:
9653       return fold_builtin_strpbrk (loc, arg0, arg1, type);
9654 
9655     case BUILT_IN_EXPECT:
9656       return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);
9657 
9658     case BUILT_IN_ISGREATER:
9659       return fold_builtin_unordered_cmp (loc, fndecl,
9660 					 arg0, arg1, UNLE_EXPR, LE_EXPR);
9661     case BUILT_IN_ISGREATEREQUAL:
9662       return fold_builtin_unordered_cmp (loc, fndecl,
9663 					 arg0, arg1, UNLT_EXPR, LT_EXPR);
9664     case BUILT_IN_ISLESS:
9665       return fold_builtin_unordered_cmp (loc, fndecl,
9666 					 arg0, arg1, UNGE_EXPR, GE_EXPR);
9667     case BUILT_IN_ISLESSEQUAL:
9668       return fold_builtin_unordered_cmp (loc, fndecl,
9669 					 arg0, arg1, UNGT_EXPR, GT_EXPR);
9670     case BUILT_IN_ISLESSGREATER:
9671       return fold_builtin_unordered_cmp (loc, fndecl,
9672 					 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9673     case BUILT_IN_ISUNORDERED:
9674       return fold_builtin_unordered_cmp (loc, fndecl,
9675 					 arg0, arg1, UNORDERED_EXPR,
9676 					 NOP_EXPR);
9677 
9678       /* We do the folding for va_start in the expander.  */
9679     case BUILT_IN_VA_START:
9680       break;
9681 
9682     case BUILT_IN_OBJECT_SIZE:
9683       return fold_builtin_object_size (arg0, arg1);
9684 
9685     case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9686       return fold_builtin_atomic_always_lock_free (arg0, arg1);
9687 
9688     case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9689       return fold_builtin_atomic_is_lock_free (arg0, arg1);
9690 
9691     default:
9692       break;
9693     }
9694   return NULL_TREE;
9695 }
9696 
9697 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9698    and ARG2.
9699    This function returns NULL_TREE if no simplification was possible.  */
9700 
9701 static tree
9702 fold_builtin_3 (location_t loc, tree fndecl,
9703 		tree arg0, tree arg1, tree arg2)
9704 {
9705   tree type = TREE_TYPE (TREE_TYPE (fndecl));
9706   enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9707 
9708   if (TREE_CODE (arg0) == ERROR_MARK
9709       || TREE_CODE (arg1) == ERROR_MARK
9710       || TREE_CODE (arg2) == ERROR_MARK)
9711     return NULL_TREE;
9712 
9713   if (tree ret = fold_const_call (as_combined_fn (fcode), type,
9714 				  arg0, arg1, arg2))
9715     return ret;
9716 
9717   switch (fcode)
9718     {
9719 
9720     CASE_FLT_FN (BUILT_IN_SINCOS):
9721       return fold_builtin_sincos (loc, arg0, arg1, arg2);
9722 
9723     CASE_FLT_FN (BUILT_IN_REMQUO):
9724       if (validate_arg (arg0, REAL_TYPE)
9725 	  && validate_arg (arg1, REAL_TYPE)
9726 	  && validate_arg (arg2, POINTER_TYPE))
9727 	return do_mpfr_remquo (arg0, arg1, arg2);
9728     break;
9729 
9730     case BUILT_IN_MEMCMP:
9731       return fold_builtin_memcmp (loc, arg0, arg1, arg2);
9732 
9733     case BUILT_IN_EXPECT:
9734       return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);
9735 
9736     case BUILT_IN_EXPECT_WITH_PROBABILITY:
9737       return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);
9738 
9739     case BUILT_IN_ADD_OVERFLOW:
9740     case BUILT_IN_SUB_OVERFLOW:
9741     case BUILT_IN_MUL_OVERFLOW:
9742     case BUILT_IN_ADD_OVERFLOW_P:
9743     case BUILT_IN_SUB_OVERFLOW_P:
9744     case BUILT_IN_MUL_OVERFLOW_P:
9745     case BUILT_IN_SADD_OVERFLOW:
9746     case BUILT_IN_SADDL_OVERFLOW:
9747     case BUILT_IN_SADDLL_OVERFLOW:
9748     case BUILT_IN_SSUB_OVERFLOW:
9749     case BUILT_IN_SSUBL_OVERFLOW:
9750     case BUILT_IN_SSUBLL_OVERFLOW:
9751     case BUILT_IN_SMUL_OVERFLOW:
9752     case BUILT_IN_SMULL_OVERFLOW:
9753     case BUILT_IN_SMULLL_OVERFLOW:
9754     case BUILT_IN_UADD_OVERFLOW:
9755     case BUILT_IN_UADDL_OVERFLOW:
9756     case BUILT_IN_UADDLL_OVERFLOW:
9757     case BUILT_IN_USUB_OVERFLOW:
9758     case BUILT_IN_USUBL_OVERFLOW:
9759     case BUILT_IN_USUBLL_OVERFLOW:
9760     case BUILT_IN_UMUL_OVERFLOW:
9761     case BUILT_IN_UMULL_OVERFLOW:
9762     case BUILT_IN_UMULLL_OVERFLOW:
9763       return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9764 
9765     default:
9766       break;
9767     }
9768   return NULL_TREE;
9769 }
9770 
9771 /* Fold a call to built-in function FNDECL.  ARGS is an array of NARGS
9772    arguments.  IGNORE is true if the result of the
9773    function call is ignored.  This function returns NULL_TREE if no
9774    simplification was possible.  */
9775 
9776 tree
9777 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
9778 {
9779   tree ret = NULL_TREE;
9780 
9781   switch (nargs)
9782     {
9783     case 0:
9784       ret = fold_builtin_0 (loc, fndecl);
9785       break;
9786     case 1:
9787       ret = fold_builtin_1 (loc, fndecl, args[0]);
9788       break;
9789     case 2:
9790       ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
9791       break;
9792     case 3:
9793       ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
9794       break;
9795     default:
9796       ret = fold_builtin_varargs (loc, fndecl, args, nargs);
9797       break;
9798     }
9799   if (ret)
9800     {
9801       ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9802       SET_EXPR_LOCATION (ret, loc);
9803       return ret;
9804     }
9805   return NULL_TREE;
9806 }
9807 
9808 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9809    list ARGS along with N new arguments in NEWARGS.  SKIP is the number
9810    of arguments in ARGS to be omitted.  OLDNARGS is the number of
9811    elements in ARGS.  */
9812 
9813 static tree
9814 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9815 			  int skip, tree fndecl, int n, va_list newargs)
9816 {
9817   int nargs = oldnargs - skip + n;
9818   tree *buffer;
9819 
9820   if (n > 0)
9821     {
9822       int i, j;
9823 
9824       buffer = XALLOCAVEC (tree, nargs);
9825       for (i = 0; i < n; i++)
9826 	buffer[i] = va_arg (newargs, tree);
9827       for (j = skip; j < oldnargs; j++, i++)
9828 	buffer[i] = args[j];
9829     }
9830   else
9831     buffer = args + skip;
9832 
9833   return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9834 }
9835 
9836 /* Return true if FNDECL shouldn't be folded right now.
9837    If a built-in function has an inline attribute always_inline
9838    wrapper, defer folding it after always_inline functions have
9839    been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
9840    might not be performed.  */
9841 
9842 bool
9843 avoid_folding_inline_builtin (tree fndecl)
9844 {
9845   return (DECL_DECLARED_INLINE_P (fndecl)
9846 	  && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9847 	  && cfun
9848 	  && !cfun->always_inline_functions_inlined
9849 	  && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
9850 }
9851 
9852 /* A wrapper function for builtin folding that prevents warnings for
9853    "statement without effect" and the like, caused by removing the
9854    call node earlier than the warning is generated.  */
9855 
9856 tree
9857 fold_call_expr (location_t loc, tree exp, bool ignore)
9858 {
9859   tree ret = NULL_TREE;
9860   tree fndecl = get_callee_fndecl (exp);
9861   if (fndecl && fndecl_built_in_p (fndecl)
9862       /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9863 	 yet.  Defer folding until we see all the arguments
9864 	 (after inlining).  */
9865       && !CALL_EXPR_VA_ARG_PACK (exp))
9866     {
9867       int nargs = call_expr_nargs (exp);
9868 
9869       /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9870 	 instead last argument is __builtin_va_arg_pack ().  Defer folding
9871 	 even in that case, until arguments are finalized.  */
9872       if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9873 	{
9874 	  tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9875 	  if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
9876 	    return NULL_TREE;
9877 	}
9878 
9879       if (avoid_folding_inline_builtin (fndecl))
9880 	return NULL_TREE;
9881 
9882       if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9883         return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9884 				     CALL_EXPR_ARGP (exp), ignore);
9885       else
9886 	{
9887 	  tree *args = CALL_EXPR_ARGP (exp);
9888 	  ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
9889 	  if (ret)
9890 	    return ret;
9891 	}
9892     }
9893   return NULL_TREE;
9894 }
9895 
9896 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9897    N arguments are passed in the array ARGARRAY.  Return a folded
9898    expression or NULL_TREE if no simplification was possible.  */
9899 
9900 tree
9901 fold_builtin_call_array (location_t loc, tree,
9902 			 tree fn,
9903 			 int n,
9904 			 tree *argarray)
9905 {
9906   if (TREE_CODE (fn) != ADDR_EXPR)
9907     return NULL_TREE;
9908 
9909   tree fndecl = TREE_OPERAND (fn, 0);
9910   if (TREE_CODE (fndecl) == FUNCTION_DECL
9911       && fndecl_built_in_p (fndecl))
9912     {
9913       /* If last argument is __builtin_va_arg_pack (), arguments to this
9914 	 function are not finalized yet.  Defer folding until they are.  */
9915       if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9916 	{
9917 	  tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9918 	  if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
9919 	    return NULL_TREE;
9920 	}
9921       if (avoid_folding_inline_builtin (fndecl))
9922 	return NULL_TREE;
9923       if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9924 	return targetm.fold_builtin (fndecl, n, argarray, false);
9925       else
9926 	return fold_builtin_n (loc, fndecl, argarray, n, false);
9927     }
9928 
9929   return NULL_TREE;
9930 }
9931 
9932 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9933    along with N new arguments specified as the "..." parameters.  SKIP
9934    is the number of arguments in EXP to be omitted.  This function is used
9935    to do varargs-to-varargs transformations.  */
9936 
9937 static tree
9938 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9939 {
9940   va_list ap;
9941   tree t;
9942 
9943   va_start (ap, n);
9944   t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9945 				CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9946   va_end (ap);
9947 
9948   return t;
9949 }
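
/* As an illustration (with hypothetical trees): if EXP is the call
   "f (a, b, c)", then rewrite_call_expr (loc, exp, 1, g_decl, 2, x, y)
   skips the first old argument and builds "g (x, y, b, c)".  */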
9950 
9951 /* Validate a single argument ARG against a tree code CODE representing
9952    a type.  Return true when argument is valid.  */
9953 
9954 static bool
9955 validate_arg (const_tree arg, enum tree_code code)
9956 {
9957   if (!arg)
9958     return false;
9959   else if (code == POINTER_TYPE)
9960     return POINTER_TYPE_P (TREE_TYPE (arg));
9961   else if (code == INTEGER_TYPE)
9962     return INTEGRAL_TYPE_P (TREE_TYPE (arg));
9963   return code == TREE_CODE (TREE_TYPE (arg));
9964 }
9965 
9966 /* This function validates the types of a function call argument list
9967    against a specified list of tree_codes.  If the last specifier is a 0,
9968    that represents an ellipses, otherwise the last specifier must be a
9969    VOID_TYPE.
9970 
9971    This is the GIMPLE version of validate_arglist.  Eventually we want to
9972    completely convert builtins.c to work from GIMPLEs and the tree based
9973    validate_arglist will then be removed.  */
9974 
9975 bool
9976 validate_gimple_arglist (const gcall *call, ...)
9977 {
9978   enum tree_code code;
9979   bool res = false;
9980   va_list ap;
9981   const_tree arg;
9982   size_t i;
9983 
9984   va_start (ap, call);
9985   i = 0;
9986 
9987   do
9988     {
9989       code = (enum tree_code) va_arg (ap, int);
9990       switch (code)
9991 	{
9992 	case 0:
9993 	  /* This signifies an ellipsis, any further arguments are all ok.  */
9994 	  res = true;
9995 	  goto end;
9996 	case VOID_TYPE:
9997 	  /* This signifies an endlink, if no arguments remain, return
9998 	     true, otherwise return false.  */
9999 	  res = (i == gimple_call_num_args (call));
10000 	  goto end;
10001 	default:
10002 	  /* If no parameters remain or the parameter's code does not
10003 	     match the specified code, return false.  Otherwise continue
10004 	     checking any remaining arguments.  */
10005 	  arg = gimple_call_arg (call, i++);
10006 	  if (!validate_arg (arg, code))
10007 	    goto end;
10008 	  break;
10009 	}
10010     }
10011   while (1);
10012 
10013   /* We need gotos here since we can only have one VA_CLOSE in a
10014      function.  */
10015  end: ;
10016   va_end (ap);
10017 
10018   return res;
10019 }
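
/* The codes are positional; e.g. (a usage sketch)

     validate_gimple_arglist (call, REAL_TYPE, POINTER_TYPE, VOID_TYPE)

   accepts exactly one floating-point argument followed by one pointer
   argument, while ending the list with 0 instead of VOID_TYPE would
   additionally accept arbitrary trailing arguments.  */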
10020 
10021 /* Default target-specific builtin expander that does nothing.  */
10022 
10023 rtx
10024 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
10025 			rtx target ATTRIBUTE_UNUSED,
10026 			rtx subtarget ATTRIBUTE_UNUSED,
10027 			machine_mode mode ATTRIBUTE_UNUSED,
10028 			int ignore ATTRIBUTE_UNUSED)
10029 {
10030   return NULL_RTX;
10031 }
10032 
10033 /* Returns true if EXP represents data that would potentially reside
10034    in a readonly section.  */
10035 
10036 bool
10037 readonly_data_expr (tree exp)
10038 {
10039   STRIP_NOPS (exp);
10040 
10041   if (TREE_CODE (exp) != ADDR_EXPR)
10042     return false;
10043 
10044   exp = get_base_address (TREE_OPERAND (exp, 0));
10045   if (!exp)
10046     return false;
10047 
10048   /* Make sure we call decl_readonly_section only for trees it
10049      can handle (since it returns true for everything it doesn't
10050      understand).  */
10051   if (TREE_CODE (exp) == STRING_CST
10052       || TREE_CODE (exp) == CONSTRUCTOR
10053       || (VAR_P (exp) && TREE_STATIC (exp)))
10054     return decl_readonly_section (exp, 0);
10055   else
10056     return false;
10057 }
10058 
10059 /* Simplify a call to the strpbrk builtin.  S1 and S2 are the arguments
10060    to the call, and TYPE is its return type.
10061 
10062    Return NULL_TREE if no simplification was possible, otherwise return the
10063    simplified form of the call as a tree.
10064 
10065    The simplified form may be a constant or other expression which
10066    computes the same value, but in a more efficient manner (including
10067    calls to other builtin functions).
10068 
10069    The call may contain arguments which need to be evaluated, but
10070    which are not useful to determine the result of the call.  In
10071    this case we return a chain of COMPOUND_EXPRs.  The LHS of each
10072    COMPOUND_EXPR will be an argument which must be evaluated.
10073    COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
10074    COMPOUND_EXPR in the chain will contain the tree for the simplified
10075    form of the builtin function call.  */
10076 
10077 static tree
10078 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
10079 {
10080   if (!validate_arg (s1, POINTER_TYPE)
10081       || !validate_arg (s2, POINTER_TYPE))
10082     return NULL_TREE;
10083   else
10084     {
10085       tree fn;
10086       const char *p1, *p2;
10087 
10088       p2 = c_getstr (s2);
10089       if (p2 == NULL)
10090 	return NULL_TREE;
10091 
10092       p1 = c_getstr (s1);
10093       if (p1 != NULL)
10094 	{
10095 	  const char *r = strpbrk (p1, p2);
10096 	  tree tem;
10097 
10098 	  if (r == NULL)
10099 	    return build_int_cst (TREE_TYPE (s1), 0);
10100 
10101 	  /* Return an offset into the constant string argument.  */
10102 	  tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
10103 	  return fold_convert_loc (loc, type, tem);
10104 	}
10105 
10106       if (p2[0] == '\0')
10107 	/* strpbrk(x, "") == NULL.
10108 	   Evaluate and ignore s1 in case it had side-effects.  */
10109 	return omit_one_operand_loc (loc, type, integer_zero_node, s1);
10110 
10111       if (p2[1] != '\0')
10112 	return NULL_TREE;  /* Really call strpbrk.  */
10113 
10114       fn = builtin_decl_implicit (BUILT_IN_STRCHR);
10115       if (!fn)
10116 	return NULL_TREE;
10117 
10118       /* New argument list transforming strpbrk(s1, s2) to
10119 	 strchr(s1, s2[0]).  */
10120       return build_call_expr_loc (loc, fn, 2, s1,
10121 				  build_int_cst (integer_type_node, p2[0]));
10122     }
10123 }
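
/* For instance, strpbrk (s, "") folds to a null pointer (still
   evaluating S for side effects), strpbrk (s, "o") becomes
   strchr (s, 'o'), and with both strings constant, e.g.
   strpbrk ("hello", "lo"), the result folds to a pointer offset of
   two bytes into the first argument.  */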
10124 
10125 /* Simplify a call to the strspn builtin.  S1 and S2 are the arguments
10126    to the call.
10127 
10128    Return NULL_TREE if no simplification was possible, otherwise return the
10129    simplified form of the call as a tree.
10130 
10131    The simplified form may be a constant or other expression which
10132    computes the same value, but in a more efficient manner (including
10133    calls to other builtin functions).
10134 
10135    The call may contain arguments which need to be evaluated, but
10136    which are not useful to determine the result of the call.  In
10137    this case we return a chain of COMPOUND_EXPRs.  The LHS of each
10138    COMPOUND_EXPR will be an argument which must be evaluated.
10139    COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
10140    COMPOUND_EXPR in the chain will contain the tree for the simplified
10141    form of the builtin function call.  */
10142 
10143 static tree
10144 fold_builtin_strspn (location_t loc, tree s1, tree s2)
10145 {
10146   if (!validate_arg (s1, POINTER_TYPE)
10147       || !validate_arg (s2, POINTER_TYPE))
10148     return NULL_TREE;
10149   else
10150     {
10151       const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
10152 
10153       /* If either argument is "", return NULL_TREE.  */
10154       if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
10155 	/* Evaluate and ignore both arguments in case either one has
10156 	   side-effects.  */
10157 	return omit_two_operands_loc (loc, size_type_node, size_zero_node,
10158 				  s1, s2);
10159       return NULL_TREE;
10160     }
10161 }
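
/* E.g. strspn (s, "") and strspn ("", s) both fold to (size_t) 0,
   with the arguments still evaluated for their side effects; no
   other forms are simplified here.  */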
10162 
10163 /* Simplify a call to the strcspn builtin.  S1 and S2 are the arguments
10164    to the call.
10165 
10166    Return NULL_TREE if no simplification was possible, otherwise return the
10167    simplified form of the call as a tree.
10168 
10169    The simplified form may be a constant or other expression which
10170    computes the same value, but in a more efficient manner (including
10171    calls to other builtin functions).
10172 
10173    The call may contain arguments which need to be evaluated, but
10174    which are not useful to determine the result of the call.  In
10175    this case we return a chain of COMPOUND_EXPRs.  The LHS of each
10176    COMPOUND_EXPR will be an argument which must be evaluated.
10177    COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
10178    COMPOUND_EXPR in the chain will contain the tree for the simplified
10179    form of the builtin function call.  */
10180 
10181 static tree
10182 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
10183 {
10184   if (!validate_arg (s1, POINTER_TYPE)
10185       || !validate_arg (s2, POINTER_TYPE))
10186     return NULL_TREE;
10187   else
10188     {
10189       /* If the first argument is "", return NULL_TREE.  */
10190       const char *p1 = c_getstr (s1);
10191       if (p1 && *p1 == '\0')
10192 	{
10193 	  /* Evaluate and ignore argument s2 in case it has
10194 	     side-effects.  */
10195 	  return omit_one_operand_loc (loc, size_type_node,
10196 				   size_zero_node, s2);
10197 	}
10198 
10199       /* If the second argument is "", return __builtin_strlen(s1).  */
10200       const char *p2 = c_getstr (s2);
10201       if (p2 && *p2 == '\0')
10202 	{
10203 	  tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
10204 
10205 	  /* If the replacement _DECL isn't initialized, don't do the
10206 	     transformation.  */
10207 	  if (!fn)
10208 	    return NULL_TREE;
10209 
10210 	  return build_call_expr_loc (loc, fn, 1, s1);
10211 	}
10212       return NULL_TREE;
10213     }
10214 }
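
/* E.g. strcspn (s, "") folds to strlen (s), and strcspn ("", s)
   folds to (size_t) 0 while still evaluating the second argument for
   side effects; no other forms are simplified here.  */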
10215 
10216 /* Fold the next_arg or va_start call EXP.  Returns true if an error
10217    was produced, false otherwise.  This is done so that we don't output
10218    the error or warning twice or three times.  */
10219 
10220 bool
10221 fold_builtin_next_arg (tree exp, bool va_start_p)
10222 {
10223   tree fntype = TREE_TYPE (current_function_decl);
10224   int nargs = call_expr_nargs (exp);
10225   tree arg;
10226   /* There is a good chance the current input_location points inside the
10227      definition of the va_start macro (perhaps on the token for
10228      builtin) in a system header, so warnings will not be emitted.
10229      Use the location in real source code.  */
10230   location_t current_location =
10231     linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
10232 					      NULL);
10233 
10234   if (!stdarg_p (fntype))
10235     {
10236       error ("%<va_start%> used in function with fixed args");
10237       return true;
10238     }
10239 
10240   if (va_start_p)
10241     {
10242 	  if (nargs != 2)
10243 	{
10244 	  error ("wrong number of arguments to function %<va_start%>");
10245 	  return true;
10246 	}
10247       arg = CALL_EXPR_ARG (exp, 1);
10248     }
10249   /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
10250      when we checked the arguments and if needed issued a warning.  */
10251   else
10252     {
10253       if (nargs == 0)
10254 	{
10255 	  /* Evidently an out of date version of <stdarg.h>; can't validate
10256 	     va_start's second argument, but can still work as intended.  */
10257 	  warning_at (current_location,
10258 		      OPT_Wvarargs,
10259 		   "%<__builtin_next_arg%> called without an argument");
10260 	  return true;
10261 	}
10262       else if (nargs > 1)
10263 	{
10264 	  error ("wrong number of arguments to function %<__builtin_next_arg%>");
10265 	  return true;
10266 	}
10267       arg = CALL_EXPR_ARG (exp, 0);
10268     }
10269 
10270   if (TREE_CODE (arg) == SSA_NAME
10271       && SSA_NAME_VAR (arg))
10272     arg = SSA_NAME_VAR (arg);
10273 
10274   /* We destructively modify the call to be __builtin_va_start (ap, 0)
10275      or __builtin_next_arg (0) the first time we see it, after checking
10276      the arguments and if needed issuing a warning.  */
10277   if (!integer_zerop (arg))
10278     {
10279       tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
10280 
10281       /* Strip off all nops for the sake of the comparison.  This
10282 	 is not quite the same as STRIP_NOPS.  It does more.
10283 	 We must also strip off INDIRECT_EXPR for C++ reference
10284 	 parameters.  */
10285       while (CONVERT_EXPR_P (arg)
10286 	     || TREE_CODE (arg) == INDIRECT_REF)
10287 	arg = TREE_OPERAND (arg, 0);
10288       if (arg != last_parm)
10289 	{
10290 	  /* FIXME: Sometimes with the tree optimizers we can get
10291 	     something other than the last argument, even though the user
10292 	     used the last argument.  We just warn and set the arg to be
10293 	     the last argument so that we do not generate wrong code
10294 	     because of it.  */
10295 	  warning_at (current_location,
10296 		      OPT_Wvarargs,
10297 		      "second parameter of %<va_start%> not last named argument");
10298 	}
10299 
10300       /* Undefined by C99 7.15.1.4p4 (va_start):
10301          "If the parameter parmN is declared with the register storage
10302          class, with a function or array type, or with a type that is
10303          not compatible with the type that results after application of
10304          the default argument promotions, the behavior is undefined."
10305       */
10306       else if (DECL_REGISTER (arg))
10307 	{
10308 	  warning_at (current_location,
10309 		      OPT_Wvarargs,
10310 		      "undefined behavior when second parameter of "
10311 		      "%<va_start%> is declared with %<register%> storage");
10312 	}
10313 
10314       /* We want to verify the second parameter just once before the tree
10315 	 optimizers are run and then avoid keeping it in the tree,
10316 	 as otherwise we could warn even for correct code like:
10317 	 void foo (int i, ...)
10318 	 { va_list ap; i++; va_start (ap, i); va_end (ap); }  */
10319       if (va_start_p)
10320 	CALL_EXPR_ARG (exp, 1) = integer_zero_node;
10321       else
10322 	CALL_EXPR_ARG (exp, 0) = integer_zero_node;
10323     }
10324   return false;
10325 }
10326 
10327 
10328 /* Expand a call EXP to __builtin_object_size.  */
10329 
10330 static rtx
10331 expand_builtin_object_size (tree exp)
10332 {
10333   tree ost;
10334   int object_size_type;
10335   tree fndecl = get_callee_fndecl (exp);
10336 
10337   if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
10338     {
10339       error ("%Kfirst argument of %qD must be a pointer, second integer constant",
10340 	     exp, fndecl);
10341       expand_builtin_trap ();
10342       return const0_rtx;
10343     }
10344 
10345   ost = CALL_EXPR_ARG (exp, 1);
10346   STRIP_NOPS (ost);
10347 
10348   if (TREE_CODE (ost) != INTEGER_CST
10349       || tree_int_cst_sgn (ost) < 0
10350       || compare_tree_int (ost, 3) > 0)
10351     {
10352       error ("%Klast argument of %qD is not integer constant between 0 and 3",
10353 	     exp, fndecl);
10354       expand_builtin_trap ();
10355       return const0_rtx;
10356     }
10357 
10358   object_size_type = tree_to_shwi (ost);
10359 
10360   return object_size_type < 2 ? constm1_rtx : const0_rtx;
10361 }
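
/* That is, any call still remaining at expansion time gets the
   documented "unknown" result: e.g. __builtin_object_size (p, 0)
   expands to (size_t) -1 and __builtin_object_size (p, 2) to
   (size_t) 0.  */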
10362 
10363 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
10364    FCODE is the BUILT_IN_* to use.
10365    Return NULL_RTX if we failed; the caller should emit a normal call,
10366    otherwise try to get the result in TARGET, if convenient (and in
10367    mode MODE if that's convenient).  */
10368 
10369 static rtx
10370 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
10371 			   enum built_in_function fcode)
10372 {
10373   if (!validate_arglist (exp,
10374 			 POINTER_TYPE,
10375 			 fcode == BUILT_IN_MEMSET_CHK
10376 			 ? INTEGER_TYPE : POINTER_TYPE,
10377 			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
10378     return NULL_RTX;
10379 
10380   tree dest = CALL_EXPR_ARG (exp, 0);
10381   tree src = CALL_EXPR_ARG (exp, 1);
10382   tree len = CALL_EXPR_ARG (exp, 2);
10383   tree size = CALL_EXPR_ARG (exp, 3);
10384 
10385   bool sizes_ok = check_access (exp, dest, src, len, /*maxread=*/NULL_TREE,
10386 				/*str=*/NULL_TREE, size);
10387 
10388   if (!tree_fits_uhwi_p (size))
10389     return NULL_RTX;
10390 
10391   if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
10392     {
10393       /* Avoid transforming the checking call to an ordinary one when
10394 	 an overflow has been detected or when the call couldn't be
10395 	 validated because the size is not constant.  */
10396       if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
10397 	return NULL_RTX;
10398 
10399       tree fn = NULL_TREE;
10400       /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
10401 	 mem{cpy,pcpy,move,set} is available.  */
10402       switch (fcode)
10403 	{
10404 	case BUILT_IN_MEMCPY_CHK:
10405 	  fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
10406 	  break;
10407 	case BUILT_IN_MEMPCPY_CHK:
10408 	  fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
10409 	  break;
10410 	case BUILT_IN_MEMMOVE_CHK:
10411 	  fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
10412 	  break;
10413 	case BUILT_IN_MEMSET_CHK:
10414 	  fn = builtin_decl_explicit (BUILT_IN_MEMSET);
10415 	  break;
10416 	default:
10417 	  break;
10418 	}
10419 
10420       if (! fn)
10421 	return NULL_RTX;
10422 
10423       fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
10424       gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10425       CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10426       return expand_expr (fn, target, mode, EXPAND_NORMAL);
10427     }
10428   else if (fcode == BUILT_IN_MEMSET_CHK)
10429     return NULL_RTX;
10430   else
10431     {
10432       unsigned int dest_align = get_pointer_alignment (dest);
10433 
10434       /* If DEST is not a pointer type, call the normal function.  */
10435       if (dest_align == 0)
10436 	return NULL_RTX;
10437 
10438       /* If SRC and DEST are the same (and not volatile), do nothing.  */
10439       if (operand_equal_p (src, dest, 0))
10440 	{
10441 	  tree expr;
10442 
10443 	  if (fcode != BUILT_IN_MEMPCPY_CHK)
10444 	    {
10445 	      /* Evaluate and ignore LEN in case it has side-effects.  */
10446 	      expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
10447 	      return expand_expr (dest, target, mode, EXPAND_NORMAL);
10448 	    }
10449 
10450 	  expr = fold_build_pointer_plus (dest, len);
10451 	  return expand_expr (expr, target, mode, EXPAND_NORMAL);
10452 	}
10453 
10454       /* __memmove_chk special case.  */
10455       if (fcode == BUILT_IN_MEMMOVE_CHK)
10456 	{
10457 	  unsigned int src_align = get_pointer_alignment (src);
10458 
10459 	  if (src_align == 0)
10460 	    return NULL_RTX;
10461 
10462 	  /* If src is categorized for a readonly section we can use
10463 	     normal __memcpy_chk.  */
10464 	  if (readonly_data_expr (src))
10465 	    {
10466 	      tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
10467 	      if (!fn)
10468 		return NULL_RTX;
10469 	      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
10470 					  dest, src, len, size);
10471 	      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10472 	      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10473 	      return expand_expr (fn, target, mode, EXPAND_NORMAL);
10474 	    }
10475 	}
10476       return NULL_RTX;
10477     }
10478 }
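
/* For example, assuming the size arguments are constants here,
   __memcpy_chk (dst, src, 16, 32) expands as a plain
   memcpy (dst, src, 16) since the length provably fits the object,
   and an unknown object size of (size_t) -1 is transformed the same
   way; other cases fall back to the library call (a sketch of the
   cases handled above).  */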
10479 
10480 /* Emit warning if a buffer overflow is detected at compile time.  */
10481 
10482 static void
10483 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
10484 {
10485   /* The source string.  */
10486   tree srcstr = NULL_TREE;
10487   /* The size of the destination object.  */
10488   tree objsize = NULL_TREE;
10489   /* The string that is being concatenated with (as in __strcat_chk)
10490      or null if it isn't.  */
10491   tree catstr = NULL_TREE;
10492   /* The maximum length of the source sequence in a bounded operation
10493      (such as __strncat_chk) or null if the operation isn't bounded
10494      (such as __strcat_chk).  */
10495   tree maxread = NULL_TREE;
10496   /* The exact size of the access (such as in __strncpy_chk).  */
10497   tree size = NULL_TREE;
10498 
10499   switch (fcode)
10500     {
10501     case BUILT_IN_STRCPY_CHK:
10502     case BUILT_IN_STPCPY_CHK:
10503       srcstr = CALL_EXPR_ARG (exp, 1);
10504       objsize = CALL_EXPR_ARG (exp, 2);
10505       break;
10506 
10507     case BUILT_IN_STRCAT_CHK:
10508       /* For __strcat_chk the warning will be emitted only if overflowing
10509 	 by at least strlen (dest) + 1 bytes.  */
10510       catstr = CALL_EXPR_ARG (exp, 0);
10511       srcstr = CALL_EXPR_ARG (exp, 1);
10512       objsize = CALL_EXPR_ARG (exp, 2);
10513       break;
10514 
10515     case BUILT_IN_STRNCAT_CHK:
10516       catstr = CALL_EXPR_ARG (exp, 0);
10517       srcstr = CALL_EXPR_ARG (exp, 1);
10518       maxread = CALL_EXPR_ARG (exp, 2);
10519       objsize = CALL_EXPR_ARG (exp, 3);
10520       break;
10521 
10522     case BUILT_IN_STRNCPY_CHK:
10523     case BUILT_IN_STPNCPY_CHK:
10524       srcstr = CALL_EXPR_ARG (exp, 1);
10525       size = CALL_EXPR_ARG (exp, 2);
10526       objsize = CALL_EXPR_ARG (exp, 3);
10527       break;
10528 
10529     case BUILT_IN_SNPRINTF_CHK:
10530     case BUILT_IN_VSNPRINTF_CHK:
10531       maxread = CALL_EXPR_ARG (exp, 1);
10532       objsize = CALL_EXPR_ARG (exp, 3);
10533       break;
10534     default:
10535       gcc_unreachable ();
10536     }
10537 
10538   if (catstr && maxread)
10539     {
10540       /* Check __strncat_chk.  There is no way to determine the length
10541 	 of the string to which the source string is being appended so
10542 	 just warn when the length of the source string is not known.  */
10543       check_strncat_sizes (exp, objsize);
10544       return;
10545     }
10546 
10547   /* The destination argument is the first one for all built-ins above.  */
10548   tree dst = CALL_EXPR_ARG (exp, 0);
10549 
10550   check_access (exp, dst, srcstr, size, maxread, srcstr, objsize);
10551 }
10552 
10553 /* Emit warning if a buffer overflow is detected at compile time
10554    in __sprintf_chk/__vsprintf_chk calls.  */
10555 
10556 static void
10557 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
10558 {
10559   tree size, len, fmt;
10560   const char *fmt_str;
10561   int nargs = call_expr_nargs (exp);
10562 
10563   /* Verify the required arguments in the original call.  */
10564 
10565   if (nargs < 4)
10566     return;
10567   size = CALL_EXPR_ARG (exp, 2);
10568   fmt = CALL_EXPR_ARG (exp, 3);
10569 
10570   if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
10571     return;
10572 
10573   /* Check whether the format is a literal string constant.  */
10574   fmt_str = c_getstr (fmt);
10575   if (fmt_str == NULL)
10576     return;
10577 
10578   if (!init_target_chars ())
10579     return;
10580 
10581   /* If the format doesn't contain % args or %%, we know its size.  */
10582   if (strchr (fmt_str, target_percent) == 0)
10583     len = build_int_cstu (size_type_node, strlen (fmt_str));
10584   /* If the format is "%s" and first ... argument is a string literal,
10585      we know it too.  */
10586   else if (fcode == BUILT_IN_SPRINTF_CHK
10587 	   && strcmp (fmt_str, target_percent_s) == 0)
10588     {
10589       tree arg;
10590 
10591       if (nargs < 5)
10592 	return;
10593       arg = CALL_EXPR_ARG (exp, 4);
10594       if (! POINTER_TYPE_P (TREE_TYPE (arg)))
10595 	return;
10596 
10597       len = c_strlen (arg, 1);
10598       if (!len || ! tree_fits_uhwi_p (len))
10599 	return;
10600     }
10601   else
10602     return;
10603 
10604   /* Add one for the terminating nul.  */
10605   len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
10606 
10607   check_access (exp, /*dst=*/NULL_TREE, /*src=*/NULL_TREE, /*size=*/NULL_TREE,
10608 		/*maxread=*/NULL_TREE, len, size);
10609 }
10610 
10611 /* Emit warning if a free is called with address of a variable.  */
10612 
10613 static void
10614 maybe_emit_free_warning (tree exp)
10615 {
10616   if (call_expr_nargs (exp) != 1)
10617     return;
10618 
10619   tree arg = CALL_EXPR_ARG (exp, 0);
10620 
10621   STRIP_NOPS (arg);
10622   if (TREE_CODE (arg) != ADDR_EXPR)
10623     return;
10624 
10625   arg = get_base_address (TREE_OPERAND (arg, 0));
10626   if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
10627     return;
10628 
10629   if (SSA_VAR_P (arg))
10630     warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10631 		"%Kattempt to free a non-heap object %qD", exp, arg);
10632   else
10633     warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
10634 		"%Kattempt to free a non-heap object", exp);
10635 }
10636 
10637 /* Fold a call to __builtin_object_size with arguments PTR and OST,
10638    if possible.  */
10639 
10640 static tree
10641 fold_builtin_object_size (tree ptr, tree ost)
10642 {
10643   unsigned HOST_WIDE_INT bytes;
10644   int object_size_type;
10645 
10646   if (!validate_arg (ptr, POINTER_TYPE)
10647       || !validate_arg (ost, INTEGER_TYPE))
10648     return NULL_TREE;
10649 
10650   STRIP_NOPS (ost);
10651 
10652   if (TREE_CODE (ost) != INTEGER_CST
10653       || tree_int_cst_sgn (ost) < 0
10654       || compare_tree_int (ost, 3) > 0)
10655     return NULL_TREE;
10656 
10657   object_size_type = tree_to_shwi (ost);
10658 
10659   /* __builtin_object_size doesn't evaluate side-effects in its arguments;
10660      if there are any side-effects, it returns (size_t) -1 for types 0 and 1
10661      and (size_t) 0 for types 2 and 3.  */
10662   if (TREE_SIDE_EFFECTS (ptr))
10663     return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
10664 
10665   if (TREE_CODE (ptr) == ADDR_EXPR)
10666     {
10667       compute_builtin_object_size (ptr, object_size_type, &bytes);
10668       if (wi::fits_to_tree_p (bytes, size_type_node))
10669 	return build_int_cstu (size_type_node, bytes);
10670     }
10671   else if (TREE_CODE (ptr) == SSA_NAME)
10672     {
10673       /* If object size is not known yet, delay folding until
10674        later.  Maybe subsequent passes will help determining
10675        it.  */
10676       if (compute_builtin_object_size (ptr, object_size_type, &bytes)
10677 	  && wi::fits_to_tree_p (bytes, size_type_node))
10678 	return build_int_cstu (size_type_node, bytes);
10679     }
10680 
10681   return NULL_TREE;
10682 }
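
/* A minimal sketch (assumed user code) of the folding above:

     char a[10];
     size_t s0 = __builtin_object_size (&a[4], 0);   // folds to 6
     // With a side-effecting argument (g is a hypothetical function
     // returning void *), the argument is not evaluated; types 0 and 1
     // fold to (size_t) -1 while types 2 and 3 fold to 0:
     size_t s2 = __builtin_object_size (g (), 2);    // folds to 0
*/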

/* Builtins with folding operations that operate on "..." arguments
   need special handling; we need to store the arguments in a convenient
   data structure before attempting any folding.  Fortunately there are
   only a few builtins that fall into this category.  FNDECL is the
   function, ARGS are the arguments of the call, and NARGS is their
   number.  */

static tree
fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  tree ret = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_FPCLASSIFY:
      ret = fold_builtin_fpclassify (loc, args, nargs);
      break;

    default:
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}
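
/* Hedged sketch: with constant arguments, the BUILT_IN_FPCLASSIFY case
   above lets a call such as

     __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
			   FP_SUBNORMAL, FP_ZERO, 1.0)

   fold to the FP_NORMAL value at compile time (the classification is
   done by fold_builtin_fpclassify).  */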

/* Initialize format string characters in the target charset.  */

bool
init_target_chars (void)
{
  static bool init;
  if (!init)
    {
      target_newline = lang_hooks.to_target_charset ('\n');
      target_percent = lang_hooks.to_target_charset ('%');
      target_c = lang_hooks.to_target_charset ('c');
      target_s = lang_hooks.to_target_charset ('s');
      if (target_newline == 0 || target_percent == 0 || target_c == 0
	  || target_s == 0)
	return false;

      target_percent_c[0] = target_percent;
      target_percent_c[1] = target_c;
      target_percent_c[2] = '\0';

      target_percent_s[0] = target_percent;
      target_percent_s[1] = target_s;
      target_percent_s[2] = '\0';

      target_percent_s_newline[0] = target_percent;
      target_percent_s_newline[1] = target_s;
      target_percent_s_newline[2] = target_newline;
      target_percent_s_newline[3] = '\0';

      init = true;
    }
  return true;
}
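
/* Illustrative note (an assumption about intent, not from this file):
   on a cross-compiler whose target charset differs from the host's
   (e.g. an EBCDIC target), '%' and 's' have different codes on host
   and target, so format strings read from target string constants must
   be compared against target_percent_s and friends rather than against
   host literals:

     if (strcmp (fmt_str, target_percent_s) == 0)   // fmt is "%s"
       ...
*/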

/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and that no overflow/underflow occurred.  INEXACT is true if M was
   not exactly calculated.  TYPE is the tree type for the result.
   This function assumes the MPFR flags were cleared before M was
   calculated, so that any flag set since then was raised by that
   calculation.  Return NULL_TREE if any checks fail.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of the MPFR calculation was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      real_from_mpfr (&rr, m, type, GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (real_isfinite (&rr)
	  && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
	{
	  REAL_VALUE_TYPE rmode;

	  real_convert (&rmode, TYPE_MODE (type), &rr);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (real_identical (&rmode, &rr))
	    return build_real (type, rmode);
	}
    }
  return NULL_TREE;
}
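
/* Hedged sketch of the caller protocol this helper expects: clear the
   MPFR flags immediately before the computation, so that any flag seen
   in do_mpfr_ckconv was raised by that computation.

     mpfr_t m;
     mpfr_init2 (m, prec);
     mpfr_from_real (m, ra, GMP_RNDN);
     mpfr_clear_flags ();
     int inexact = mpfr_sin (m, m, rnd);   // any unary MPFR entry point
     tree result = do_mpfr_ckconv (m, type, inexact);
     mpfr_clear (m);
*/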

/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and that no overflow/underflow occurred.  INEXACT is true if
   M was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes the MPFR flags were cleared before M was
   calculated, so that any flag set since then was raised by that
   calculation.  Return NULL_TREE if any checks fail; if FORCE_CONVERT
   is true, bypass the checks.  */

static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of the MPC calculation was exact.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
	  && !mpfr_overflow_p () && !mpfr_underflow_p ()
	  && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
	 but the mpfr_t is not, then we underflowed in the
	 conversion.  */
      if (force_convert
	  || (real_isfinite (&re) && real_isfinite (&im)
	      && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
	      && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
	{
	  REAL_VALUE_TYPE re_mode, im_mode;

	  real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
	  real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
	  /* Proceed iff the specified mode can hold the value.  */
	  if (force_convert
	      || (real_identical (&re_mode, &re)
		  && real_identical (&im_mode, &im)))
	    return build_complex (type, build_real (TREE_TYPE (type), re_mode),
				  build_real (TREE_TYPE (type), im_mode));
	}
    }
  return NULL_TREE;
}

/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   the pointer *(ARG_QUO) and return the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  tree result_rem;
	  long integer_quo;
	  mpfr_t m0, m1;

	  mpfr_inits2 (prec, m0, m1, NULL);
	  mpfr_from_real (m0, ra0, GMP_RNDN);
	  mpfr_from_real (m1, ra1, GMP_RNDN);
	  mpfr_clear_flags ();
	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
	  /* Remquo is independent of the rounding mode, so pass
	     inexact=0 to do_mpfr_ckconv().  */
	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
	  mpfr_clears (m0, m1, NULL);
	  if (result_rem)
	    {
	      /* MPFR calculates quo in the host's long so it may
		 return more bits in quo than the target int can hold
		 if sizeof(host long) > sizeof(target int).  This can
		 happen even for native compilers in LP64 mode.  In
		 these cases, modulo the quo value with the largest
		 number that the target int can hold while leaving one
		 bit for the sign.  */
	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
		integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

	      /* Dereference the quo pointer argument.  */
	      arg_quo = build_fold_indirect_ref (arg_quo);
	      /* Proceed iff a valid pointer type was passed in.  */
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
		{
		  /* Set the value.  */
		  tree result_quo
		    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
				   build_int_cst (TREE_TYPE (arg_quo),
						  integer_quo));
		  TREE_SIDE_EFFECTS (result_quo) = 1;
		  /* Combine the quo assignment with the rem.  */
		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						    result_quo, result_rem));
		}
	    }
	}
    }
  return result;
}
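
/* Worked example (hedged): folding remquo (5.0, 3.0, &q).  The
   quotient 5/3 rounds to the nearest integer 2, so MPFR yields the
   remainder 5 - 2*3 = -1.0 and quo = 2, and the call folds to the
   compound expression (q = 2, -1.0).  */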

/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
	 negative integer.  */
      if (real_isfinite (ra)
	  && ra->cl != rvc_zero
	  && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
	{
	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  int inexact, sg;
	  mpfr_t m;
	  tree result_lg;

	  mpfr_init2 (m, prec);
	  mpfr_from_real (m, ra, GMP_RNDN);
	  mpfr_clear_flags ();
	  inexact = mpfr_lgamma (m, &sg, m, rnd);
	  result_lg = do_mpfr_ckconv (m, type, inexact);
	  mpfr_clear (m);
	  if (result_lg)
	    {
	      tree result_sg;

	      /* Dereference the arg_sg pointer argument.  */
	      arg_sg = build_fold_indirect_ref (arg_sg);
	      /* Assign the signgam value into *arg_sg.  */
	      result_sg = fold_build2 (MODIFY_EXPR,
				       TREE_TYPE (arg_sg), arg_sg,
				       build_int_cst (TREE_TYPE (arg_sg), sg));
	      TREE_SIDE_EFFECTS (result_sg) = 1;
	      /* Combine the signgam assignment with the lgamma result.  */
	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
						result_sg, result_lg));
	    }
	}
    }

  return result;
}
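
/* Worked example (hedged): folding lgamma_r (-0.5, &sg).  Since
   Gamma(-0.5) = -2*sqrt(pi) is negative, sg is set to -1, and the
   folded value is log (2*sqrt(pi)) ~= 1.2655, combined as the compound
   expression (*arg_sg = -1, 1.2655...).  */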

/* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
   mpc function FUNC on them and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
	  || (real_isfinite (re0) && real_isfinite (im0)
	      && real_isfinite (re1) && real_isfinite (im1)))
	{
	  const struct real_format *const fmt =
	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
	  const int prec = fmt->p;
	  const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
	  int inexact;
	  mpc_t m0, m1;

	  mpc_init2 (m0, prec);
	  mpc_init2 (m1, prec);
	  mpfr_from_real (mpc_realref (m0), re0, rnd);
	  mpfr_from_real (mpc_imagref (m0), im0, rnd);
	  mpfr_from_real (mpc_realref (m1), re1, rnd);
	  mpfr_from_real (mpc_imagref (m1), im1, rnd);
	  mpfr_clear_flags ();
	  inexact = func (m0, m0, m1, crnd);
	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
	  mpc_clear (m0);
	  mpc_clear (m1);
	}
    }

  return result;
}
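
/* Hedged usage sketch: callers supply an MPC entry point as FUNC.
   For instance, cpow of two complex constants can be folded by
   passing mpc_pow:

     result = do_mpc_arg2 (arg0, arg1, type, /*do_nonfinite=*/0,
			   mpc_pow);
*/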

/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node earlier than the warning is generated.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl && fndecl_built_in_p (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
		    ? gimple_call_arg_ptr (stmt, 0)
		    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	return targetm.fold_builtin (fndecl, nargs, args, ignore);
      else
	{
	  ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
	  if (ret)
	    {
	      /* Propagate location information from the original call to
		 the expansion of the builtin.  Otherwise things like
		 maybe_emit_chk_warning, which operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
		  tree realret = ret;
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}

/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
	      && asmspec != 0);

  tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);

  if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
      && INT_TYPE_SIZE < BITS_PER_WORD)
    {
      scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
      set_user_assembler_libfunc ("ffs", asmspec);
      set_optab_libfunc (ffs_optab, mode, "ffs");
    }
}
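
/* Hedged example of source that reaches this function: redeclaring a
   builtin with a user assembler label,

     extern int ffs (int) __asm__ ("my_ffs");

   (my_ffs being an arbitrary example name) makes calls to the builtin
   (and, via the ffs_optab case above, compiler-generated libcalls)
   assemble to my_ffs instead of ffs.  */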

/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (decl))
      {
	/* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
	/* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
	/* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
	return true;

      default:
	return false;
      }

  return false;
}

/* Return true if DECL is a builtin that is not expensive, i.e. it will
   most probably be expanded inline into reasonably simple code.  This
   is a superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
	return true;

      default:
	return is_simple_builtin (decl);
      }

  return false;
}

/* Return true if T is a constant and the value cast to a target char
   can be represented by a host char.
   If so, store the cast char constant in *P.  */

bool
target_char_cst_p (tree t, char *p)
{
  if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
    return false;

  *p = (char)tree_to_uhwi (t);
  return true;
}
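
/* Hedged usage sketch: string folders (e.g. the strchr handling in
   gimple-fold.c) use this so a character argument is only treated as a
   known constant when host and target chars agree in width:

     char c;
     if (target_char_cst_p (arg1, &c))
       ...  // safe to search the host-side string for C
*/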

/* Return true if the builtin DECL is implemented in a standard library.
   Otherwise return false, which does not guarantee that DECL is not
   implemented in a library (the list of handled builtins below may be
   incomplete).  */

bool
builtin_with_linkage_p (tree decl)
{
  if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
    {
      CASE_FLT_FN (BUILT_IN_ACOS):
      CASE_FLT_FN (BUILT_IN_ACOSH):
      CASE_FLT_FN (BUILT_IN_ASIN):
      CASE_FLT_FN (BUILT_IN_ASINH):
      CASE_FLT_FN (BUILT_IN_ATAN):
      CASE_FLT_FN (BUILT_IN_ATANH):
      CASE_FLT_FN (BUILT_IN_ATAN2):
      CASE_FLT_FN (BUILT_IN_CBRT):
      CASE_FLT_FN (BUILT_IN_CEIL):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL):
      CASE_FLT_FN (BUILT_IN_COPYSIGN):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
      CASE_FLT_FN (BUILT_IN_COS):
      CASE_FLT_FN (BUILT_IN_COSH):
      CASE_FLT_FN (BUILT_IN_ERF):
      CASE_FLT_FN (BUILT_IN_ERFC):
      CASE_FLT_FN (BUILT_IN_EXP):
      CASE_FLT_FN (BUILT_IN_EXP2):
      CASE_FLT_FN (BUILT_IN_EXPM1):
      CASE_FLT_FN (BUILT_IN_FABS):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
      CASE_FLT_FN (BUILT_IN_FDIM):
      CASE_FLT_FN (BUILT_IN_FLOOR):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR):
      CASE_FLT_FN (BUILT_IN_FMA):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
      CASE_FLT_FN (BUILT_IN_FMAX):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMAX):
      CASE_FLT_FN (BUILT_IN_FMIN):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMIN):
      CASE_FLT_FN (BUILT_IN_FMOD):
      CASE_FLT_FN (BUILT_IN_FREXP):
      CASE_FLT_FN (BUILT_IN_HYPOT):
      CASE_FLT_FN (BUILT_IN_ILOGB):
      CASE_FLT_FN (BUILT_IN_LDEXP):
      CASE_FLT_FN (BUILT_IN_LGAMMA):
      CASE_FLT_FN (BUILT_IN_LLRINT):
      CASE_FLT_FN (BUILT_IN_LLROUND):
      CASE_FLT_FN (BUILT_IN_LOG):
      CASE_FLT_FN (BUILT_IN_LOG10):
      CASE_FLT_FN (BUILT_IN_LOG1P):
      CASE_FLT_FN (BUILT_IN_LOG2):
      CASE_FLT_FN (BUILT_IN_LOGB):
      CASE_FLT_FN (BUILT_IN_LRINT):
      CASE_FLT_FN (BUILT_IN_LROUND):
      CASE_FLT_FN (BUILT_IN_MODF):
      CASE_FLT_FN (BUILT_IN_NAN):
      CASE_FLT_FN (BUILT_IN_NEARBYINT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT):
      CASE_FLT_FN (BUILT_IN_NEXTAFTER):
      CASE_FLT_FN (BUILT_IN_NEXTTOWARD):
      CASE_FLT_FN (BUILT_IN_POW):
      CASE_FLT_FN (BUILT_IN_REMAINDER):
      CASE_FLT_FN (BUILT_IN_REMQUO):
      CASE_FLT_FN (BUILT_IN_RINT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT):
      CASE_FLT_FN (BUILT_IN_ROUND):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND):
      CASE_FLT_FN (BUILT_IN_SCALBLN):
      CASE_FLT_FN (BUILT_IN_SCALBN):
      CASE_FLT_FN (BUILT_IN_SIN):
      CASE_FLT_FN (BUILT_IN_SINH):
      CASE_FLT_FN (BUILT_IN_SINCOS):
      CASE_FLT_FN (BUILT_IN_SQRT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_SQRT):
      CASE_FLT_FN (BUILT_IN_TAN):
      CASE_FLT_FN (BUILT_IN_TANH):
      CASE_FLT_FN (BUILT_IN_TGAMMA):
      CASE_FLT_FN (BUILT_IN_TRUNC):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC):
	return true;
      default:
	break;
    }
  return false;
}