1 /* Expand builtin functions.
2    Copyright (C) 1988-2021 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 /* Legacy warning!  Please add no further builtin simplifications here
21    (apart from pure constant folding) - builtin simplifications should go
22    to match.pd or gimple-fold.c instead.  */
23 
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "memmodel.h"
32 #include "gimple.h"
33 #include "predict.h"
34 #include "tm_p.h"
35 #include "stringpool.h"
36 #include "tree-vrp.h"
37 #include "tree-ssanames.h"
38 #include "expmed.h"
39 #include "optabs.h"
40 #include "emit-rtl.h"
41 #include "recog.h"
42 #include "diagnostic-core.h"
43 #include "alias.h"
44 #include "fold-const.h"
45 #include "fold-const-call.h"
46 #include "gimple-ssa-warn-access.h"
47 #include "stor-layout.h"
48 #include "calls.h"
49 #include "varasm.h"
50 #include "tree-object-size.h"
51 #include "tree-ssa-strlen.h"
52 #include "realmpfr.h"
53 #include "cfgrtl.h"
54 #include "except.h"
55 #include "dojump.h"
56 #include "explow.h"
57 #include "stmt.h"
58 #include "expr.h"
59 #include "libfuncs.h"
60 #include "output.h"
61 #include "typeclass.h"
62 #include "langhooks.h"
63 #include "value-prof.h"
64 #include "builtins.h"
65 #include "stringpool.h"
66 #include "attribs.h"
67 #include "asan.h"
68 #include "internal-fn.h"
69 #include "case-cfn-macros.h"
70 #include "gimple-fold.h"
71 #include "intl.h"
72 #include "file-prefix-map.h" /* remap_macro_filename()  */
73 #include "gomp-constants.h"
74 #include "omp-general.h"
75 #include "tree-dfa.h"
76 #include "gimple-iterator.h"
77 #include "gimple-ssa.h"
78 #include "tree-ssa-live.h"
79 #include "tree-outof-ssa.h"
80 #include "attr-fnspec.h"
81 #include "demangle.h"
82 #include "gimple-range.h"
83 #include "pointer-query.h"
84 
85 struct target_builtins default_target_builtins;
86 #if SWITCHABLE_TARGET
87 struct target_builtins *this_target_builtins = &default_target_builtins;
88 #endif
89 
90 /* Define the names of the builtin function types and codes.  */
91 const char *const built_in_class_names[BUILT_IN_LAST]
92   = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
93 
94 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
95 const char * built_in_names[(int) END_BUILTINS] =
96 {
97 #include "builtins.def"
98 };
99 
/* Set up an array of builtin_info_type; make sure each element's decl is
   initialized to NULL_TREE.  */
102 builtin_info_type builtin_info[(int)END_BUILTINS];
103 
104 /* Non-zero if __builtin_constant_p should be folded right away.  */
105 bool force_folding_builtin_constant_p;
106 
107 static int target_char_cast (tree, char *);
108 static int apply_args_size (void);
109 static int apply_result_size (void);
110 static rtx result_vector (int, rtx);
111 static void expand_builtin_prefetch (tree);
112 static rtx expand_builtin_apply_args (void);
113 static rtx expand_builtin_apply_args_1 (void);
114 static rtx expand_builtin_apply (rtx, rtx, rtx);
115 static void expand_builtin_return (rtx);
116 static enum type_class type_to_class (tree);
117 static rtx expand_builtin_classify_type (tree);
118 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
119 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
120 static rtx expand_builtin_interclass_mathfn (tree, rtx);
121 static rtx expand_builtin_sincos (tree);
122 static rtx expand_builtin_cexpi (tree, rtx);
123 static rtx expand_builtin_int_roundingfn (tree, rtx);
124 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
125 static rtx expand_builtin_next_arg (void);
126 static rtx expand_builtin_va_start (tree);
127 static rtx expand_builtin_va_end (tree);
128 static rtx expand_builtin_va_copy (tree);
129 static rtx inline_expand_builtin_bytecmp (tree, rtx);
130 static rtx expand_builtin_strcmp (tree, rtx);
131 static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
132 static rtx expand_builtin_memcpy (tree, rtx);
133 static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
134 					    rtx target, tree exp,
135 					    memop_ret retmode,
136 					    bool might_overlap);
137 static rtx expand_builtin_memmove (tree, rtx);
138 static rtx expand_builtin_mempcpy (tree, rtx);
139 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
140 static rtx expand_builtin_strcpy (tree, rtx);
141 static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
142 static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
143 static rtx expand_builtin_strncpy (tree, rtx);
144 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
145 static rtx expand_builtin_bzero (tree);
146 static rtx expand_builtin_strlen (tree, rtx, machine_mode);
147 static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
148 static rtx expand_builtin_alloca (tree);
149 static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
150 static rtx expand_builtin_frame_address (tree, tree);
151 static tree stabilize_va_list_loc (location_t, tree, int);
152 static rtx expand_builtin_expect (tree, rtx);
153 static rtx expand_builtin_expect_with_probability (tree, rtx);
154 static tree fold_builtin_constant_p (tree);
155 static tree fold_builtin_classify_type (tree);
156 static tree fold_builtin_strlen (location_t, tree, tree, tree);
157 static tree fold_builtin_inf (location_t, tree, int);
158 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
159 static bool validate_arg (const_tree, enum tree_code code);
160 static rtx expand_builtin_fabs (tree, rtx, rtx);
161 static rtx expand_builtin_signbit (tree, rtx);
162 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
163 static tree fold_builtin_isascii (location_t, tree);
164 static tree fold_builtin_toascii (location_t, tree);
165 static tree fold_builtin_isdigit (location_t, tree);
166 static tree fold_builtin_fabs (location_t, tree, tree);
167 static tree fold_builtin_abs (location_t, tree, tree);
168 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
169 					enum tree_code);
170 static tree fold_builtin_varargs (location_t, tree, tree*, int);
171 
172 static tree fold_builtin_strpbrk (location_t, tree, tree, tree, tree);
173 static tree fold_builtin_strspn (location_t, tree, tree, tree);
174 static tree fold_builtin_strcspn (location_t, tree, tree, tree);
175 
176 static rtx expand_builtin_object_size (tree);
177 static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
178 				      enum built_in_function);
179 static void maybe_emit_chk_warning (tree, enum built_in_function);
180 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
181 static tree fold_builtin_object_size (tree, tree);
182 
183 unsigned HOST_WIDE_INT target_newline;
184 unsigned HOST_WIDE_INT target_percent;
185 static unsigned HOST_WIDE_INT target_c;
186 static unsigned HOST_WIDE_INT target_s;
187 char target_percent_c[3];
188 char target_percent_s[3];
189 char target_percent_s_newline[4];
190 static tree do_mpfr_remquo (tree, tree, tree);
191 static tree do_mpfr_lgamma_r (tree, tree, tree);
192 static void expand_builtin_sync_synchronize (void);
193 
/* Return true if NAME starts with __builtin_, __sync_ or __atomic_.  */
195 
196 static bool
is_builtin_name (const char *name)
198 {
199   return (startswith (name, "__builtin_")
200 	  || startswith (name, "__sync_")
201 	  || startswith (name, "__atomic_"));
202 }
203 
204 /* Return true if NODE should be considered for inline expansion regardless
205    of the optimization level.  This means whenever a function is invoked with
206    its "internal" name, which normally contains the prefix "__builtin".  */
207 
208 bool
called_as_built_in (tree node)
210 {
211   /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
212      we want the name used to call the function, not the name it
213      will have. */
214   const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
215   return is_builtin_name (name);
216 }
217 
/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and N
   in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.
222 
223    Note that the address (and thus the alignment) computed here is based
224    on the address to which a symbol resolves, whereas DECL_ALIGN is based
225    on the address at which an object is actually located.  These two
226    addresses are not always the same.  For example, on ARM targets,
227    the address &foo of a Thumb function foo() has the lowest bit set,
228    whereas foo() itself starts on an even address.
229 
230    If ADDR_P is true we are taking the address of the memory reference EXP
231    and thus cannot rely on the access taking place.  */
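
/* For instance, if the address of EXP is known to equal 16 * k + 4 bytes for
   some unknown k, then (measured in bits) M is 128 and N is 32, so 128 is
   stored in *ALIGNP and 32 in *BITPOSP.  */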
232 
233 static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
235 			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
236 {
237   poly_int64 bitsize, bitpos;
238   tree offset;
239   machine_mode mode;
240   int unsignedp, reversep, volatilep;
241   unsigned int align = BITS_PER_UNIT;
242   bool known_alignment = false;
243 
244   /* Get the innermost object and the constant (bitpos) and possibly
245      variable (offset) offset of the access.  */
246   exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
247 			     &unsignedp, &reversep, &volatilep);
248 
249   /* Extract alignment information from the innermost object and
250      possibly adjust bitpos and offset.  */
251   if (TREE_CODE (exp) == FUNCTION_DECL)
252     {
253       /* Function addresses can encode extra information besides their
254 	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
255 	 allows the low bit to be used as a virtual bit, we know
256 	 that the address itself must be at least 2-byte aligned.  */
257       if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
258 	align = 2 * BITS_PER_UNIT;
259     }
260   else if (TREE_CODE (exp) == LABEL_DECL)
261     ;
262   else if (TREE_CODE (exp) == CONST_DECL)
263     {
264       /* The alignment of a CONST_DECL is determined by its initializer.  */
265       exp = DECL_INITIAL (exp);
266       align = TYPE_ALIGN (TREE_TYPE (exp));
267       if (CONSTANT_CLASS_P (exp))
268 	align = targetm.constant_alignment (exp, align);
269 
270       known_alignment = true;
271     }
272   else if (DECL_P (exp))
273     {
274       align = DECL_ALIGN (exp);
275       known_alignment = true;
276     }
277   else if (TREE_CODE (exp) == INDIRECT_REF
278 	   || TREE_CODE (exp) == MEM_REF
279 	   || TREE_CODE (exp) == TARGET_MEM_REF)
280     {
281       tree addr = TREE_OPERAND (exp, 0);
282       unsigned ptr_align;
283       unsigned HOST_WIDE_INT ptr_bitpos;
284       unsigned HOST_WIDE_INT ptr_bitmask = ~0;
285 
      /* If the address is explicitly aligned, handle that.  */
287       if (TREE_CODE (addr) == BIT_AND_EXPR
288 	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
289 	{
290 	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
291 	  ptr_bitmask *= BITS_PER_UNIT;
292 	  align = least_bit_hwi (ptr_bitmask);
293 	  addr = TREE_OPERAND (addr, 0);
294 	}
295 
296       known_alignment
297 	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
298       align = MAX (ptr_align, align);
299 
300       /* Re-apply explicit alignment to the bitpos.  */
301       ptr_bitpos &= ptr_bitmask;
302 
303       /* The alignment of the pointer operand in a TARGET_MEM_REF
304 	 has to take the variable offset parts into account.  */
305       if (TREE_CODE (exp) == TARGET_MEM_REF)
306 	{
307 	  if (TMR_INDEX (exp))
308 	    {
309 	      unsigned HOST_WIDE_INT step = 1;
310 	      if (TMR_STEP (exp))
311 		step = TREE_INT_CST_LOW (TMR_STEP (exp));
312 	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
313 	    }
314 	  if (TMR_INDEX2 (exp))
315 	    align = BITS_PER_UNIT;
316 	  known_alignment = false;
317 	}
318 
319       /* When EXP is an actual memory reference then we can use
320 	 TYPE_ALIGN of a pointer indirection to derive alignment.
321 	 Do so only if get_pointer_alignment_1 did not reveal absolute
322 	 alignment knowledge and if using that alignment would
323 	 improve the situation.  */
324       unsigned int talign;
325       if (!addr_p && !known_alignment
326 	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
327 	  && talign > align)
328 	align = talign;
329       else
330 	{
331 	  /* Else adjust bitpos accordingly.  */
332 	  bitpos += ptr_bitpos;
333 	  if (TREE_CODE (exp) == MEM_REF
334 	      || TREE_CODE (exp) == TARGET_MEM_REF)
335 	    bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
336 	}
337     }
338   else if (TREE_CODE (exp) == STRING_CST)
339     {
      /* STRING_CSTs are the only constant objects we allow not to be
	 wrapped inside a CONST_DECL.  */
342       align = TYPE_ALIGN (TREE_TYPE (exp));
343       if (CONSTANT_CLASS_P (exp))
344 	align = targetm.constant_alignment (exp, align);
345 
346       known_alignment = true;
347     }
348 
349   /* If there is a non-constant offset part extract the maximum
350      alignment that can prevail.  */
351   if (offset)
352     {
353       unsigned int trailing_zeros = tree_ctz (offset);
354       if (trailing_zeros < HOST_BITS_PER_INT)
355 	{
356 	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
357 	  if (inner)
358 	    align = MIN (align, inner);
359 	}
360     }
361 
362   /* Account for the alignment of runtime coefficients, so that the constant
363      bitpos is guaranteed to be accurate.  */
364   unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
365   if (alt_align != 0 && alt_align < align)
366     {
367       align = alt_align;
368       known_alignment = false;
369     }
370 
371   *alignp = align;
372   *bitposp = bitpos.coeffs[0] & (align - 1);
373   return known_alignment;
374 }
375 
/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Otherwise return
   false and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.  */
380 
381 bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
383 			unsigned HOST_WIDE_INT *bitposp)
384 {
385   /* Strip a WITH_SIZE_EXPR, get_inner_reference doesn't know how to deal
386      with it.  */
387   if (TREE_CODE (exp) == WITH_SIZE_EXPR)
388     exp = TREE_OPERAND (exp, 0);
389   return get_object_alignment_2 (exp, alignp, bitposp, false);
390 }
391 
392 /* Return the alignment in bits of EXP, an object.  */
393 
394 unsigned int
get_object_alignment (tree exp)
396 {
397   unsigned HOST_WIDE_INT bitpos = 0;
398   unsigned int align;
399 
400   get_object_alignment_1 (exp, &align, &bitpos);
401 
402   /* align and bitpos now specify known low bits of the pointer.
403      ptr & (align - 1) == bitpos.  */
404 
405   if (bitpos != 0)
406     align = least_bit_hwi (bitpos);
407   return align;
408 }
409 
/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.
414 
415    If EXP is not a pointer, false is returned too.  */
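
/* For instance, for an SSA pointer whose points-to info records an alignment
   of 8 bytes and a misalignment of 2 bytes, 64 is stored in *ALIGNP and 16
   in *BITPOSP.  */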
416 
417 bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
419 			 unsigned HOST_WIDE_INT *bitposp)
420 {
421   STRIP_NOPS (exp);
422 
423   if (TREE_CODE (exp) == ADDR_EXPR)
424     return get_object_alignment_2 (TREE_OPERAND (exp, 0),
425 				   alignp, bitposp, true);
426   else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
427     {
428       unsigned int align;
429       unsigned HOST_WIDE_INT bitpos;
430       bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
431 					  &align, &bitpos);
432       if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
433 	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
434       else
435 	{
436 	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
437 	  if (trailing_zeros < HOST_BITS_PER_INT)
438 	    {
439 	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
440 	      if (inner)
441 		align = MIN (align, inner);
442 	    }
443 	}
444       *alignp = align;
445       *bitposp = bitpos & (align - 1);
446       return res;
447     }
448   else if (TREE_CODE (exp) == SSA_NAME
449 	   && POINTER_TYPE_P (TREE_TYPE (exp)))
450     {
451       unsigned int ptr_align, ptr_misalign;
452       struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
453 
454       if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
455 	{
456 	  *bitposp = ptr_misalign * BITS_PER_UNIT;
457 	  *alignp = ptr_align * BITS_PER_UNIT;
458 	  /* Make sure to return a sensible alignment when the multiplication
459 	     by BITS_PER_UNIT overflowed.  */
460 	  if (*alignp == 0)
461 	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
462 	  /* We cannot really tell whether this result is an approximation.  */
463 	  return false;
464 	}
465       else
466 	{
467 	  *bitposp = 0;
468 	  *alignp = BITS_PER_UNIT;
469 	  return false;
470 	}
471     }
472   else if (TREE_CODE (exp) == INTEGER_CST)
473     {
474       *alignp = BIGGEST_ALIGNMENT;
475       *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
476 		  & (BIGGEST_ALIGNMENT - 1));
477       return true;
478     }
479 
480   *bitposp = 0;
481   *alignp = BITS_PER_UNIT;
482   return false;
483 }
484 
485 /* Return the alignment in bits of EXP, a pointer valued expression.
486    The alignment returned is, by default, the alignment of the thing that
487    EXP points to.  If it is not a POINTER_TYPE, 0 is returned.
488 
489    Otherwise, look at the expression to see if we can do better, i.e., if the
490    expression is actually pointing at an object whose alignment is tighter.  */
491 
492 unsigned int
get_pointer_alignment (tree exp)
494 {
495   unsigned HOST_WIDE_INT bitpos = 0;
496   unsigned int align;
497 
498   get_pointer_alignment_1 (exp, &align, &bitpos);
499 
500   /* align and bitpos now specify known low bits of the pointer.
501      ptr & (align - 1) == bitpos.  */
502 
503   if (bitpos != 0)
504     align = least_bit_hwi (bitpos);
505 
506   return align;
507 }
508 
509 /* Return the number of leading non-zero elements in the sequence
510    [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
511    ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */
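
/* For instance, string_length ("ab\0cd", 1, 5) returns 2, since the third
   element is the first zero byte.  */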
512 
513 unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
515 {
516   gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
517 
518   unsigned n;
519 
520   if (eltsize == 1)
521     {
522       /* Optimize the common case of plain char.  */
523       for (n = 0; n < maxelts; n++)
524 	{
525 	  const char *elt = (const char*) ptr + n;
526 	  if (!*elt)
527 	    break;
528 	}
529     }
530   else
531     {
532       for (n = 0; n < maxelts; n++)
533 	{
534 	  const char *elt = (const char*) ptr + n * eltsize;
535 	  if (!memcmp (elt, "\0\0\0\0", eltsize))
536 	    break;
537 	}
538     }
539   return n;
540 }
541 
542 /* Compute the length of a null-terminated character string or wide
543    character string handling character sizes of 1, 2, and 4 bytes.
544    TREE_STRING_LENGTH is not the right way because it evaluates to
545    the size of the character array in bytes (as opposed to characters)
546    and because it can contain a zero byte in the middle.
547 
548    ONLY_VALUE should be nonzero if the result is not going to be emitted
549    into the instruction stream and zero if it is going to be expanded.
550    E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
551    is returned, otherwise NULL, since
552    len = c_strlen (ARG, 1); if (len) expand_expr (len, ...); would not
553    evaluate the side-effects.
554 
555    If ONLY_VALUE is two then we do not emit warnings about out-of-bound
556    accesses.  Note that this implies the result is not going to be emitted
557    into the instruction stream.
558 
559    Additional information about the string accessed may be recorded
560    in DATA.  For example, if ARG references an unterminated string,
561    then the declaration will be stored in the DECL field.   If the
562    length of the unterminated string can be determined, it'll be
563    stored in the LEN field.  Note this length could well be different
564    than what a C strlen call would return.
565 
   ELTSIZE is 1 for normal single byte character strings, and 2 or
   4 for wide character strings.  ELTSIZE is by default 1.
568 
569    The value returned is of type `ssizetype'.  */
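
/* For instance, if ARG points to the string constant "hello", the result is
   ssize_int (5).  For "foo\0bar" accessed through a non-constant offset the
   function returns NULL_TREE, since it cannot tell which NUL terminates the
   string the caller will see.  */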
570 
571 tree
c_strlen (tree arg, int only_value, c_strlen_data *data, unsigned eltsize)
573 {
574   /* If we were not passed a DATA pointer, then get one to a local
575      structure.  That avoids having to check DATA for NULL before
576      each time we want to use it.  */
577   c_strlen_data local_strlen_data = { };
578   if (!data)
579     data = &local_strlen_data;
580 
581   gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);
582 
583   tree src = STRIP_NOPS (arg);
584   if (TREE_CODE (src) == COND_EXPR
585       && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
586     {
587       tree len1, len2;
588 
589       len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
590       len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
591       if (tree_int_cst_equal (len1, len2))
592 	return len1;
593     }
594 
595   if (TREE_CODE (src) == COMPOUND_EXPR
596       && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
597     return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
598 
599   location_t loc = EXPR_LOC_OR_LOC (src, input_location);
600 
601   /* Offset from the beginning of the string in bytes.  */
602   tree byteoff;
603   tree memsize;
604   tree decl;
605   src = string_constant (src, &byteoff, &memsize, &decl);
606   if (src == 0)
607     return NULL_TREE;
608 
609   /* Determine the size of the string element.  */
610   if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
611     return NULL_TREE;
612 
613   /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
614      length of SRC.  Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
615      in case the latter is less than the size of the array, such as when
616      SRC refers to a short string literal used to initialize a large array.
617      In that case, the elements of the array after the terminating NUL are
618      all NUL.  */
619   HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
620   strelts = strelts / eltsize;
621 
622   if (!tree_fits_uhwi_p (memsize))
623     return NULL_TREE;
624 
625   HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;
626 
627   /* PTR can point to the byte representation of any string type, including
628      char* and wchar_t*.  */
629   const char *ptr = TREE_STRING_POINTER (src);
630 
631   if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
632     {
633       /* The code below works only for single byte character types.  */
634       if (eltsize != 1)
635 	return NULL_TREE;
636 
637       /* If the string has an internal NUL character followed by any
638 	 non-NUL characters (e.g., "foo\0bar"), we can't compute
639 	 the offset to the following NUL if we don't know where to
640 	 start searching for it.  */
641       unsigned len = string_length (ptr, eltsize, strelts);
642 
643       /* Return when an embedded null character is found or none at all.
644 	 In the latter case, set the DECL/LEN field in the DATA structure
645 	 so that callers may examine them.  */
646       if (len + 1 < strelts)
647 	return NULL_TREE;
648       else if (len >= maxelts)
649 	{
650 	  data->decl = decl;
651 	  data->off = byteoff;
652 	  data->minlen = ssize_int (len);
653 	  return NULL_TREE;
654 	}
655 
656       /* For empty strings the result should be zero.  */
657       if (len == 0)
658 	return ssize_int (0);
659 
660       /* We don't know the starting offset, but we do know that the string
661 	 has no internal zero bytes.  If the offset falls within the bounds
662 	 of the string subtract the offset from the length of the string,
663 	 and return that.  Otherwise the length is zero.  Take care to
664 	 use SAVE_EXPR in case the OFFSET has side-effects.  */
665       tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
666 						 : byteoff;
667       offsave = fold_convert_loc (loc, sizetype, offsave);
668       tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
669 				      size_int (len));
670       tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
671 				     offsave);
672       lenexp = fold_convert_loc (loc, ssizetype, lenexp);
673       return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
674 			      build_zero_cst (ssizetype));
675     }
676 
677   /* Offset from the beginning of the string in elements.  */
678   HOST_WIDE_INT eltoff;
679 
680   /* We have a known offset into the string.  Start searching there for
681      a null character if we can represent it as a single HOST_WIDE_INT.  */
682   if (byteoff == 0)
683     eltoff = 0;
684   else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
685     eltoff = -1;
686   else
687     eltoff = tree_to_uhwi (byteoff) / eltsize;
688 
689   /* If the offset is known to be out of bounds, warn, and call strlen at
690      runtime.  */
691   if (eltoff < 0 || eltoff >= maxelts)
692     {
693       /* Suppress multiple warnings for propagated constant strings.  */
694       if (only_value != 2
695 	  && !warning_suppressed_p (arg, OPT_Warray_bounds)
696 	  && warning_at (loc, OPT_Warray_bounds,
697 			 "offset %qwi outside bounds of constant string",
698 			 eltoff))
699 	{
700 	  if (decl)
701 	    inform (DECL_SOURCE_LOCATION (decl), "%qE declared here", decl);
702 	  suppress_warning (arg, OPT_Warray_bounds);
703 	}
704       return NULL_TREE;
705     }
706 
707   /* If eltoff is larger than strelts but less than maxelts the
708      string length is zero, since the excess memory will be zero.  */
709   if (eltoff > strelts)
710     return ssize_int (0);
711 
712   /* Use strlen to search for the first zero byte.  Since any strings
713      constructed with build_string will have nulls appended, we win even
714      if we get handed something like (char[4])"abcd".
715 
716      Since ELTOFF is our starting index into the string, no further
717      calculation is needed.  */
718   unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
719 				strelts - eltoff);
720 
721   /* Don't know what to return if there was no zero termination.
722      Ideally this would turn into a gcc_checking_assert over time.
723      Set DECL/LEN so callers can examine them.  */
724   if (len >= maxelts - eltoff)
725     {
726       data->decl = decl;
727       data->off = byteoff;
728       data->minlen = ssize_int (len);
729       return NULL_TREE;
730     }
731 
732   return ssize_int (len);
733 }
734 
735 /* Return a constant integer corresponding to target reading
736    GET_MODE_BITSIZE (MODE) bits from string constant STR.  If
737    NULL_TERMINATED_P, reading stops after '\0' character, all further ones
738    are assumed to be zero, otherwise it reads as many characters
739    as needed.  */
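
/* For instance, on an ASCII target c_readstr ("ab", SImode) yields the
   constant 0x00006261 when the target is little-endian and 0x61620000 when
   it is big-endian; bytes past the terminating NUL read as zero.  */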
740 
741 rtx
c_readstr (const char *str, scalar_int_mode mode,
743 	   bool null_terminated_p/*=true*/)
744 {
745   HOST_WIDE_INT ch;
746   unsigned int i, j;
747   HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
748 
749   gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
750   unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
751     / HOST_BITS_PER_WIDE_INT;
752 
753   gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
754   for (i = 0; i < len; i++)
755     tmp[i] = 0;
756 
757   ch = 1;
758   for (i = 0; i < GET_MODE_SIZE (mode); i++)
759     {
760       j = i;
761       if (WORDS_BIG_ENDIAN)
762 	j = GET_MODE_SIZE (mode) - i - 1;
763       if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
764 	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
765 	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
766       j *= BITS_PER_UNIT;
767 
768       if (ch || !null_terminated_p)
769 	ch = (unsigned char) str[i];
770       tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
771     }
772 
773   wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
774   return immed_wide_int_const (c, mode);
775 }
776 
/* Cast a target constant CST to the target CHAR type and, if that value fits
   into the host char type, return zero and store it in the variable pointed
   to by P.  Otherwise return one.  */
780 
781 static int
target_char_cast (tree cst, char *p)
783 {
784   unsigned HOST_WIDE_INT val, hostval;
785 
786   if (TREE_CODE (cst) != INTEGER_CST
787       || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
788     return 1;
789 
790   /* Do not care if it fits or not right here.  */
791   val = TREE_INT_CST_LOW (cst);
792 
793   if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
794     val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;
795 
796   hostval = val;
797   if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
798     hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;
799 
800   if (val != hostval)
801     return 1;
802 
803   *p = hostval;
804   return 0;
805 }
806 
807 /* Similar to save_expr, but assumes that arbitrary code is not executed
808    in between the multiple evaluations.  In particular, we assume that a
809    non-addressable local variable will not be modified.  */
810 
811 static tree
builtin_save_expr (tree exp)
813 {
814   if (TREE_CODE (exp) == SSA_NAME
815       || (TREE_ADDRESSABLE (exp) == 0
816 	  && (TREE_CODE (exp) == PARM_DECL
817 	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
818     return exp;
819 
820   return save_expr (exp);
821 }
822 
823 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
824    times to get the address of either a higher stack frame, or a return
825    address located within it (depending on FNDECL_CODE).  */
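
/* For instance, __builtin_return_address (0) reads the return address of the
   current frame, while __builtin_frame_address (1) first follows the dynamic
   chain one level up.  */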
826 
827 static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
829 {
830   int i;
831   rtx tem = INITIAL_FRAME_ADDRESS_RTX;
832   if (tem == NULL_RTX)
833     {
834       /* For a zero count with __builtin_return_address, we don't care what
835 	 frame address we return, because target-specific definitions will
836 	 override us.  Therefore frame pointer elimination is OK, and using
837 	 the soft frame pointer is OK.
838 
839 	 For a nonzero count, or a zero count with __builtin_frame_address,
840 	 we require a stable offset from the current frame pointer to the
841 	 previous one, so we must use the hard frame pointer, and
842 	 we must disable frame pointer elimination.  */
843       if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
844 	tem = frame_pointer_rtx;
845       else
846 	{
847 	  tem = hard_frame_pointer_rtx;
848 
849 	  /* Tell reload not to eliminate the frame pointer.  */
850 	  crtl->accesses_prior_frames = 1;
851 	}
852     }
853 
854   if (count > 0)
855     SETUP_FRAME_ADDRESSES ();
856 
857   /* On the SPARC, the return address is not in the frame, it is in a
858      register.  There is no way to access it off of the current frame
859      pointer, but it can be accessed off the previous frame pointer by
860      reading the value from the register window save area.  */
861   if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
862     count--;
863 
864   /* Scan back COUNT frames to the specified frame.  */
865   for (i = 0; i < count; i++)
866     {
867       /* Assume the dynamic chain pointer is in the word that the
868 	 frame address points to, unless otherwise specified.  */
869       tem = DYNAMIC_CHAIN_ADDRESS (tem);
870       tem = memory_address (Pmode, tem);
871       tem = gen_frame_mem (Pmode, tem);
872       tem = copy_to_reg (tem);
873     }
874 
875   /* For __builtin_frame_address, return what we've got.  But, on
876      the SPARC for example, we may have to add a bias.  */
877   if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
878     return FRAME_ADDR_RTX (tem);
879 
880   /* For __builtin_return_address, get the return address from that frame.  */
881 #ifdef RETURN_ADDR_RTX
882   tem = RETURN_ADDR_RTX (count, tem);
883 #else
884   tem = memory_address (Pmode,
885 			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
886   tem = gen_frame_mem (Pmode, tem);
887 #endif
888   return tem;
889 }
890 
891 /* Alias set used for setjmp buffer.  */
892 static alias_set_type setjmp_alias_set = -1;
893 
894 /* Construct the leading half of a __builtin_setjmp call.  Control will
895    return to RECEIVER_LABEL.  This is also called directly by the SJLJ
896    exception handling code.  */
897 
898 void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
900 {
901   machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
902   rtx stack_save;
903   rtx mem;
904 
905   if (setjmp_alias_set == -1)
906     setjmp_alias_set = new_alias_set ();
907 
908   buf_addr = convert_memory_address (Pmode, buf_addr);
909 
910   buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
911 
912   /* We store the frame pointer and the address of receiver_label in
913      the buffer and use the rest of it for the stack save area, which
914      is machine-dependent.  */
915 
916   mem = gen_rtx_MEM (Pmode, buf_addr);
917   set_mem_alias_set (mem, setjmp_alias_set);
918   emit_move_insn (mem, hard_frame_pointer_rtx);
919 
920   mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
922   set_mem_alias_set (mem, setjmp_alias_set);
923 
924   emit_move_insn (validize_mem (mem),
925 		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
926 
927   stack_save = gen_rtx_MEM (sa_mode,
928 			    plus_constant (Pmode, buf_addr,
929 					   2 * GET_MODE_SIZE (Pmode)));
930   set_mem_alias_set (stack_save, setjmp_alias_set);
931   emit_stack_save (SAVE_NONLOCAL, &stack_save);
932 
933   /* If there is further processing to do, do it.  */
934   if (targetm.have_builtin_setjmp_setup ())
935     emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
936 
937   /* We have a nonlocal label.   */
938   cfun->has_nonlocal_label = 1;
939 }
940 
941 /* Construct the trailing part of a __builtin_setjmp call.  This is
942    also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */
944 
945 void
expand_builtin_setjmp_receiver (rtx receiver_label)
947 {
948   rtx chain;
949 
950   /* Mark the FP as used when we get here, so we have to make sure it's
951      marked as used by this function.  */
952   emit_use (hard_frame_pointer_rtx);
953 
954   /* Mark the static chain as clobbered here so life information
955      doesn't get messed up for it.  */
956   chain = rtx_for_static_chain (current_function_decl, true);
957   if (chain && REG_P (chain))
958     emit_clobber (chain);
959 
960   if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
961     {
962       /* If the argument pointer can be eliminated in favor of the
963 	 frame pointer, we don't need to restore it.  We assume here
964 	 that if such an elimination is present, it can always be used.
965 	 This is the case on all known machines; if we don't make this
966 	 assumption, we do unnecessary saving on many machines.  */
967       size_t i;
968       static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
969 
970       for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
971 	if (elim_regs[i].from == ARG_POINTER_REGNUM
972 	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
973 	  break;
974 
975       if (i == ARRAY_SIZE (elim_regs))
976 	{
977 	  /* Now restore our arg pointer from the address at which it
978 	     was saved in our stack frame.  */
979 	  emit_move_insn (crtl->args.internal_arg_pointer,
980 			  copy_to_reg (get_arg_pointer_save_area ()));
981 	}
982     }
983 
984   if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
985     emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
986   else if (targetm.have_nonlocal_goto_receiver ())
987     emit_insn (targetm.gen_nonlocal_goto_receiver ());
988   else
989     { /* Nothing */ }
990 
991   /* We must not allow the code we just generated to be reordered by
992      scheduling.  Specifically, the update of the frame pointer must
993      happen immediately, not later.  */
994   emit_insn (gen_blockage ());
995 }
996 
997 /* __builtin_longjmp is passed a pointer to an array of five words (not
998    all will be used on all machines).  It operates similarly to the C
999    library function of the same name, but is more efficient.  Much of
1000    the code below is copied from the handling of non-local gotos.  */
1001 
1002 static void
expand_builtin_longjmp (rtx buf_addr, rtx value)
1004 {
1005   rtx fp, lab, stack;
1006   rtx_insn *insn, *last;
1007   machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1008 
  /* DRAP is needed for stack realignment if longjmp is expanded in the
     current function.  */
1011   if (SUPPORTS_STACK_ALIGNMENT)
1012     crtl->need_drap = true;
1013 
1014   if (setjmp_alias_set == -1)
1015     setjmp_alias_set = new_alias_set ();
1016 
1017   buf_addr = convert_memory_address (Pmode, buf_addr);
1018 
1019   buf_addr = force_reg (Pmode, buf_addr);
1020 
  /* We require the user to pass a second argument of 1, because
     that is what builtin_setjmp will return.  */
1023   gcc_assert (value == const1_rtx);
1024 
1025   last = get_last_insn ();
1026   if (targetm.have_builtin_longjmp ())
1027     emit_insn (targetm.gen_builtin_longjmp (buf_addr));
1028   else
1029     {
1030       fp = gen_rtx_MEM (Pmode, buf_addr);
1031       lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1032 					       GET_MODE_SIZE (Pmode)));
1033 
1034       stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
1035 						   2 * GET_MODE_SIZE (Pmode)));
1036       set_mem_alias_set (fp, setjmp_alias_set);
1037       set_mem_alias_set (lab, setjmp_alias_set);
1038       set_mem_alias_set (stack, setjmp_alias_set);
1039 
1040       /* Pick up FP, label, and SP from the block and jump.  This code is
1041 	 from expand_goto in stmt.c; see there for detailed comments.  */
1042       if (targetm.have_nonlocal_goto ())
1043 	/* We have to pass a value to the nonlocal_goto pattern that will
1044 	   get copied into the static_chain pointer, but it does not matter
1045 	   what that value is, because builtin_setjmp does not use it.  */
1046 	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
1047       else
1048 	{
1049 	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1050 	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1051 
1052 	  lab = copy_to_reg (lab);
1053 
1054 	  /* Restore the frame pointer and stack pointer.  We must use a
1055 	     temporary since the setjmp buffer may be a local.  */
1056 	  fp = copy_to_reg (fp);
1057 	  emit_stack_restore (SAVE_NONLOCAL, stack);
1058 
1059 	  /* Ensure the frame pointer move is not optimized.  */
1060 	  emit_insn (gen_blockage ());
1061 	  emit_clobber (hard_frame_pointer_rtx);
1062 	  emit_clobber (frame_pointer_rtx);
1063 	  emit_move_insn (hard_frame_pointer_rtx, fp);
1064 
1065 	  emit_use (hard_frame_pointer_rtx);
1066 	  emit_use (stack_pointer_rtx);
1067 	  emit_indirect_jump (lab);
1068 	}
1069     }
1070 
1071   /* Search backwards and mark the jump insn as a non-local goto.
1072      Note that this precludes the use of __builtin_longjmp to a
1073      __builtin_setjmp target in the same function.  However, we've
1074      already cautioned the user that these functions are for
1075      internal exception handling use only.  */
1076   for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1077     {
1078       gcc_assert (insn != last);
1079 
1080       if (JUMP_P (insn))
1081 	{
1082 	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1083 	  break;
1084 	}
1085       else if (CALL_P (insn))
1086 	break;
1087     }
1088 }
1089 
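/* Return true if the argument iterator ITER has not yet reached the end of
   the argument list.  */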
1090 static inline bool
more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1092 {
1093   return (iter->i < iter->n);
1094 }
1095 
1096 /* This function validates the types of a function call argument list
1097    against a specified list of tree_codes.  If the last specifier is a 0,
1098    that represents an ellipsis, otherwise the last specifier must be a
1099    VOID_TYPE.  */
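
/* For instance, validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
   checks that EXP has exactly two arguments, both of pointer type, and that
   no argument covered by a nonnull attribute is a null pointer constant.  */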
1100 
1101 static bool
validate_arglist (const_tree callexpr, ...)
1103 {
1104   enum tree_code code;
1105   bool res = 0;
1106   va_list ap;
1107   const_call_expr_arg_iterator iter;
1108   const_tree arg;
1109 
1110   va_start (ap, callexpr);
1111   init_const_call_expr_arg_iterator (callexpr, &iter);
1112 
1113   /* Get a bitmap of pointer argument numbers declared attribute nonnull.  */
1114   tree fn = CALL_EXPR_FN (callexpr);
1115   bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));
1116 
1117   for (unsigned argno = 1; ; ++argno)
1118     {
1119       code = (enum tree_code) va_arg (ap, int);
1120 
1121       switch (code)
1122 	{
1123 	case 0:
	  /* This signifies an ellipsis; any further arguments are all OK.  */
1125 	  res = true;
1126 	  goto end;
1127 	case VOID_TYPE:
1128 	  /* This signifies an endlink, if no arguments remain, return
1129 	     true, otherwise return false.  */
1130 	  res = !more_const_call_expr_args_p (&iter);
1131 	  goto end;
1132 	case POINTER_TYPE:
1133 	  /* The actual argument must be nonnull when either the whole
1134 	     called function has been declared nonnull, or when the formal
1135 	     argument corresponding to the actual argument has been.  */
1136 	  if (argmap
1137 	      && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
1138 	    {
1139 	      arg = next_const_call_expr_arg (&iter);
1140 	      if (!validate_arg (arg, code) || integer_zerop (arg))
1141 		goto end;
1142 	      break;
1143 	    }
1144 	  /* FALLTHRU */
1145 	default:
1146 	  /* If no parameters remain or the parameter's code does not
1147 	     match the specified code, return false.  Otherwise continue
1148 	     checking any remaining arguments.  */
1149 	  arg = next_const_call_expr_arg (&iter);
1150 	  if (!validate_arg (arg, code))
1151 	    goto end;
1152 	  break;
1153 	}
1154     }
1155 
1156   /* We need gotos here since we can only have one VA_CLOSE in a
1157      function.  */
1158  end: ;
1159   va_end (ap);
1160 
1161   BITMAP_FREE (argmap);
1162 
1163   return res;
1164 }
1165 
1166 /* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
1167    and the address of the save area.  */
1168 
1169 static rtx
expand_builtin_nonlocal_goto (tree exp)
1171 {
1172   tree t_label, t_save_area;
1173   rtx r_label, r_save_area, r_fp, r_sp;
1174   rtx_insn *insn;
1175 
1176   if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1177     return NULL_RTX;
1178 
1179   t_label = CALL_EXPR_ARG (exp, 0);
1180   t_save_area = CALL_EXPR_ARG (exp, 1);
1181 
1182   r_label = expand_normal (t_label);
1183   r_label = convert_memory_address (Pmode, r_label);
1184   r_save_area = expand_normal (t_save_area);
1185   r_save_area = convert_memory_address (Pmode, r_save_area);
1186   /* Copy the address of the save location to a register just in case it was
1187      based on the frame pointer.   */
1188   r_save_area = copy_to_reg (r_save_area);
1189   r_fp = gen_rtx_MEM (Pmode, r_save_area);
1190   r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1191 		      plus_constant (Pmode, r_save_area,
1192 				     GET_MODE_SIZE (Pmode)));
1193 
1194   crtl->has_nonlocal_goto = 1;
1195 
1196   /* ??? We no longer need to pass the static chain value, afaik.  */
1197   if (targetm.have_nonlocal_goto ())
1198     emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1199   else
1200     {
1201       emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1202       emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1203 
1204       r_label = copy_to_reg (r_label);
1205 
1206       /* Restore the frame pointer and stack pointer.  We must use a
1207 	 temporary since the setjmp buffer may be a local.  */
1208       r_fp = copy_to_reg (r_fp);
1209       emit_stack_restore (SAVE_NONLOCAL, r_sp);
1210 
1211       /* Ensure the frame pointer move is not optimized.  */
1212       emit_insn (gen_blockage ());
1213       emit_clobber (hard_frame_pointer_rtx);
1214       emit_clobber (frame_pointer_rtx);
1215       emit_move_insn (hard_frame_pointer_rtx, r_fp);
1216 
1217       /* USE of hard_frame_pointer_rtx added for consistency;
1218 	 not clear if really needed.  */
1219       emit_use (hard_frame_pointer_rtx);
1220       emit_use (stack_pointer_rtx);
1221 
1222       /* If the architecture is using a GP register, we must
1223 	 conservatively assume that the target function makes use of it.
1224 	 The prologue of functions with nonlocal gotos must therefore
1225 	 initialize the GP register to the appropriate value, and we
1226 	 must then make sure that this value is live at the point
1227 	 of the jump.  (Note that this doesn't necessarily apply
1228 	 to targets with a nonlocal_goto pattern; they are free
1229 	 to implement it in their own way.  Note also that this is
1230 	 a no-op if the GP register is a global invariant.)  */
1231       unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
1232       if (regnum != INVALID_REGNUM && fixed_regs[regnum])
1233 	emit_use (pic_offset_table_rtx);
1234 
1235       emit_indirect_jump (r_label);
1236     }
1237 
1238   /* Search backwards to the jump insn and mark it as a
1239      non-local goto.  */
1240   for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1241     {
1242       if (JUMP_P (insn))
1243 	{
1244 	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1245 	  break;
1246 	}
1247       else if (CALL_P (insn))
1248 	break;
1249     }
1250 
1251   return const0_rtx;
1252 }
1253 
1254 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1255    (not all will be used on all machines) that was passed to __builtin_setjmp.
1256    It updates the stack pointer in that block to the current value.  This is
1257    also called directly by the SJLJ exception handling code.  */
1258 
1259 void
expand_builtin_update_setjmp_buf (rtx buf_addr)
1261 {
1262   machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1263   buf_addr = convert_memory_address (Pmode, buf_addr);
1264   rtx stack_save
1265     = gen_rtx_MEM (sa_mode,
1266 		   memory_address
1267 		   (sa_mode,
1268 		    plus_constant (Pmode, buf_addr,
1269 				   2 * GET_MODE_SIZE (Pmode))));
1270 
1271   emit_stack_save (SAVE_NONLOCAL, &stack_save);
1272 }
1273 
1274 /* Expand a call to __builtin_prefetch.  For a target that does not support
1275    data prefetch, evaluate the memory address argument in case it has side
1276    effects.  */
1277 
1278 static void
expand_builtin_prefetch (tree exp)
1280 {
1281   tree arg0, arg1, arg2;
1282   int nargs;
1283   rtx op0, op1, op2;
1284 
1285   if (!validate_arglist (exp, POINTER_TYPE, 0))
1286     return;
1287 
1288   arg0 = CALL_EXPR_ARG (exp, 0);
1289 
1290   /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1291      zero (read) and argument 2 (locality) defaults to 3 (high degree of
1292      locality).  */
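  /* For instance, __builtin_prefetch (p) is handled like
     __builtin_prefetch (p, 0, 3).  */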
1293   nargs = call_expr_nargs (exp);
1294   if (nargs > 1)
1295     arg1 = CALL_EXPR_ARG (exp, 1);
1296   else
1297     arg1 = integer_zero_node;
1298   if (nargs > 2)
1299     arg2 = CALL_EXPR_ARG (exp, 2);
1300   else
1301     arg2 = integer_three_node;
1302 
1303   /* Argument 0 is an address.  */
1304   op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1305 
1306   /* Argument 1 (read/write flag) must be a compile-time constant int.  */
1307   if (TREE_CODE (arg1) != INTEGER_CST)
1308     {
1309       error ("second argument to %<__builtin_prefetch%> must be a constant");
1310       arg1 = integer_zero_node;
1311     }
1312   op1 = expand_normal (arg1);
1313   /* Argument 1 must be either zero or one.  */
1314   if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1315     {
1316       warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1317 	       " using zero");
1318       op1 = const0_rtx;
1319     }
1320 
1321   /* Argument 2 (locality) must be a compile-time constant int.  */
1322   if (TREE_CODE (arg2) != INTEGER_CST)
1323     {
1324       error ("third argument to %<__builtin_prefetch%> must be a constant");
1325       arg2 = integer_zero_node;
1326     }
1327   op2 = expand_normal (arg2);
1328   /* Argument 2 must be 0, 1, 2, or 3.  */
1329   if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1330     {
1331       warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1332       op2 = const0_rtx;
1333     }
1334 
1335   if (targetm.have_prefetch ())
1336     {
1337       class expand_operand ops[3];
1338 
1339       create_address_operand (&ops[0], op0);
1340       create_integer_operand (&ops[1], INTVAL (op1));
1341       create_integer_operand (&ops[2], INTVAL (op2));
1342       if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
1343 	return;
1344     }
1345 
1346   /* Don't do anything with direct references to volatile memory, but
1347      generate code to handle other side effects.  */
1348   if (!MEM_P (op0) && side_effects_p (op0))
1349     emit_insn (op0);
1350 }
1351 
1352 /* Get a MEM rtx for expression EXP which is the address of an operand
1353    to be used in a string instruction (cmpstrsi, cpymemsi, ..).  LEN is
1354    the maximum length of the block of memory that might be accessed or
1355    NULL if unknown.  */
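
/* For instance, expanders for memcpy-like builtins call get_memory_rtx on
   their destination and source arguments to obtain BLKmode MEMs describing
   the accessed blocks.  */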
1356 
1357 rtx
get_memory_rtx (tree exp, tree len)
1359 {
1360   tree orig_exp = exp;
1361   rtx addr, mem;
1362 
  /* When EXP is not a resolved SAVE_EXPR, MEM_ATTRS can still be derived
     from its expression; for expr->a.b only <variable>.a.b is recorded.  */
1365   if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1366     exp = TREE_OPERAND (exp, 0);
1367 
1368   addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1369   mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1370 
1371   /* Get an expression we can use to find the attributes to assign to MEM.
1372      First remove any nops.  */
1373   while (CONVERT_EXPR_P (exp)
1374 	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1375     exp = TREE_OPERAND (exp, 0);
1376 
  /* Build a MEM_REF representing the whole accessed area as a byte blob
     (as builtin stringops may alias with anything).  */
1379   exp = fold_build2 (MEM_REF,
1380 		     build_array_type (char_type_node,
1381 				       build_range_type (sizetype,
1382 							 size_one_node, len)),
1383 		     exp, build_int_cst (ptr_type_node, 0));
1384 
1385   /* If the MEM_REF has no acceptable address, try to get the base object
1386      from the original address we got, and build an all-aliasing
1387      unknown-sized access to that one.  */
1388   if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1389     set_mem_attributes (mem, exp, 0);
1390   else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1391 	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1392 						     0))))
1393     {
1394       exp = build_fold_addr_expr (exp);
1395       exp = fold_build2 (MEM_REF,
1396 			 build_array_type (char_type_node,
1397 					   build_range_type (sizetype,
1398 							     size_zero_node,
1399 							     NULL)),
1400 			 exp, build_int_cst (ptr_type_node, 0));
1401       set_mem_attributes (mem, exp, 0);
1402     }
1403   set_mem_alias_set (mem, 0);
1404   return mem;
1405 }
1406 
1407 /* Built-in functions to perform an untyped call and return.  */
1408 
1409 #define apply_args_mode \
1410   (this_target_builtins->x_apply_args_mode)
1411 #define apply_result_mode \
1412   (this_target_builtins->x_apply_result_mode)
1413 
1414 /* Return the size required for the block returned by __builtin_apply_args,
1415    and initialize apply_args_mode.  */
1416 
1417 static int
apply_args_size (void)
1419 {
1420   static int size = -1;
1421   int align;
1422   unsigned int regno;
1423 
1424   /* The values computed by this function never change.  */
1425   if (size < 0)
1426     {
1427       /* The first value is the incoming arg-pointer.  */
1428       size = GET_MODE_SIZE (Pmode);
1429 
1430       /* The second value is the structure value address unless this is
1431 	 passed as an "invisible" first argument.  */
1432       if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1433 	size += GET_MODE_SIZE (Pmode);
1434 
1435       for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1436 	if (FUNCTION_ARG_REGNO_P (regno))
1437 	  {
1438 	    fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);
1439 
1440 	    gcc_assert (mode != VOIDmode);
1441 
1442 	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1443 	    if (size % align != 0)
1444 	      size = CEIL (size, align) * align;
1445 	    size += GET_MODE_SIZE (mode);
1446 	    apply_args_mode[regno] = mode;
1447 	  }
1448 	else
1449 	  {
1450 	    apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1451 	  }
1452     }
1453   return size;
1454 }
1455 
1456 /* Return the size required for the block returned by __builtin_apply,
1457    and initialize apply_result_mode.  */
1458 
1459 static int
apply_result_size (void)
1461 {
1462   static int size = -1;
1463   int align, regno;
1464 
1465   /* The values computed by this function never change.  */
1466   if (size < 0)
1467     {
1468       size = 0;
1469 
1470       for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1471 	if (targetm.calls.function_value_regno_p (regno))
1472 	  {
1473 	    fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);
1474 
1475 	    gcc_assert (mode != VOIDmode);
1476 
1477 	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1478 	    if (size % align != 0)
1479 	      size = CEIL (size, align) * align;
1480 	    size += GET_MODE_SIZE (mode);
1481 	    apply_result_mode[regno] = mode;
1482 	  }
1483 	else
1484 	  apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
1485 
1486       /* Allow targets that use untyped_call and untyped_return to override
1487 	 the size so that machine-specific information can be stored here.  */
1488 #ifdef APPLY_RESULT_SIZE
1489       size = APPLY_RESULT_SIZE;
1490 #endif
1491     }
1492   return size;
1493 }
1494 
1495 /* Create a vector describing the result block RESULT.  If SAVEP is true,
1496    the result block is used to save the values; otherwise it is used to
1497    restore the values.  */
1498 
1499 static rtx
1500 result_vector (int savep, rtx result)
1501 {
1502   int regno, size, align, nelts;
1503   fixed_size_mode mode;
1504   rtx reg, mem;
1505   rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1506 
1507   size = nelts = 0;
1508   for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1509     if ((mode = apply_result_mode[regno]) != VOIDmode)
1510       {
1511 	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1512 	if (size % align != 0)
1513 	  size = CEIL (size, align) * align;
1514 	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1515 	mem = adjust_address (result, mode, size);
1516 	savevec[nelts++] = (savep
1517 			    ? gen_rtx_SET (mem, reg)
1518 			    : gen_rtx_SET (reg, mem));
1519 	size += GET_MODE_SIZE (mode);
1520       }
1521   return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1522 }
1523 
1524 /* Save the state required to perform an untyped call with the same
1525    arguments as were passed to the current function.  */
1526 
1527 static rtx
1528 expand_builtin_apply_args_1 (void)
1529 {
1530   rtx registers, tem;
1531   int size, align, regno;
1532   fixed_size_mode mode;
1533   rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1534 
1535   /* Create a block where the arg-pointer, structure value address,
1536      and argument registers can be saved.  */
1537   registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1538 
1539   /* Walk past the arg-pointer and structure value address.  */
1540   size = GET_MODE_SIZE (Pmode);
1541   if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1542     size += GET_MODE_SIZE (Pmode);
1543 
1544   /* Save each register used in calling a function to the block.  */
1545   for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1546     if ((mode = apply_args_mode[regno]) != VOIDmode)
1547       {
1548 	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1549 	if (size % align != 0)
1550 	  size = CEIL (size, align) * align;
1551 
1552 	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1553 
1554 	emit_move_insn (adjust_address (registers, mode, size), tem);
1555 	size += GET_MODE_SIZE (mode);
1556       }
1557 
1558   /* Save the arg pointer to the block.  */
1559   tem = copy_to_reg (crtl->args.internal_arg_pointer);
1560   /* We need the pointer as the caller actually passed the arguments to
1561      us, not as we might have pretended they were passed.  Make sure it's
1562      a valid operand, as emit_move_insn isn't expected to handle a PLUS.  */
1563   if (STACK_GROWS_DOWNWARD)
1564     tem
1565       = force_operand (plus_constant (Pmode, tem,
1566 				      crtl->args.pretend_args_size),
1567 		       NULL_RTX);
1568   emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1569 
1570   size = GET_MODE_SIZE (Pmode);
1571 
1572   /* Save the structure value address unless this is passed as an
1573      "invisible" first argument.  */
1574   if (struct_incoming_value)
1575     emit_move_insn (adjust_address (registers, Pmode, size),
1576 		    copy_to_reg (struct_incoming_value));
1577 
1578   /* Return the address of the block.  */
1579   return copy_addr_to_reg (XEXP (registers, 0));
1580 }
1581 
1582 /* __builtin_apply_args returns a block of memory allocated on
1583    the stack into which is stored the arg pointer, structure
1584    value address, static chain, and all the registers that might
1585    possibly be used in performing a function call.  The code is
1586    moved to the start of the function so the incoming values are
1587    saved.  */
1588 
1589 static rtx
1590 expand_builtin_apply_args (void)
1591 {
1592   /* Don't do __builtin_apply_args more than once in a function.
1593      Save the result of the first call and reuse it.  */
1594   if (apply_args_value != 0)
1595     return apply_args_value;
1596   {
1597     /* When this function is called, it means that registers must be
1598        saved on entry to this function.  So we migrate the
1599        call to the first insn of this function.  */
1600     rtx temp;
1601 
1602     start_sequence ();
1603     temp = expand_builtin_apply_args_1 ();
1604     rtx_insn *seq = get_insns ();
1605     end_sequence ();
1606 
1607     apply_args_value = temp;
1608 
1609     /* Put the insns after the NOTE that starts the function.
1610        If this is inside a start_sequence, make the outer-level insn
1611        chain current, so the code is placed at the start of the
1612        function.  If internal_arg_pointer is a non-virtual pseudo,
1613        it needs to be placed after the function that initializes
1614        that pseudo.  */
1615     push_topmost_sequence ();
1616     if (REG_P (crtl->args.internal_arg_pointer)
1617 	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1618       emit_insn_before (seq, parm_birth_insn);
1619     else
1620       emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1621     pop_topmost_sequence ();
1622     return temp;
1623   }
1624 }
1625 
1626 /* Perform an untyped call and save the state required to perform an
1627    untyped return of whatever value was returned by the given function.  */
1628 
1629 static rtx
1630 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1631 {
1632   int size, align, regno;
1633   fixed_size_mode mode;
1634   rtx incoming_args, result, reg, dest, src;
1635   rtx_call_insn *call_insn;
1636   rtx old_stack_level = 0;
1637   rtx call_fusage = 0;
1638   rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1639 
1640   arguments = convert_memory_address (Pmode, arguments);
1641 
1642   /* Create a block where the return registers can be saved.  */
1643   result = assign_stack_local (BLKmode, apply_result_size (), -1);
1644 
1645   /* Fetch the arg pointer from the ARGUMENTS block.  */
1646   incoming_args = gen_reg_rtx (Pmode);
1647   emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1648   if (!STACK_GROWS_DOWNWARD)
1649     incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1650 					 incoming_args, 0, OPTAB_LIB_WIDEN);
1651 
1652   /* Push a new argument block and copy the arguments.  Do not allow
1653      the (potential) memcpy call below to interfere with our stack
1654      manipulations.  */
1655   do_pending_stack_adjust ();
1656   NO_DEFER_POP;
1657 
1658   /* Save the stack with nonlocal if available.  */
1659   if (targetm.have_save_stack_nonlocal ())
1660     emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1661   else
1662     emit_stack_save (SAVE_BLOCK, &old_stack_level);
1663 
1664   /* Allocate a block of memory onto the stack and copy the memory
1665      arguments to the outgoing arguments address.  We can pass TRUE
1666      as the 4th argument because we just saved the stack pointer
1667      and will restore it right after the call.  */
1668   allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
1669 
1670   /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1671      may have already set current_function_calls_alloca to true.
1672      current_function_calls_alloca won't be set if argsize is zero,
1673      so we have to guarantee need_drap is true here.  */
1674   if (SUPPORTS_STACK_ALIGNMENT)
1675     crtl->need_drap = true;
1676 
1677   dest = virtual_outgoing_args_rtx;
1678   if (!STACK_GROWS_DOWNWARD)
1679     {
1680       if (CONST_INT_P (argsize))
1681 	dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1682       else
1683 	dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1684     }
1685   dest = gen_rtx_MEM (BLKmode, dest);
1686   set_mem_align (dest, PARM_BOUNDARY);
1687   src = gen_rtx_MEM (BLKmode, incoming_args);
1688   set_mem_align (src, PARM_BOUNDARY);
1689   emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1690 
1691   /* Refer to the argument block.  */
1692   apply_args_size ();
1693   arguments = gen_rtx_MEM (BLKmode, arguments);
1694   set_mem_align (arguments, PARM_BOUNDARY);
1695 
1696   /* Walk past the arg-pointer and structure value address.  */
1697   size = GET_MODE_SIZE (Pmode);
1698   if (struct_value)
1699     size += GET_MODE_SIZE (Pmode);
1700 
1701   /* Restore each of the registers previously saved.  Make USE insns
1702      for each of these registers for use in making the call.  */
1703   for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1704     if ((mode = apply_args_mode[regno]) != VOIDmode)
1705       {
1706 	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1707 	if (size % align != 0)
1708 	  size = CEIL (size, align) * align;
1709 	reg = gen_rtx_REG (mode, regno);
1710 	emit_move_insn (reg, adjust_address (arguments, mode, size));
1711 	use_reg (&call_fusage, reg);
1712 	size += GET_MODE_SIZE (mode);
1713       }
1714 
1715   /* Restore the structure value address unless this is passed as an
1716      "invisible" first argument.  */
1717   size = GET_MODE_SIZE (Pmode);
1718   if (struct_value)
1719     {
1720       rtx value = gen_reg_rtx (Pmode);
1721       emit_move_insn (value, adjust_address (arguments, Pmode, size));
1722       emit_move_insn (struct_value, value);
1723       if (REG_P (struct_value))
1724 	use_reg (&call_fusage, struct_value);
1725     }
1726 
1727   /* All arguments and registers used for the call are set up by now!  */
1728   function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1729 
1730   /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
1731      and we don't want to load it into a register as an optimization,
1732      because prepare_call_address already did it if it should be done.  */
1733   if (GET_CODE (function) != SYMBOL_REF)
1734     function = memory_address (FUNCTION_MODE, function);
1735 
1736   /* Generate the actual call instruction and save the return value.  */
1737   if (targetm.have_untyped_call ())
1738     {
1739       rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1740       rtx_insn *seq = targetm.gen_untyped_call (mem, result,
1741 						result_vector (1, result));
1742       for (rtx_insn *insn = seq; insn; insn = NEXT_INSN (insn))
1743 	if (CALL_P (insn))
1744 	  add_reg_note (insn, REG_UNTYPED_CALL, NULL_RTX);
1745       emit_insn (seq);
1746     }
1747   else if (targetm.have_call_value ())
1748     {
1749       rtx valreg = 0;
1750 
1751       /* Locate the unique return register.  It is not possible to
1752 	 express a call that sets more than one return register using
1753 	 call_value; use untyped_call for that.  In fact, untyped_call
1754 	 only needs to save the return registers in the given block.  */
1755       for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1756 	if ((mode = apply_result_mode[regno]) != VOIDmode)
1757 	  {
1758 	    gcc_assert (!valreg); /* have_untyped_call required.  */
1759 
1760 	    valreg = gen_rtx_REG (mode, regno);
1761 	  }
1762 
1763       emit_insn (targetm.gen_call_value (valreg,
1764 					 gen_rtx_MEM (FUNCTION_MODE, function),
1765 					 const0_rtx, NULL_RTX, const0_rtx));
1766 
1767       emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1768     }
1769   else
1770     gcc_unreachable ();
1771 
1772   /* Find the CALL insn we just emitted, and attach the register usage
1773      information.  */
1774   call_insn = last_call_insn ();
1775   add_function_usage_to (call_insn, call_fusage);
1776 
1777   /* Restore the stack.  */
1778   if (targetm.have_save_stack_nonlocal ())
1779     emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1780   else
1781     emit_stack_restore (SAVE_BLOCK, old_stack_level);
1782   fixup_args_size_notes (call_insn, get_last_insn (), 0);
1783 
1784   OK_DEFER_POP;
1785 
1786   /* Return the address of the result block.  */
1787   result = copy_addr_to_reg (XEXP (result, 0));
1788   return convert_memory_address (ptr_mode, result);
1789 }
1790 
1791 /* Perform an untyped return.  */
1792 
1793 static void
1794 expand_builtin_return (rtx result)
1795 {
1796   int size, align, regno;
1797   fixed_size_mode mode;
1798   rtx reg;
1799   rtx_insn *call_fusage = 0;
1800 
1801   result = convert_memory_address (Pmode, result);
1802 
1803   apply_result_size ();
1804   result = gen_rtx_MEM (BLKmode, result);
1805 
1806   if (targetm.have_untyped_return ())
1807     {
1808       rtx vector = result_vector (0, result);
1809       emit_jump_insn (targetm.gen_untyped_return (result, vector));
1810       emit_barrier ();
1811       return;
1812     }
1813 
1814   /* Restore the return value and note that each value is used.  */
1815   size = 0;
1816   for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1817     if ((mode = apply_result_mode[regno]) != VOIDmode)
1818       {
1819 	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1820 	if (size % align != 0)
1821 	  size = CEIL (size, align) * align;
1822 	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1823 	emit_move_insn (reg, adjust_address (result, mode, size));
1824 
1825 	push_to_sequence (call_fusage);
1826 	emit_use (reg);
1827 	call_fusage = get_insns ();
1828 	end_sequence ();
1829 	size += GET_MODE_SIZE (mode);
1830       }
1831 
1832   /* Put the USE insns before the return.  */
1833   emit_insn (call_fusage);
1834 
1835   /* Return whatever value was restored by jumping directly to the end
1836      of the function.  */
1837   expand_naked_return ();
1838 }
1839 
1840 /* Used by expand_builtin_classify_type and fold_builtin_classify_type.  */
1841 
1842 static enum type_class
1843 type_to_class (tree type)
1844 {
1845   switch (TREE_CODE (type))
1846     {
1847     case VOID_TYPE:	   return void_type_class;
1848     case INTEGER_TYPE:	   return integer_type_class;
1849     case ENUMERAL_TYPE:	   return enumeral_type_class;
1850     case BOOLEAN_TYPE:	   return boolean_type_class;
1851     case POINTER_TYPE:	   return pointer_type_class;
1852     case REFERENCE_TYPE:   return reference_type_class;
1853     case OFFSET_TYPE:	   return offset_type_class;
1854     case REAL_TYPE:	   return real_type_class;
1855     case COMPLEX_TYPE:	   return complex_type_class;
1856     case FUNCTION_TYPE:	   return function_type_class;
1857     case METHOD_TYPE:	   return method_type_class;
1858     case RECORD_TYPE:	   return record_type_class;
1859     case UNION_TYPE:
1860     case QUAL_UNION_TYPE:  return union_type_class;
1861     case ARRAY_TYPE:	   return (TYPE_STRING_FLAG (type)
1862 				   ? string_type_class : array_type_class);
1863     case LANG_TYPE:	   return lang_type_class;
1864     case OPAQUE_TYPE:      return opaque_type_class;
1865     default:		   return no_type_class;
1866     }
1867 }
1868 
1869 /* Expand a call EXP to __builtin_classify_type.  */
1870 
1871 static rtx
1872 expand_builtin_classify_type (tree exp)
1873 {
1874   if (call_expr_nargs (exp))
1875     return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1876   return GEN_INT (no_type_class);
1877 }
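
/* For example, __builtin_classify_type (1.0) evaluates to real_type_class
   and __builtin_classify_type (42) to integer_type_class; the numeric values
   of these classes come from enum type_class in typeclass.h.  */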
1878 
1879 /* This helper macro, meant to be used in mathfn_built_in below, determines
1880    which among a set of builtin math functions is appropriate for a given type
1881    mode.  The `F' (float) and `L' (long double) are automatically generated
1882    from the 'double' case.  If a function supports the _Float<N> and _Float<N>X
1883    types, there are additional types that are considered with 'F32', 'F64',
1884    'F128', etc. suffixes.  */
1885 #define CASE_MATHFN(MATHFN) \
1886   CASE_CFN_##MATHFN: \
1887   fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1888   fcodel = BUILT_IN_##MATHFN##L ; break;
1889 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
1890    types.  */
1891 #define CASE_MATHFN_FLOATN(MATHFN) \
1892   CASE_CFN_##MATHFN: \
1893   fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1894   fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
1895   fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
1896   fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
1897   fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
1898   break;
1899 /* Similar to above, but appends _R after any F/L suffix.  */
1900 #define CASE_MATHFN_REENT(MATHFN) \
1901   case CFN_BUILT_IN_##MATHFN##_R: \
1902   case CFN_BUILT_IN_##MATHFN##F_R: \
1903   case CFN_BUILT_IN_##MATHFN##L_R: \
1904   fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1905   fcodel = BUILT_IN_##MATHFN##L_R ; break;
1906 
1907 /* Return a function equivalent to FN but operating on floating-point
1908    values of type TYPE, or END_BUILTINS if no such function exists.
1909    This is purely an operation on function codes; it does not guarantee
1910    that the target actually has an implementation of the function.  */
1911 
1912 static built_in_function
1913 mathfn_built_in_2 (tree type, combined_fn fn)
1914 {
1915   tree mtype;
1916   built_in_function fcode, fcodef, fcodel;
1917   built_in_function fcodef16 = END_BUILTINS;
1918   built_in_function fcodef32 = END_BUILTINS;
1919   built_in_function fcodef64 = END_BUILTINS;
1920   built_in_function fcodef128 = END_BUILTINS;
1921   built_in_function fcodef32x = END_BUILTINS;
1922   built_in_function fcodef64x = END_BUILTINS;
1923   built_in_function fcodef128x = END_BUILTINS;
1924 
1925   switch (fn)
1926     {
1927 #define SEQ_OF_CASE_MATHFN			\
1928     CASE_MATHFN (ACOS)				\
1929     CASE_MATHFN (ACOSH)				\
1930     CASE_MATHFN (ASIN)				\
1931     CASE_MATHFN (ASINH)				\
1932     CASE_MATHFN (ATAN)				\
1933     CASE_MATHFN (ATAN2)				\
1934     CASE_MATHFN (ATANH)				\
1935     CASE_MATHFN (CBRT)				\
1936     CASE_MATHFN_FLOATN (CEIL)			\
1937     CASE_MATHFN (CEXPI)				\
1938     CASE_MATHFN_FLOATN (COPYSIGN)		\
1939     CASE_MATHFN (COS)				\
1940     CASE_MATHFN (COSH)				\
1941     CASE_MATHFN (DREM)				\
1942     CASE_MATHFN (ERF)				\
1943     CASE_MATHFN (ERFC)				\
1944     CASE_MATHFN (EXP)				\
1945     CASE_MATHFN (EXP10)				\
1946     CASE_MATHFN (EXP2)				\
1947     CASE_MATHFN (EXPM1)				\
1948     CASE_MATHFN (FABS)				\
1949     CASE_MATHFN (FDIM)				\
1950     CASE_MATHFN_FLOATN (FLOOR)			\
1951     CASE_MATHFN_FLOATN (FMA)			\
1952     CASE_MATHFN_FLOATN (FMAX)			\
1953     CASE_MATHFN_FLOATN (FMIN)			\
1954     CASE_MATHFN (FMOD)				\
1955     CASE_MATHFN (FREXP)				\
1956     CASE_MATHFN (GAMMA)				\
1957     CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */	\
1958     CASE_MATHFN (HUGE_VAL)			\
1959     CASE_MATHFN (HYPOT)				\
1960     CASE_MATHFN (ILOGB)				\
1961     CASE_MATHFN (ICEIL)				\
1962     CASE_MATHFN (IFLOOR)			\
1963     CASE_MATHFN (INF)				\
1964     CASE_MATHFN (IRINT)				\
1965     CASE_MATHFN (IROUND)			\
1966     CASE_MATHFN (ISINF)				\
1967     CASE_MATHFN (J0)				\
1968     CASE_MATHFN (J1)				\
1969     CASE_MATHFN (JN)				\
1970     CASE_MATHFN (LCEIL)				\
1971     CASE_MATHFN (LDEXP)				\
1972     CASE_MATHFN (LFLOOR)			\
1973     CASE_MATHFN (LGAMMA)			\
1974     CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */	\
1975     CASE_MATHFN (LLCEIL)			\
1976     CASE_MATHFN (LLFLOOR)			\
1977     CASE_MATHFN (LLRINT)			\
1978     CASE_MATHFN (LLROUND)			\
1979     CASE_MATHFN (LOG)				\
1980     CASE_MATHFN (LOG10)				\
1981     CASE_MATHFN (LOG1P)				\
1982     CASE_MATHFN (LOG2)				\
1983     CASE_MATHFN (LOGB)				\
1984     CASE_MATHFN (LRINT)				\
1985     CASE_MATHFN (LROUND)			\
1986     CASE_MATHFN (MODF)				\
1987     CASE_MATHFN (NAN)				\
1988     CASE_MATHFN (NANS)				\
1989     CASE_MATHFN_FLOATN (NEARBYINT)		\
1990     CASE_MATHFN (NEXTAFTER)			\
1991     CASE_MATHFN (NEXTTOWARD)			\
1992     CASE_MATHFN (POW)				\
1993     CASE_MATHFN (POWI)				\
1994     CASE_MATHFN (POW10)				\
1995     CASE_MATHFN (REMAINDER)			\
1996     CASE_MATHFN (REMQUO)			\
1997     CASE_MATHFN_FLOATN (RINT)			\
1998     CASE_MATHFN_FLOATN (ROUND)			\
1999     CASE_MATHFN_FLOATN (ROUNDEVEN)		\
2000     CASE_MATHFN (SCALB)				\
2001     CASE_MATHFN (SCALBLN)			\
2002     CASE_MATHFN (SCALBN)			\
2003     CASE_MATHFN (SIGNBIT)			\
2004     CASE_MATHFN (SIGNIFICAND)			\
2005     CASE_MATHFN (SIN)				\
2006     CASE_MATHFN (SINCOS)			\
2007     CASE_MATHFN (SINH)				\
2008     CASE_MATHFN_FLOATN (SQRT)			\
2009     CASE_MATHFN (TAN)				\
2010     CASE_MATHFN (TANH)				\
2011     CASE_MATHFN (TGAMMA)			\
2012     CASE_MATHFN_FLOATN (TRUNC)			\
2013     CASE_MATHFN (Y0)				\
2014     CASE_MATHFN (Y1)				\
2015     CASE_MATHFN (YN)
2016 
2017     SEQ_OF_CASE_MATHFN
2018 
2019     default:
2020       return END_BUILTINS;
2021     }
2022 
2023   mtype = TYPE_MAIN_VARIANT (type);
2024   if (mtype == double_type_node)
2025     return fcode;
2026   else if (mtype == float_type_node)
2027     return fcodef;
2028   else if (mtype == long_double_type_node)
2029     return fcodel;
2030   else if (mtype == float16_type_node)
2031     return fcodef16;
2032   else if (mtype == float32_type_node)
2033     return fcodef32;
2034   else if (mtype == float64_type_node)
2035     return fcodef64;
2036   else if (mtype == float128_type_node)
2037     return fcodef128;
2038   else if (mtype == float32x_type_node)
2039     return fcodef32x;
2040   else if (mtype == float64x_type_node)
2041     return fcodef64x;
2042   else if (mtype == float128x_type_node)
2043     return fcodef128x;
2044   else
2045     return END_BUILTINS;
2046 }
2047 
2048 #undef CASE_MATHFN
2049 #undef CASE_MATHFN_FLOATN
2050 #undef CASE_MATHFN_REENT
2051 
2052 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
2053    if available.  If IMPLICIT_P is true use the implicit builtin declaration,
2054    otherwise use the explicit declaration.  If we can't do the conversion,
2055    return null.  */
2056 
2057 static tree
2058 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2059 {
2060   built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2061   if (fcode2 == END_BUILTINS)
2062     return NULL_TREE;
2063 
2064   if (implicit_p && !builtin_decl_implicit_p (fcode2))
2065     return NULL_TREE;
2066 
2067   return builtin_decl_explicit (fcode2);
2068 }
2069 
2070 /* Like mathfn_built_in_1, but always use the implicit builtin declarations.  */
2071 
2072 tree
2073 mathfn_built_in (tree type, combined_fn fn)
2074 {
2075   return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2076 }
2077 
2078 /* Like mathfn_built_in_1, but take a built_in_function and
2079    always use the implicit builtin declarations.  */
2080 
2081 tree
2082 mathfn_built_in (tree type, enum built_in_function fn)
2083 {
2084   return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
2085 }
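
/* An illustrative use (a sketch, not a quote from an actual caller): when
   rewriting a double computation in float,

     tree fndecl = mathfn_built_in (float_type_node, BUILT_IN_SQRT);

   yields the decl for BUILT_IN_SQRTF if the float variant is implicitly
   available, and NULL_TREE otherwise, so callers must be prepared for a
   null result.  */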
2086 
2087 /* Return the type associated with a built-in function, i.e., the one
2088    to be passed to mathfn_built_in to get the type-specific
2089    function.  */
2090 
2091 tree
2092 mathfn_built_in_type (combined_fn fn)
2093 {
2094 #define CASE_MATHFN(MATHFN)			\
2095   case CFN_BUILT_IN_##MATHFN:			\
2096     return double_type_node;			\
2097   case CFN_BUILT_IN_##MATHFN##F:		\
2098     return float_type_node;			\
2099   case CFN_BUILT_IN_##MATHFN##L:		\
2100     return long_double_type_node;
2101 
2102 #define CASE_MATHFN_FLOATN(MATHFN)		\
2103   CASE_MATHFN(MATHFN)				\
2104   case CFN_BUILT_IN_##MATHFN##F16:		\
2105     return float16_type_node;			\
2106   case CFN_BUILT_IN_##MATHFN##F32:		\
2107     return float32_type_node;			\
2108   case CFN_BUILT_IN_##MATHFN##F64:		\
2109     return float64_type_node;			\
2110   case CFN_BUILT_IN_##MATHFN##F128:		\
2111     return float128_type_node;			\
2112   case CFN_BUILT_IN_##MATHFN##F32X:		\
2113     return float32x_type_node;			\
2114   case CFN_BUILT_IN_##MATHFN##F64X:		\
2115     return float64x_type_node;			\
2116   case CFN_BUILT_IN_##MATHFN##F128X:		\
2117     return float128x_type_node;
2118 
2119 /* Similar to above, but appends _R after any F/L suffix.  */
2120 #define CASE_MATHFN_REENT(MATHFN) \
2121   case CFN_BUILT_IN_##MATHFN##_R:		\
2122     return double_type_node;			\
2123   case CFN_BUILT_IN_##MATHFN##F_R:		\
2124     return float_type_node;			\
2125   case CFN_BUILT_IN_##MATHFN##L_R:		\
2126     return long_double_type_node;
2127 
2128   switch (fn)
2129     {
2130     SEQ_OF_CASE_MATHFN
2131 
2132     default:
2133       return NULL_TREE;
2134     }
2135 
2136 #undef CASE_MATHFN
2137 #undef CASE_MATHFN_FLOATN
2138 #undef CASE_MATHFN_REENT
2139 #undef SEQ_OF_CASE_MATHFN
2140 }
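
/* For example, mathfn_built_in_type (CFN_BUILT_IN_SQRTF) is float_type_node
   and mathfn_built_in_type (CFN_BUILT_IN_SQRTF128) is float128_type_node,
   so the resulting type can be passed back to mathfn_built_in above to
   recover the type-specific function from the plain one.  */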
2141 
2142 /* Check whether there is an internal function associated with function FN
2143    and return type RETURN_TYPE.  Return the function if so, otherwise return
2144    IFN_LAST.
2145 
2146    Note that this function only tests whether the function is defined in
2147    internal-fn.def, not whether it is actually available on the target.  */
2148 
2149 static internal_fn
2150 associated_internal_fn (built_in_function fn, tree return_type)
2151 {
2152   switch (fn)
2153     {
2154 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2155     CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2156 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2157     CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2158     CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2159 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2160     CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2161 #include "internal-fn.def"
2162 
2163     CASE_FLT_FN (BUILT_IN_POW10):
2164       return IFN_EXP10;
2165 
2166     CASE_FLT_FN (BUILT_IN_DREM):
2167       return IFN_REMAINDER;
2168 
2169     CASE_FLT_FN (BUILT_IN_SCALBN):
2170     CASE_FLT_FN (BUILT_IN_SCALBLN):
2171       if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2172 	return IFN_LDEXP;
2173       return IFN_LAST;
2174 
2175     default:
2176       return IFN_LAST;
2177     }
2178 }
2179 
2180 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2181    return its code, otherwise return IFN_LAST.  Note that this function
2182    only tests whether the function is defined in internals.def, not whether
2183    it is actually available on the target.  */
2184 
2185 internal_fn
2186 associated_internal_fn (tree fndecl)
2187 {
2188   gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2189   return associated_internal_fn (DECL_FUNCTION_CODE (fndecl),
2190 				 TREE_TYPE (TREE_TYPE (fndecl)));
2191 }
2192 
2193 /* Check whether there is an internal function associated with function CFN
2194    and return type RETURN_TYPE.  Return the function if so, otherwise return
2195    IFN_LAST.
2196 
2197    Note that this function only tests whether the function is defined in
2198    internal-fn.def, not whether it is actually available on the target.  */
2199 
2200 internal_fn
2201 associated_internal_fn (combined_fn cfn, tree return_type)
2202 {
2203   if (internal_fn_p (cfn))
2204     return as_internal_fn (cfn);
2205   return associated_internal_fn (as_builtin_fn (cfn), return_type);
2206 }
2207 
2208 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2209    on the current target by a call to an internal function, return the
2210    code of that internal function, otherwise return IFN_LAST.  The caller
2211    is responsible for ensuring that any side-effects of the built-in
2212    call are dealt with correctly.  E.g. if CALL sets errno, the caller
2213    must decide that the errno result isn't needed or make it available
2214    in some other way.  */
2215 
2216 internal_fn
2217 replacement_internal_fn (gcall *call)
2218 {
2219   if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2220     {
2221       internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2222       if (ifn != IFN_LAST)
2223 	{
2224 	  tree_pair types = direct_internal_fn_types (ifn, call);
2225 	  optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2226 	  if (direct_internal_fn_supported_p (ifn, types, opt_type))
2227 	    return ifn;
2228 	}
2229     }
2230   return IFN_LAST;
2231 }
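
/* A sketch of the intended use: given a GIMPLE call such as

     x_2 = __builtin_sqrtf (y_1);

   replacement_internal_fn returns IFN_SQRT when the target provides the
   corresponding optab for the block's optimization type, so the statement
   can be rewritten as the internal call

     x_2 = .SQRT (y_1);

   provided the caller has shown that the errno side-effect of sqrtf is not
   needed.  */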
2232 
2233 /* Expand a call to the builtin ternary math functions (fma).
2234    Return NULL_RTX if a normal call should be emitted rather than expanding the
2235    function in-line.  EXP is the expression that is a call to the builtin
2236    function; if convenient, the result should be placed in TARGET.
2237    SUBTARGET may be used as the target for computing one of EXP's
2238    operands.  */
2239 
2240 static rtx
2241 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2242 {
2243   optab builtin_optab;
2244   rtx op0, op1, op2, result;
2245   rtx_insn *insns;
2246   tree fndecl = get_callee_fndecl (exp);
2247   tree arg0, arg1, arg2;
2248   machine_mode mode;
2249 
2250   if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2251     return NULL_RTX;
2252 
2253   arg0 = CALL_EXPR_ARG (exp, 0);
2254   arg1 = CALL_EXPR_ARG (exp, 1);
2255   arg2 = CALL_EXPR_ARG (exp, 2);
2256 
2257   switch (DECL_FUNCTION_CODE (fndecl))
2258     {
2259     CASE_FLT_FN (BUILT_IN_FMA):
2260     CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2261       builtin_optab = fma_optab; break;
2262     default:
2263       gcc_unreachable ();
2264     }
2265 
2266   /* Make a suitable register to place result in.  */
2267   mode = TYPE_MODE (TREE_TYPE (exp));
2268 
2269   /* Before working hard, check whether the instruction is available.  */
2270   if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2271     return NULL_RTX;
2272 
2273   result = gen_reg_rtx (mode);
2274 
2275   /* Always stabilize the argument list.  */
2276   CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2277   CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2278   CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2279 
2280   op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2281   op1 = expand_normal (arg1);
2282   op2 = expand_normal (arg2);
2283 
2284   start_sequence ();
2285 
2286   /* Compute into RESULT.
2287      Set RESULT to wherever the result comes back.  */
2288   result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2289 			      result, 0);
2290 
2291   /* If we were unable to expand via the builtin, stop the sequence
2292      (without outputting the insns) and call to the library function
2293      with the stabilized argument list.  */
2294   if (result == 0)
2295     {
2296       end_sequence ();
2297       return expand_call (exp, target, target == const0_rtx);
2298     }
2299 
2300   /* Output the entire sequence.  */
2301   insns = get_insns ();
2302   end_sequence ();
2303   emit_insn (insns);
2304 
2305   return result;
2306 }
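
/* For example, on a target with a hardware fused multiply-add pattern,
   __builtin_fma (a, b, c) in double satisfies the optab_handler check above
   and expands through fma_optab to a single insn; otherwise NULL_RTX is
   returned early and the caller emits a normal call to fma.  */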
2307 
2308 /* Expand a call to the builtin sin and cos math functions.
2309    Return NULL_RTX if a normal call should be emitted rather than expanding the
2310    function in-line.  EXP is the expression that is a call to the builtin
2311    function; if convenient, the result should be placed in TARGET.
2312    SUBTARGET may be used as the target for computing one of EXP's
2313    operands.  */
2314 
2315 static rtx
2316 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2317 {
2318   optab builtin_optab;
2319   rtx op0;
2320   rtx_insn *insns;
2321   tree fndecl = get_callee_fndecl (exp);
2322   machine_mode mode;
2323   tree arg;
2324 
2325   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2326     return NULL_RTX;
2327 
2328   arg = CALL_EXPR_ARG (exp, 0);
2329 
2330   switch (DECL_FUNCTION_CODE (fndecl))
2331     {
2332     CASE_FLT_FN (BUILT_IN_SIN):
2333     CASE_FLT_FN (BUILT_IN_COS):
2334       builtin_optab = sincos_optab; break;
2335     default:
2336       gcc_unreachable ();
2337     }
2338 
2339   /* Make a suitable register to place result in.  */
2340   mode = TYPE_MODE (TREE_TYPE (exp));
2341 
2342   /* Check if the sincos insn is available, otherwise fall back
2343      to the sin or cos insn.  */
2344   if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2345     switch (DECL_FUNCTION_CODE (fndecl))
2346       {
2347       CASE_FLT_FN (BUILT_IN_SIN):
2348 	builtin_optab = sin_optab; break;
2349       CASE_FLT_FN (BUILT_IN_COS):
2350 	builtin_optab = cos_optab; break;
2351       default:
2352 	gcc_unreachable ();
2353       }
2354 
2355   /* Before working hard, check whether the instruction is available.  */
2356   if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2357     {
2358       rtx result = gen_reg_rtx (mode);
2359 
2360       /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2361 	 need to expand the argument again.  This way, we will not perform
2362 	 side-effects more than once.  */
2363       CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2364 
2365       op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2366 
2367       start_sequence ();
2368 
2369       /* Compute into RESULT.
2370 	 Set RESULT to wherever the result comes back.  */
2371       if (builtin_optab == sincos_optab)
2372 	{
2373 	  int ok;
2374 
2375 	  switch (DECL_FUNCTION_CODE (fndecl))
2376 	    {
2377 	    CASE_FLT_FN (BUILT_IN_SIN):
2378 	      ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2379 	      break;
2380 	    CASE_FLT_FN (BUILT_IN_COS):
2381 	      ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2382 	      break;
2383 	    default:
2384 	      gcc_unreachable ();
2385 	    }
2386 	  gcc_assert (ok);
2387 	}
2388       else
2389 	result = expand_unop (mode, builtin_optab, op0, result, 0);
2390 
2391       if (result != 0)
2392 	{
2393 	  /* Output the entire sequence.  */
2394 	  insns = get_insns ();
2395 	  end_sequence ();
2396 	  emit_insn (insns);
2397 	  return result;
2398 	}
2399 
2400       /* If we were unable to expand via the builtin, stop the sequence
2401 	 (without outputting the insns) and call to the library function
2402 	 with the stabilized argument list.  */
2403       end_sequence ();
2404     }
2405 
2406   return expand_call (exp, target, target == const0_rtx);
2407 }
2408 
2409 /* Given an interclass math builtin decl FNDECL and its argument ARG,
2410    return an RTL instruction code that implements the functionality.
2411    If that isn't possible or available, return CODE_FOR_nothing.  */
2412 
2413 static enum insn_code
2414 interclass_mathfn_icode (tree arg, tree fndecl)
2415 {
2416   bool errno_set = false;
2417   optab builtin_optab = unknown_optab;
2418   machine_mode mode;
2419 
2420   switch (DECL_FUNCTION_CODE (fndecl))
2421     {
2422     CASE_FLT_FN (BUILT_IN_ILOGB):
2423       errno_set = true; builtin_optab = ilogb_optab; break;
2424     CASE_FLT_FN (BUILT_IN_ISINF):
2425       builtin_optab = isinf_optab; break;
2426     case BUILT_IN_ISNORMAL:
2427     case BUILT_IN_ISFINITE:
2428     CASE_FLT_FN (BUILT_IN_FINITE):
2429     case BUILT_IN_FINITED32:
2430     case BUILT_IN_FINITED64:
2431     case BUILT_IN_FINITED128:
2432     case BUILT_IN_ISINFD32:
2433     case BUILT_IN_ISINFD64:
2434     case BUILT_IN_ISINFD128:
2435       /* These builtins have no optabs (yet).  */
2436       break;
2437     default:
2438       gcc_unreachable ();
2439     }
2440 
2441   /* There's no easy way to detect the case we need to set EDOM.  */
2442   if (flag_errno_math && errno_set)
2443     return CODE_FOR_nothing;
2444 
2445   /* Optab mode depends on the mode of the input argument.  */
2446   mode = TYPE_MODE (TREE_TYPE (arg));
2447 
2448   if (builtin_optab)
2449     return optab_handler (builtin_optab, mode);
2450   return CODE_FOR_nothing;
2451 }
2452 
2453 /* Expand a call to one of the builtin math functions that operate on
2454    a floating-point argument and produce an integer result (ilogb, isinf,
2455    isnan, etc).
2456    Return 0 if a normal call should be emitted rather than expanding the
2457    function in-line.  EXP is the expression that is a call to the builtin
2458    function; if convenient, the result should be placed in TARGET.  */
2459 
2460 static rtx
2461 expand_builtin_interclass_mathfn (tree exp, rtx target)
2462 {
2463   enum insn_code icode = CODE_FOR_nothing;
2464   rtx op0;
2465   tree fndecl = get_callee_fndecl (exp);
2466   machine_mode mode;
2467   tree arg;
2468 
2469   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2470     return NULL_RTX;
2471 
2472   arg = CALL_EXPR_ARG (exp, 0);
2473   icode = interclass_mathfn_icode (arg, fndecl);
2474   mode = TYPE_MODE (TREE_TYPE (arg));
2475 
2476   if (icode != CODE_FOR_nothing)
2477     {
2478       class expand_operand ops[1];
2479       rtx_insn *last = get_last_insn ();
2480       tree orig_arg = arg;
2481 
2482       /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2483 	 need to expand the argument again.  This way, we will not perform
2484 	 side-effects more than once.  */
2485       CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2486 
2487       op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2488 
2489       if (mode != GET_MODE (op0))
2490 	op0 = convert_to_mode (mode, op0, 0);
2491 
2492       create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2493       if (maybe_legitimize_operands (icode, 0, 1, ops)
2494 	  && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2495 	return ops[0].value;
2496 
2497       delete_insns_since (last);
2498       CALL_EXPR_ARG (exp, 0) = orig_arg;
2499     }
2500 
2501   return NULL_RTX;
2502 }
2503 
2504 /* Expand a call to the builtin sincos math function.
2505    Return NULL_RTX if a normal call should be emitted rather than expanding the
2506    function in-line.  EXP is the expression that is a call to the builtin
2507    function.  */
2508 
2509 static rtx
2510 expand_builtin_sincos (tree exp)
2511 {
2512   rtx op0, op1, op2, target1, target2;
2513   machine_mode mode;
2514   tree arg, sinp, cosp;
2515   int result;
2516   location_t loc = EXPR_LOCATION (exp);
2517   tree alias_type, alias_off;
2518 
2519   if (!validate_arglist (exp, REAL_TYPE,
2520  			 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2521     return NULL_RTX;
2522 
2523   arg = CALL_EXPR_ARG (exp, 0);
2524   sinp = CALL_EXPR_ARG (exp, 1);
2525   cosp = CALL_EXPR_ARG (exp, 2);
2526 
2527   /* Make a suitable register to place result in.  */
2528   mode = TYPE_MODE (TREE_TYPE (arg));
2529 
2530   /* Check if sincos insn is available, otherwise emit the call.  */
2531   if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2532     return NULL_RTX;
2533 
2534   target1 = gen_reg_rtx (mode);
2535   target2 = gen_reg_rtx (mode);
2536 
2537   op0 = expand_normal (arg);
2538   alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2539   alias_off = build_int_cst (alias_type, 0);
2540   op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2541 					sinp, alias_off));
2542   op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2543 					cosp, alias_off));
2544 
2545   /* Compute into target1 and target2.
2546      Set TARGET to wherever the result comes back.  */
2547   result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2548   gcc_assert (result);
2549 
2550   /* Move target1 and target2 to the memory locations indicated
2551      by op1 and op2.  */
2552   emit_move_insn (op1, target1);
2553   emit_move_insn (op2, target2);
2554 
2555   return const0_rtx;
2556 }
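
/* For example, with a sincos optab pattern available, a source call

     sincos (x, &s, &c);

   expands to a single operation producing both results, which are then
   stored through the two pointer arguments; without the optab this
   function returns NULL_RTX and a library call to sincos is emitted
   instead.  */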
2557 
2558 /* Expand a call to the internal cexpi builtin to the sincos math function.
2559    EXP is the expression that is a call to the builtin function; if convenient,
2560    the result should be placed in TARGET.  */
2561 
2562 static rtx
2563 expand_builtin_cexpi (tree exp, rtx target)
2564 {
2565   tree fndecl = get_callee_fndecl (exp);
2566   tree arg, type;
2567   machine_mode mode;
2568   rtx op0, op1, op2;
2569   location_t loc = EXPR_LOCATION (exp);
2570 
2571   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2572     return NULL_RTX;
2573 
2574   arg = CALL_EXPR_ARG (exp, 0);
2575   type = TREE_TYPE (arg);
2576   mode = TYPE_MODE (TREE_TYPE (arg));
2577 
2578   /* Try expanding via a sincos optab, and fall back to emitting a libcall
2579      to sincos or cexp.  We are sure we have sincos or cexp because cexpi
2580      is only generated from sincos or cexp, or if we have either of them.  */
2581   if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2582     {
2583       op1 = gen_reg_rtx (mode);
2584       op2 = gen_reg_rtx (mode);
2585 
2586       op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2587 
2588       /* Compute into op1 and op2.  */
2589       expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2590     }
2591   else if (targetm.libc_has_function (function_sincos, type))
2592     {
2593       tree call, fn = NULL_TREE;
2594       tree top1, top2;
2595       rtx op1a, op2a;
2596 
2597       if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2598 	fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2599       else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2600 	fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2601       else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2602 	fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2603       else
2604 	gcc_unreachable ();
2605 
2606       op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2607       op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2608       op1a = copy_addr_to_reg (XEXP (op1, 0));
2609       op2a = copy_addr_to_reg (XEXP (op2, 0));
2610       top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2611       top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2612 
2613       /* Make sure not to fold the sincos call again.  */
2614       call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2615       expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2616 				      call, 3, arg, top1, top2));
2617     }
2618   else
2619     {
2620       tree call, fn = NULL_TREE, narg;
2621       tree ctype = build_complex_type (type);
2622 
2623       if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2624 	fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2625       else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2626 	fn = builtin_decl_explicit (BUILT_IN_CEXP);
2627       else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2628 	fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2629       else
2630 	gcc_unreachable ();
2631 
2632       /* If we don't have a decl for cexp, create one.  This is the
2633 	 friendliest fallback if the user calls __builtin_cexpi without
2634 	 full C99 function support on the target.  */
2635       if (fn == NULL_TREE)
2636 	{
2637 	  tree fntype;
2638 	  const char *name = NULL;
2639 
2640 	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2641 	    name = "cexpf";
2642 	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2643 	    name = "cexp";
2644 	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2645 	    name = "cexpl";
2646 
2647 	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2648 	  fn = build_fn_decl (name, fntype);
2649 	}
2650 
2651       narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2652 			  build_real (type, dconst0), arg);
2653 
2654       /* Make sure not to fold the cexp call again.  */
2655       call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2656       return expand_expr (build_call_nary (ctype, call, 1, narg),
2657 			  target, VOIDmode, EXPAND_NORMAL);
2658     }
2659 
2660   /* Now build the proper return type.  */
2661   return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2662 			      make_tree (TREE_TYPE (arg), op2),
2663 			      make_tree (TREE_TYPE (arg), op1)),
2664 		      target, VOIDmode, EXPAND_NORMAL);
2665 }
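
/* __builtin_cexpi (x) computes cos (x) + i*sin (x) (Euler's formula).  As a
   minimal sketch of the last fallback above: for a double argument it builds
   a COMPLEX_EXPR with real part 0.0 and imaginary part x, then expands the
   equivalent of cexp on that value, so only the cexp library routine is
   required of the target.  */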
2666 
2667 /* Conveniently construct a function call expression.  FNDECL names the
2668    function to be called, N is the number of arguments, and the "..."
2669    parameters are the argument expressions.  Unlike build_call_expr,
2670    this doesn't fold the call, hence it will always return a CALL_EXPR.  */
2671 
2672 static tree
2673 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2674 {
2675   va_list ap;
2676   tree fntype = TREE_TYPE (fndecl);
2677   tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2678 
2679   va_start (ap, n);
2680   fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2681   va_end (ap);
2682   SET_EXPR_LOCATION (fn, loc);
2683   return fn;
2684 }
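
/* For instance, expand_builtin_int_roundingfn below uses

     exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);

   to build a plain `floor (arg)' CALL_EXPR that will not be folded straight
   back into the lfloor form it is replacing.  */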
2685 
2686 /* Expand a call to one of the builtin rounding functions gcc defines
2687    as an extension (lfloor and lceil).  As these are gcc extensions we
2688    do not need to worry about setting errno to EDOM.
2689    If expanding via the optab fails, lower the expression to (int)(floor(x)).
2690    EXP is the expression that is a call to the builtin function;
2691    if convenient, the result should be placed in TARGET.  */
2692 
2693 static rtx
2694 expand_builtin_int_roundingfn (tree exp, rtx target)
2695 {
2696   convert_optab builtin_optab;
2697   rtx op0, tmp;
2698   rtx_insn *insns;
2699   tree fndecl = get_callee_fndecl (exp);
2700   enum built_in_function fallback_fn;
2701   tree fallback_fndecl;
2702   machine_mode mode;
2703   tree arg;
2704 
2705   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2706     return NULL_RTX;
2707 
2708   arg = CALL_EXPR_ARG (exp, 0);
2709 
2710   switch (DECL_FUNCTION_CODE (fndecl))
2711     {
2712     CASE_FLT_FN (BUILT_IN_ICEIL):
2713     CASE_FLT_FN (BUILT_IN_LCEIL):
2714     CASE_FLT_FN (BUILT_IN_LLCEIL):
2715       builtin_optab = lceil_optab;
2716       fallback_fn = BUILT_IN_CEIL;
2717       break;
2718 
2719     CASE_FLT_FN (BUILT_IN_IFLOOR):
2720     CASE_FLT_FN (BUILT_IN_LFLOOR):
2721     CASE_FLT_FN (BUILT_IN_LLFLOOR):
2722       builtin_optab = lfloor_optab;
2723       fallback_fn = BUILT_IN_FLOOR;
2724       break;
2725 
2726     default:
2727       gcc_unreachable ();
2728     }
2729 
2730   /* Make a suitable register to place result in.  */
2731   mode = TYPE_MODE (TREE_TYPE (exp));
2732 
2733   target = gen_reg_rtx (mode);
2734 
2735   /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2736      need to expand the argument again.  This way, we will not perform
2737      side-effects more than once.  */
2738   CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2739 
2740   op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2741 
2742   start_sequence ();
2743 
2744   /* Compute into TARGET.  */
2745   if (expand_sfix_optab (target, op0, builtin_optab))
2746     {
2747       /* Output the entire sequence.  */
2748       insns = get_insns ();
2749       end_sequence ();
2750       emit_insn (insns);
2751       return target;
2752     }
2753 
2754   /* If we were unable to expand via the builtin, stop the sequence
2755      (without outputting the insns).  */
2756   end_sequence ();
2757 
2758   /* Fall back to floating point rounding optab.  */
2759   fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2760 
2761   /* For non-C99 targets we may end up without a fallback fndecl here
2762      if the user called __builtin_lfloor directly.  In this case emit
2763      a call to the floor/ceil variants nevertheless.  This should result
2764      in the best user experience for targets without full C99 support.  */
2765   if (fallback_fndecl == NULL_TREE)
2766     {
2767       tree fntype;
2768       const char *name = NULL;
2769 
2770       switch (DECL_FUNCTION_CODE (fndecl))
2771 	{
2772 	case BUILT_IN_ICEIL:
2773 	case BUILT_IN_LCEIL:
2774 	case BUILT_IN_LLCEIL:
2775 	  name = "ceil";
2776 	  break;
2777 	case BUILT_IN_ICEILF:
2778 	case BUILT_IN_LCEILF:
2779 	case BUILT_IN_LLCEILF:
2780 	  name = "ceilf";
2781 	  break;
2782 	case BUILT_IN_ICEILL:
2783 	case BUILT_IN_LCEILL:
2784 	case BUILT_IN_LLCEILL:
2785 	  name = "ceill";
2786 	  break;
2787 	case BUILT_IN_IFLOOR:
2788 	case BUILT_IN_LFLOOR:
2789 	case BUILT_IN_LLFLOOR:
2790 	  name = "floor";
2791 	  break;
2792 	case BUILT_IN_IFLOORF:
2793 	case BUILT_IN_LFLOORF:
2794 	case BUILT_IN_LLFLOORF:
2795 	  name = "floorf";
2796 	  break;
2797 	case BUILT_IN_IFLOORL:
2798 	case BUILT_IN_LFLOORL:
2799 	case BUILT_IN_LLFLOORL:
2800 	  name = "floorl";
2801 	  break;
2802 	default:
2803 	  gcc_unreachable ();
2804 	}
2805 
2806       fntype = build_function_type_list (TREE_TYPE (arg),
2807 					 TREE_TYPE (arg), NULL_TREE);
2808       fallback_fndecl = build_fn_decl (name, fntype);
2809     }
2810 
2811   exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2812 
2813   tmp = expand_normal (exp);
2814   tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2815 
2816   /* Truncate the result of floating point optab to integer
2817      via expand_fix ().  */
2818   target = gen_reg_rtx (mode);
2819   expand_fix (target, tmp, 0);
2820 
2821   return target;
2822 }
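
/* For example, `long l = __builtin_lfloor (x);' with a double X first tries
   the lfloor optab; if the target has no such pattern, the code above
   instead expands a call to floor and converts the result with expand_fix,
   so the call is lowered roughly to `l = (long) floor (x);'.  */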
2823 
2824 /* Expand a call to one of the builtin math functions doing integer
2825    conversion (lrint).
2826    Return 0 if a normal call should be emitted rather than expanding the
2827    function in-line.  EXP is the expression that is a call to the builtin
2828    function; if convenient, the result should be placed in TARGET.  */
2829 
2830 static rtx
2831 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2832 {
2833   convert_optab builtin_optab;
2834   rtx op0;
2835   rtx_insn *insns;
2836   tree fndecl = get_callee_fndecl (exp);
2837   tree arg;
2838   machine_mode mode;
2839   enum built_in_function fallback_fn = BUILT_IN_NONE;
2840 
2841   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2842     return NULL_RTX;
2843 
2844   arg = CALL_EXPR_ARG (exp, 0);
2845 
2846   switch (DECL_FUNCTION_CODE (fndecl))
2847     {
2848     CASE_FLT_FN (BUILT_IN_IRINT):
2849       fallback_fn = BUILT_IN_LRINT;
2850       gcc_fallthrough ();
2851     CASE_FLT_FN (BUILT_IN_LRINT):
2852     CASE_FLT_FN (BUILT_IN_LLRINT):
2853       builtin_optab = lrint_optab;
2854       break;
2855 
2856     CASE_FLT_FN (BUILT_IN_IROUND):
2857       fallback_fn = BUILT_IN_LROUND;
2858       gcc_fallthrough ();
2859     CASE_FLT_FN (BUILT_IN_LROUND):
2860     CASE_FLT_FN (BUILT_IN_LLROUND):
2861       builtin_optab = lround_optab;
2862       break;
2863 
2864     default:
2865       gcc_unreachable ();
2866     }
2867 
2868   /* There's no easy way to detect the case we need to set EDOM.  */
2869   if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2870     return NULL_RTX;
2871 
2872   /* Make a suitable register to place result in.  */
2873   mode = TYPE_MODE (TREE_TYPE (exp));
2874 
2875   /* There's no easy way to detect the case we need to set EDOM.  */
2876   if (!flag_errno_math)
2877     {
2878       rtx result = gen_reg_rtx (mode);
2879 
2880       /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2881 	 need to expand the argument again.  This way, we will not perform
2882 	 side-effects more than once.  */
2883       CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2884 
2885       op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2886 
2887       start_sequence ();
2888 
2889       if (expand_sfix_optab (result, op0, builtin_optab))
2890 	{
2891 	  /* Output the entire sequence.  */
2892 	  insns = get_insns ();
2893 	  end_sequence ();
2894 	  emit_insn (insns);
2895 	  return result;
2896 	}
2897 
2898       /* If we were unable to expand via the builtin, stop the sequence
2899 	 (without outputting the insns) and call to the library function
2900 	 with the stabilized argument list.  */
2901       end_sequence ();
2902     }
2903 
2904   if (fallback_fn != BUILT_IN_NONE)
2905     {
2906       /* Fall back to rounding to long int.  Use implicit_p 0 - for non-C99
2907 	 targets, (int) round (x) should never be transformed into
2908 	 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2909 	 a call to lround in the hope that the target provides at least some
2910 	 C99 functions.  This should result in the best user experience for
2911 	 targets without full C99 support.  */
2912       tree fallback_fndecl = mathfn_built_in_1
2913 	(TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2914 
2915       exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2916 				   fallback_fndecl, 1, arg);
2917 
2918       target = expand_call (exp, NULL_RTX, target == const0_rtx);
2919       target = maybe_emit_group_store (target, TREE_TYPE (exp));
2920       return convert_to_mode (mode, target, 0);
2921     }
2922 
2923   return expand_call (exp, target, target == const0_rtx);
2924 }
2925 
2926 /* Expand a call to the powi built-in mathematical function.  Return NULL_RTX if
2927    a normal call should be emitted rather than expanding the function
2928    in-line.  EXP is the expression that is a call to the builtin
2929    function; if convenient, the result should be placed in TARGET.  */
2930 
2931 static rtx
2932 expand_builtin_powi (tree exp, rtx target)
2933 {
2934   tree arg0, arg1;
2935   rtx op0, op1;
2936   machine_mode mode;
2937   machine_mode mode2;
2938 
2939   if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2940     return NULL_RTX;
2941 
2942   arg0 = CALL_EXPR_ARG (exp, 0);
2943   arg1 = CALL_EXPR_ARG (exp, 1);
2944   mode = TYPE_MODE (TREE_TYPE (exp));
2945 
2946   /* Emit a libcall to libgcc.  */
2947 
2948   /* Mode of the 2nd argument must match that of an int.  */
2949   mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
2950 
2951   if (target == NULL_RTX)
2952     target = gen_reg_rtx (mode);
2953 
2954   op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2955   if (GET_MODE (op0) != mode)
2956     op0 = convert_to_mode (mode, op0, 0);
2957   op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2958   if (GET_MODE (op1) != mode2)
2959     op1 = convert_to_mode (mode2, op1, 0);
2960 
2961   target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2962 				    target, LCT_CONST, mode,
2963 				    op0, mode, op1, mode2);
2964 
2965   return target;
2966 }
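
/* __builtin_powi always becomes a libgcc libcall here; for double the
   optab libfunc is typically __powidf2, so `__builtin_powi (x, n)' is
   emitted roughly as `__powidf2 (x, (int) n)'.  Calls with small constant
   exponents are normally turned into multiplications earlier, in
   tree-ssa-math-opts.c, and never reach this expander.  */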
2967 
2968 /* Expand expression EXP which is a call to the strlen builtin.  Return
2969    NULL_RTX if we failed and the caller should emit a normal call, otherwise
2970    try to get the result in TARGET, if convenient.  */
2971 
2972 static rtx
2973 expand_builtin_strlen (tree exp, rtx target,
2974 		       machine_mode target_mode)
2975 {
2976   if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2977     return NULL_RTX;
2978 
2979   tree src = CALL_EXPR_ARG (exp, 0);
2980 
2981   /* If the length can be computed at compile-time, return it.  */
2982   if (tree len = c_strlen (src, 0))
2983     return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2984 
2985   /* If the length can be computed at compile-time and is a constant
2986      integer, but there are side-effects in src, evaluate
2987      src for side-effects, then return len.
2988      E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2989      can be optimized into: i++; x = 3;  */
2990   tree len = c_strlen (src, 1);
2991   if (len && TREE_CODE (len) == INTEGER_CST)
2992     {
2993       expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2994       return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2995     }
2996 
2997   unsigned int align = get_pointer_alignment (src) / BITS_PER_UNIT;
2998 
2999   /* If SRC is not a pointer type, don't do this operation inline.  */
3000   if (align == 0)
3001     return NULL_RTX;
3002 
3003   /* Bail out if we can't compute strlen in the right mode.  */
3004   machine_mode insn_mode;
3005   enum insn_code icode = CODE_FOR_nothing;
3006   FOR_EACH_MODE_FROM (insn_mode, target_mode)
3007     {
3008       icode = optab_handler (strlen_optab, insn_mode);
3009       if (icode != CODE_FOR_nothing)
3010 	break;
3011     }
3012   if (insn_mode == VOIDmode)
3013     return NULL_RTX;
3014 
3015   /* Make a place to hold the source address.  We will not expand
3016      the actual source until we are sure that the expansion will
3017      not fail -- there are trees that cannot be expanded twice.  */
3018   rtx src_reg = gen_reg_rtx (Pmode);
3019 
3020   /* Mark the beginning of the strlen sequence so we can emit the
3021      source operand later.  */
3022   rtx_insn *before_strlen = get_last_insn ();
3023 
3024   class expand_operand ops[4];
3025   create_output_operand (&ops[0], target, insn_mode);
3026   create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3027   create_integer_operand (&ops[2], 0);
3028   create_integer_operand (&ops[3], align);
3029   if (!maybe_expand_insn (icode, 4, ops))
3030     return NULL_RTX;
3031 
3032   /* Check to see if the argument was declared attribute nonstring
3033      and if so, issue a warning since at this point it's not known
3034      to be nul-terminated.  */
3035   maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3036 
3037   /* Now that we are assured of success, expand the source.  */
3038   start_sequence ();
3039   rtx pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3040   if (pat != src_reg)
3041     {
3042 #ifdef POINTERS_EXTEND_UNSIGNED
3043       if (GET_MODE (pat) != Pmode)
3044 	pat = convert_to_mode (Pmode, pat,
3045 			       POINTERS_EXTEND_UNSIGNED);
3046 #endif
3047       emit_move_insn (src_reg, pat);
3048     }
3049   pat = get_insns ();
3050   end_sequence ();
3051 
3052   if (before_strlen)
3053     emit_insn_after (pat, before_strlen);
3054   else
3055     emit_insn_before (pat, get_insns ());
3056 
3057   /* Return the value in the proper mode for this function.  */
3058   if (GET_MODE (ops[0].value) == target_mode)
3059     target = ops[0].value;
3060   else if (target != 0)
3061     convert_move (target, ops[0].value, 0);
3062   else
3063     target = convert_to_mode (target_mode, ops[0].value, 0);
3064 
3065   return target;
3066 }
3067 
3068 /* Expand call EXP to the strnlen built-in, returning the result in
3069    TARGET if convenient, or NULL_RTX on failure.  */
3070 
3071 static rtx
3072 expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
3073 {
3074   if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3075     return NULL_RTX;
3076 
3077   tree src = CALL_EXPR_ARG (exp, 0);
3078   tree bound = CALL_EXPR_ARG (exp, 1);
3079 
3080   if (!bound)
3081     return NULL_RTX;
3082 
3083   location_t loc = UNKNOWN_LOCATION;
3084   if (EXPR_HAS_LOCATION (exp))
3085     loc = EXPR_LOCATION (exp);
3086 
3087   /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
3088      so these conversions aren't necessary.  */
3089   c_strlen_data lendata = { };
3090   tree len = c_strlen (src, 0, &lendata, 1);
3091   if (len)
3092     len = fold_convert_loc (loc, TREE_TYPE (bound), len);
3093 
3094   if (TREE_CODE (bound) == INTEGER_CST)
3095     {
3096       if (!len)
3097 	return NULL_RTX;
3098 
3099       len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
3100       return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3101     }
3102 
3103   if (TREE_CODE (bound) != SSA_NAME)
3104     return NULL_RTX;
3105 
3106   wide_int min, max;
3107   value_range r;
3108   get_global_range_query ()->range_of_expr (r, bound);
3109   if (r.kind () != VR_RANGE)
3110     return NULL_RTX;
3111   min = r.lower_bound ();
3112   max = r.upper_bound ();
3113 
3114   if (!len || TREE_CODE (len) != INTEGER_CST)
3115     {
3116       bool exact;
3117       lendata.decl = unterminated_array (src, &len, &exact);
3118       if (!lendata.decl)
3119 	return NULL_RTX;
3120     }
3121 
3122   if (lendata.decl)
3123     return NULL_RTX;
3124 
3125   if (wi::gtu_p (min, wi::to_wide (len)))
3126     return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3127 
3128   len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3129   return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3130 }
3131 
3132 /* Callback routine for store_by_pieces.  Read GET_MODE_SIZE (MODE)
3133    bytes from the representation at DATA + OFFSET and return them
3134    reinterpreted as a target constant.  */
3135 
3136 static rtx
3137 builtin_memcpy_read_str (void *data, void *, HOST_WIDE_INT offset,
3138 			 fixed_size_mode mode)
3139 {
3140   /* The REPresentation pointed to by DATA need not be a nul-terminated
3141      string but the caller guarantees it's large enough for MODE.  */
3142   const char *rep = (const char *) data;
3143 
3144   /* The by-pieces infrastructure does not try to pick a vector mode
3145      for memcpy expansion.  */
3146   return c_readstr (rep + offset, as_a <scalar_int_mode> (mode),
3147 		    /*nul_terminated=*/false);
3148 }
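
/* A sketch of what the callback above yields, assuming a little-endian
   target: for DATA pointing at "abcdefgh", OFFSET 4 and SImode, the
   bytes "efgh" are read and returned as (const_int 0x68676665), i.e.
   the constant that a 4-byte load from that offset would produce.  */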
3149 
3150 /* LEN specifies the length of the block for a memcpy/memset operation.
3151    Figure out its range and store it in MIN_SIZE/MAX_SIZE.
3152    In some cases we can make a very likely guess about the maximum size,
3153    which we then store in PROBABLE_MAX_SIZE.  */
3154 
3155 static void
3156 determine_block_size (tree len, rtx len_rtx,
3157 		      unsigned HOST_WIDE_INT *min_size,
3158 		      unsigned HOST_WIDE_INT *max_size,
3159 		      unsigned HOST_WIDE_INT *probable_max_size)
3160 {
3161   if (CONST_INT_P (len_rtx))
3162     {
3163       *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3164       return;
3165     }
3166   else
3167     {
3168       wide_int min, max;
3169       enum value_range_kind range_type = VR_UNDEFINED;
3170 
3171       /* Determine bounds from the type.  */
3172       if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3173 	*min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3174       else
3175 	*min_size = 0;
3176       if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3177 	*probable_max_size = *max_size
3178 	  = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3179       else
3180 	*probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3181 
3182       if (TREE_CODE (len) == SSA_NAME)
3183 	{
3184 	  value_range r;
3185 	  get_global_range_query ()->range_of_expr (r, len);
3186 	  range_type = r.kind ();
3187 	  if (range_type != VR_UNDEFINED)
3188 	    {
3189 	      min = wi::to_wide (r.min ());
3190 	      max = wi::to_wide (r.max ());
3191 	    }
3192 	}
3193       if (range_type == VR_RANGE)
3194 	{
3195 	  if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3196 	    *min_size = min.to_uhwi ();
3197 	  if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3198 	    *probable_max_size = *max_size = max.to_uhwi ();
3199 	}
3200       else if (range_type == VR_ANTI_RANGE)
3201 	{
3202 	  /* Code like
3203 
3204 	     int n;
3205 	     if (n < 100)
3206 	       memcpy (a, b, n)
3207 
3208 	     produces an anti-range allowing negative values of N.  We can
3209 	     still use that information and guess that N is not negative.
3210 	     */
3211 	  if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3212 	    *probable_max_size = min.to_uhwi () - 1;
3213 	}
3214     }
3215   gcc_checking_assert (*max_size <=
3216 		       (unsigned HOST_WIDE_INT)
3217 			  GET_MODE_MASK (GET_MODE (len_rtx)));
3218 }
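
/* For example, for a call memcpy (a, b, n) where N is an SSA name with
   a known value range of [0, 64], the code above sets *MIN_SIZE to 0 and
   both *MAX_SIZE and *PROBABLE_MAX_SIZE to 64; for a constant length all
   three end up equal to that constant.  */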
3219 
3220 /* Expand a call EXP to the memcpy builtin.
3221    Return NULL_RTX if we failed; the caller should emit a normal call,
3222    otherwise try to get the result in TARGET, if convenient (and in
3223    mode MODE if that's convenient).  */
3224 
3225 static rtx
3226 expand_builtin_memcpy (tree exp, rtx target)
3227 {
3228   if (!validate_arglist (exp,
3229  			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3230     return NULL_RTX;
3231 
3232   tree dest = CALL_EXPR_ARG (exp, 0);
3233   tree src = CALL_EXPR_ARG (exp, 1);
3234   tree len = CALL_EXPR_ARG (exp, 2);
3235 
3236   return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3237 					  /*retmode=*/ RETURN_BEGIN, false);
3238 }
3239 
3240 /* Expand a call EXP to the memmove built-in.  Return NULL_RTX if we
3241    failed; the caller should then emit a normal call.  */
3242 
3243 static rtx
3244 expand_builtin_memmove (tree exp, rtx target)
3245 {
3246   if (!validate_arglist (exp,
3247  			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3248     return NULL_RTX;
3249 
3250   tree dest = CALL_EXPR_ARG (exp, 0);
3251   tree src = CALL_EXPR_ARG (exp, 1);
3252   tree len = CALL_EXPR_ARG (exp, 2);
3253 
3254   return expand_builtin_memory_copy_args (dest, src, len, target, exp,
3255 					  /*retmode=*/ RETURN_BEGIN, true);
3256 }
3257 
3258 /* Expand a call EXP to the mempcpy builtin.
3259    Return NULL_RTX if we failed; the caller should emit a normal call,
3260    otherwise try to get the result in TARGET, if convenient (and in
3261    mode MODE if that's convenient).  */
3262 
3263 static rtx
3264 expand_builtin_mempcpy (tree exp, rtx target)
3265 {
3266   if (!validate_arglist (exp,
3267  			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3268     return NULL_RTX;
3269 
3270   tree dest = CALL_EXPR_ARG (exp, 0);
3271   tree src = CALL_EXPR_ARG (exp, 1);
3272   tree len = CALL_EXPR_ARG (exp, 2);
3273 
3274   /* Policy does not generally allow using compute_objsize (which
3275      is used internally by check_memop_size) to change code generation
3276      or drive optimization decisions.
3277 
3278      In this instance it is safe because the code we generate has
3279      the same semantics regardless of the return value of
3280      check_memop_sizes.   Exactly the same amount of data is copied
3281      and the return value is exactly the same in both cases.
3282 
3283      Furthermore, check_memop_size always uses mode 0 for the call to
3284      compute_objsize, so the imprecise nature of compute_objsize is
3285      avoided.  */
3286 
3287   /* Avoid expanding mempcpy into memcpy when the call is determined
3288      to overflow the buffer.  This also prevents the same overflow
3289      from being diagnosed again when expanding memcpy.  */
3290 
3291   return expand_builtin_mempcpy_args (dest, src, len,
3292 				      target, exp, /*retmode=*/ RETURN_END);
3293 }
3294 
3295 /* Helper function to do the actual work for expanding the memory copy
3296    family of functions (memcpy, mempcpy, stpcpy).  The expansion copies
3297    LEN bytes of memory from SRC to DEST and assigns the result to TARGET
3298    if convenient.  The return value is based on the RETMODE argument.  */
3299 
3300 static rtx
3301 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
3302 				 rtx target, tree exp, memop_ret retmode,
3303 				 bool might_overlap)
3304 {
3305   unsigned int src_align = get_pointer_alignment (src);
3306   unsigned int dest_align = get_pointer_alignment (dest);
3307   rtx dest_mem, src_mem, dest_addr, len_rtx;
3308   HOST_WIDE_INT expected_size = -1;
3309   unsigned int expected_align = 0;
3310   unsigned HOST_WIDE_INT min_size;
3311   unsigned HOST_WIDE_INT max_size;
3312   unsigned HOST_WIDE_INT probable_max_size;
3313 
3314   bool is_move_done;
3315 
3316   /* If DEST is not a pointer type, call the normal function.  */
3317   if (dest_align == 0)
3318     return NULL_RTX;
3319 
3320   /* Likewise, if SRC is not a pointer type, don't do this
3321      operation in-line.  */
3322   if (src_align == 0)
3323     return NULL_RTX;
3324 
3325   if (currently_expanding_gimple_stmt)
3326     stringop_block_profile (currently_expanding_gimple_stmt,
3327 			    &expected_align, &expected_size);
3328 
3329   if (expected_align < dest_align)
3330     expected_align = dest_align;
3331   dest_mem = get_memory_rtx (dest, len);
3332   set_mem_align (dest_mem, dest_align);
3333   len_rtx = expand_normal (len);
3334   determine_block_size (len, len_rtx, &min_size, &max_size,
3335 			&probable_max_size);
3336 
3337   /* Try to get the byte representation of the constant that SRC points to,
3338      with its byte size in NBYTES.  */
3339   unsigned HOST_WIDE_INT nbytes;
3340   const char *rep = getbyterep (src, &nbytes);
3341 
3342   /* If the function's constant bound LEN_RTX is less than or equal
3343      to the byte size of the representation of the constant argument,
3344      and if block move would be done by pieces, we can avoid loading
3345      the bytes from memory and only store the computed constant.
3346      This works in the overlap (memmove) case as well because
3347      store_by_pieces just generates a series of stores of constants
3348      from the representation returned by getbyterep().  */
3349   if (rep
3350       && CONST_INT_P (len_rtx)
3351       && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes
3352       && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3353 			      CONST_CAST (char *, rep),
3354 			      dest_align, false))
3355     {
3356       dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3357 				  builtin_memcpy_read_str,
3358 				  CONST_CAST (char *, rep),
3359 				  dest_align, false, retmode);
3360       dest_mem = force_operand (XEXP (dest_mem, 0), target);
3361       dest_mem = convert_memory_address (ptr_mode, dest_mem);
3362       return dest_mem;
3363     }
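
  /* To illustrate the case above: for a call such as

       memcpy (buf, "abcd", 4);

     getbyterep finds the constant representation, so rather than loading
     from the string literal the expansion stores the bytes as immediate
     constants, subject to can_store_by_pieces and the alignment of BUF.  */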
3364 
3365   src_mem = get_memory_rtx (src, len);
3366   set_mem_align (src_mem, src_align);
3367 
3368   /* Copy word part most expediently.  */
3369   enum block_op_methods method = BLOCK_OP_NORMAL;
3370   if (CALL_EXPR_TAILCALL (exp)
3371       && (retmode == RETURN_BEGIN || target == const0_rtx))
3372     method = BLOCK_OP_TAILCALL;
3373   bool use_mempcpy_call = (targetm.libc_has_fast_function (BUILT_IN_MEMPCPY)
3374 			   && retmode == RETURN_END
3375 			   && !might_overlap
3376 			   && target != const0_rtx);
3377   if (use_mempcpy_call)
3378     method = BLOCK_OP_NO_LIBCALL_RET;
3379   dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
3380 				     expected_align, expected_size,
3381 				     min_size, max_size, probable_max_size,
3382 				     use_mempcpy_call, &is_move_done,
3383 				     might_overlap);
3384 
3385   /* Bail out when a mempcpy call would be expanded as a libcall and
3386      the target provides a fast implementation of the mempcpy
3387      routine.  */
3388   if (!is_move_done)
3389     return NULL_RTX;
3390 
3391   if (dest_addr == pc_rtx)
3392     return NULL_RTX;
3393 
3394   if (dest_addr == 0)
3395     {
3396       dest_addr = force_operand (XEXP (dest_mem, 0), target);
3397       dest_addr = convert_memory_address (ptr_mode, dest_addr);
3398     }
3399 
3400   if (retmode != RETURN_BEGIN && target != const0_rtx)
3401     {
3402       dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
3403       /* stpcpy returns a pointer to the last byte.  */
3404       if (retmode == RETURN_END_MINUS_ONE)
3405 	dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
3406     }
3407 
3408   return dest_addr;
3409 }
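
/* A summary of how the callers above use RETMODE (no new behavior, just
   the mapping implied by the code):

     RETURN_BEGIN           memcpy, memmove, strcpy: return DEST
     RETURN_END             mempcpy: return DEST + LEN
     RETURN_END_MINUS_ONE   stpcpy: return DEST + LEN - 1 (the NUL)  */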
3410 
3411 static rtx
3412 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3413 			     rtx target, tree orig_exp, memop_ret retmode)
3414 {
3415   return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
3416 					  retmode, false);
3417 }
3418 
3419 /* Expand into a movstr instruction, if one is available.  Return NULL_RTX if
3420    we failed; the caller should emit a normal call, otherwise try to
3421    get the result in TARGET, if convenient.
3422    Return value is based on RETMODE argument.  */
3423 
3424 static rtx
3425 expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
3426 {
3427   class expand_operand ops[3];
3428   rtx dest_mem;
3429   rtx src_mem;
3430 
3431   if (!targetm.have_movstr ())
3432     return NULL_RTX;
3433 
3434   dest_mem = get_memory_rtx (dest, NULL);
3435   src_mem = get_memory_rtx (src, NULL);
3436   if (retmode == RETURN_BEGIN)
3437     {
3438       target = force_reg (Pmode, XEXP (dest_mem, 0));
3439       dest_mem = replace_equiv_address (dest_mem, target);
3440     }
3441 
3442   create_output_operand (&ops[0],
3443 			 retmode != RETURN_BEGIN ? target : NULL_RTX, Pmode);
3444   create_fixed_operand (&ops[1], dest_mem);
3445   create_fixed_operand (&ops[2], src_mem);
3446   if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3447     return NULL_RTX;
3448 
3449   if (retmode != RETURN_BEGIN && target != const0_rtx)
3450     {
3451       target = ops[0].value;
3452       /* movstr is supposed to set end to the address of the NUL
3453 	 terminator.  If the caller requested a mempcpy-like return value,
3454 	 adjust it.  */
3455       if (retmode == RETURN_END)
3456 	{
3457 	  rtx tem = plus_constant (GET_MODE (target),
3458 				   gen_lowpart (GET_MODE (target), target), 1);
3459 	  emit_move_insn (target, force_operand (tem, NULL_RTX));
3460 	}
3461     }
3462   return target;
3463 }
3464 
3465 /* Expand expression EXP, which is a call to the strcpy builtin.  Return
3466    NULL_RTX if we failed; the caller should emit a normal call, otherwise
3467    try to get the result in TARGET, if convenient (and in mode MODE if that's
3468    convenient).  */
3469 
3470 static rtx
3471 expand_builtin_strcpy (tree exp, rtx target)
3472 {
3473   if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3474     return NULL_RTX;
3475 
3476   tree dest = CALL_EXPR_ARG (exp, 0);
3477   tree src = CALL_EXPR_ARG (exp, 1);
3478 
3479   return expand_builtin_strcpy_args (exp, dest, src, target);
3480 }
3481 
3482 /* Helper function to do the actual work for expand_builtin_strcpy.  The
3483    arguments to the builtin_strcpy call DEST and SRC are broken out
3484    so that this can also be called without constructing an actual CALL_EXPR.
3485    The other arguments and return value are the same as for
3486    expand_builtin_strcpy.  */
3487 
3488 static rtx
3489 expand_builtin_strcpy_args (tree, tree dest, tree src, rtx target)
3490 {
3491   return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
3492 }
3493 
3494 /* Expand a call EXP to the stpcpy builtin.
3495    Return NULL_RTX if we failed; the caller should emit a normal call,
3496    otherwise try to get the result in TARGET, if convenient (and in
3497    mode MODE if that's convenient).  */
3498 
3499 static rtx
3500 expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
3501 {
3502   tree dst, src;
3503   location_t loc = EXPR_LOCATION (exp);
3504 
3505   if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3506     return NULL_RTX;
3507 
3508   dst = CALL_EXPR_ARG (exp, 0);
3509   src = CALL_EXPR_ARG (exp, 1);
3510 
3511   /* If return value is ignored, transform stpcpy into strcpy.  */
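  /* E.g. a statement like

       stpcpy (d, s);

     whose result is unused is expanded as strcpy (d, s) below.  */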
3512   if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3513     {
3514       tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3515       tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3516       return expand_expr (result, target, mode, EXPAND_NORMAL);
3517     }
3518   else
3519     {
3520       tree len, lenp1;
3521       rtx ret;
3522 
3523       /* Ensure we get an actual string whose length can be evaluated at
3524 	 compile-time, not an expression containing a string.  This is
3525 	 because the latter will potentially produce pessimized code
3526 	 when used to produce the return value.  */
3527       c_strlen_data lendata = { };
3528       if (!c_getstr (src)
3529 	  || !(len = c_strlen (src, 0, &lendata, 1)))
3530 	return expand_movstr (dst, src, target,
3531 			      /*retmode=*/ RETURN_END_MINUS_ONE);
3532 
3533       lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3534       ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3535 					 target, exp,
3536 					 /*retmode=*/ RETURN_END_MINUS_ONE);
3537 
3538       if (ret)
3539 	return ret;
3540 
3541       if (TREE_CODE (len) == INTEGER_CST)
3542 	{
3543 	  rtx len_rtx = expand_normal (len);
3544 
3545 	  if (CONST_INT_P (len_rtx))
3546 	    {
3547 	      ret = expand_builtin_strcpy_args (exp, dst, src, target);
3548 
3549 	      if (ret)
3550 		{
3551 		  if (! target)
3552 		    {
3553 		      if (mode != VOIDmode)
3554 			target = gen_reg_rtx (mode);
3555 		      else
3556 			target = gen_reg_rtx (GET_MODE (ret));
3557 		    }
3558 		  if (GET_MODE (target) != GET_MODE (ret))
3559 		    ret = gen_lowpart (GET_MODE (target), ret);
3560 
3561 		  ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3562 		  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3563 		  gcc_assert (ret);
3564 
3565 		  return target;
3566 		}
3567 	    }
3568 	}
3569 
3570       return expand_movstr (dst, src, target,
3571 			    /*retmode=*/ RETURN_END_MINUS_ONE);
3572     }
3573 }
3574 
3575 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
3576    arguments while being careful to avoid duplicate warnings (which could
3577    be issued if the expander were to expand the call, resulting in it
3578    being emitted in expand_call ()).  */
3579 
3580 static rtx
3581 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3582 {
3583   if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
3584     {
3585       /* The call has been successfully expanded.  Check for nonstring
3586 	 arguments and issue warnings as appropriate.  */
3587       maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3588       return ret;
3589     }
3590 
3591   return NULL_RTX;
3592 }
3593 
3594 /* Callback routine for store_by_pieces.  Read GET_MODE_SIZE (MODE)
3595    bytes from the constant string DATA + OFFSET and return them as a
3596    target constant.  */
3597 
3598 rtx
3599 builtin_strncpy_read_str (void *data, void *, HOST_WIDE_INT offset,
3600 			  fixed_size_mode mode)
3601 {
3602   const char *str = (const char *) data;
3603 
3604   if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3605     return const0_rtx;
3606 
3607   /* The by-pieces infrastructure does not try to pick a vector mode
3608      for strncpy expansion.  */
3609   return c_readstr (str + offset, as_a <scalar_int_mode> (mode));
3610 }
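
/* For instance, for strncpy (d, "ab", 8) expanded by pieces, offsets up
   to the string length read from "ab" while offsets past the terminating
   NUL return const0_rtx above, which provides the zero padding that
   strncpy requires.  */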
3611 
3612 /* Helper to check the sizes of sequences and the destination of calls
3613    to __builtin_strncat and __builtin___strncat_chk.  Returns true on
3614    success (no overflow or invalid sizes), false otherwise.  */
3615 
3616 static bool
3617 check_strncat_sizes (tree exp, tree objsize)
3618 {
3619   tree dest = CALL_EXPR_ARG (exp, 0);
3620   tree src = CALL_EXPR_ARG (exp, 1);
3621   tree maxread = CALL_EXPR_ARG (exp, 2);
3622 
3623   /* Try to determine the range of lengths that the source expression
3624      refers to.  */
3625   c_strlen_data lendata = { };
3626   get_range_strlen (src, &lendata, /* eltsize = */ 1);
3627 
3628   /* Try to verify that the destination is big enough for the shortest
3629      string.  */
3630 
3631   access_data data (nullptr, exp, access_read_write, maxread, true);
3632   if (!objsize && warn_stringop_overflow)
3633     {
3634       /* If it hasn't been provided by __strncat_chk, try to determine
3635 	 the size of the destination object into which the source is
3636 	 being copied.  */
3637       objsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
3638     }
3639 
3640   /* Add one for the terminating nul.  */
3641   tree srclen = (lendata.minlen
3642 		 ? fold_build2 (PLUS_EXPR, size_type_node, lendata.minlen,
3643 				size_one_node)
3644 		 : NULL_TREE);
3645 
3646   /* The strncat function copies at most MAXREAD bytes and always appends
3647      the terminating nul so the specified upper bound should never be equal
3648      to (or greater than) the size of the destination.  */
3649   if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
3650       && tree_int_cst_equal (objsize, maxread))
3651     {
3652       location_t loc = EXPR_LOCATION (exp);
3653       warning_at (loc, OPT_Wstringop_overflow_,
3654 		  "%qD specified bound %E equals destination size",
3655 		  get_callee_fndecl (exp), maxread);
3656 
3657       return false;
3658     }
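
  /* For example, with

       char d[8];
       strncat (d, s, sizeof d);

     the bound equals the destination size, so the warning above fires;
     a correct bound here would typically be sizeof d - strlen (d) - 1.  */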
3659 
3660   if (!srclen
3661       || (maxread && tree_fits_uhwi_p (maxread)
3662 	  && tree_fits_uhwi_p (srclen)
3663 	  && tree_int_cst_lt (maxread, srclen)))
3664     srclen = maxread;
3665 
3666   /* The number of bytes to write is LEN but check_access will also
3667      check SRCLEN if LEN's value isn't known.  */
3668   return check_access (exp, /*dstwrite=*/NULL_TREE, maxread, srclen,
3669 		       objsize, data.mode, &data);
3670 }
3671 
3672 /* Expand expression EXP, which is a call to the strncpy builtin.  Return
3673    NULL_RTX if we failed; the caller should then emit a normal call.  */
3674 
3675 static rtx
3676 expand_builtin_strncpy (tree exp, rtx target)
3677 {
3678   location_t loc = EXPR_LOCATION (exp);
3679 
3680   if (!validate_arglist (exp,
3681 			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3682     return NULL_RTX;
3683   tree dest = CALL_EXPR_ARG (exp, 0);
3684   tree src = CALL_EXPR_ARG (exp, 1);
3685   /* The number of bytes to write (not the maximum).  */
3686   tree len = CALL_EXPR_ARG (exp, 2);
3687 
3688   /* The length of the source sequence.  */
3689   tree slen = c_strlen (src, 1);
3690 
3691   /* We must be passed a constant len and src parameter.  */
3692   if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3693     return NULL_RTX;
3694 
3695   slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3696 
3697   /* We're required to pad with trailing zeros if the requested
3698      len is greater than strlen(s2)+1.  In that case try to
3699      use store_by_pieces; if that fails, punt.  */
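  /* For instance, strncpy (d, "ab", 6) with a sufficiently aligned D is
     expanded below as stores of 'a', 'b' and four NUL bytes, obtained
     through builtin_strncpy_read_str.  */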
3700   if (tree_int_cst_lt (slen, len))
3701     {
3702       unsigned int dest_align = get_pointer_alignment (dest);
3703       const char *p = c_getstr (src);
3704       rtx dest_mem;
3705 
3706       if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3707 	  || !can_store_by_pieces (tree_to_uhwi (len),
3708 				   builtin_strncpy_read_str,
3709 				   CONST_CAST (char *, p),
3710 				   dest_align, false))
3711 	return NULL_RTX;
3712 
3713       dest_mem = get_memory_rtx (dest, len);
3714       store_by_pieces (dest_mem, tree_to_uhwi (len),
3715 		       builtin_strncpy_read_str,
3716 		       CONST_CAST (char *, p), dest_align, false,
3717 		       RETURN_BEGIN);
3718       dest_mem = force_operand (XEXP (dest_mem, 0), target);
3719       dest_mem = convert_memory_address (ptr_mode, dest_mem);
3720       return dest_mem;
3721     }
3722 
3723   return NULL_RTX;
3724 }
3725 
3726 /* Return the RTL of a register in MODE generated from PREV in the
3727    previous iteration.  */
3728 
3729 static rtx
3730 gen_memset_value_from_prev (by_pieces_prev *prev, fixed_size_mode mode)
3731 {
3732   rtx target = nullptr;
3733   if (prev != nullptr && prev->data != nullptr)
3734     {
3735       /* Use the previous data in the same mode.  */
3736       if (prev->mode == mode)
3737 	return prev->data;
3738 
3739       fixed_size_mode prev_mode = prev->mode;
3740 
3741       /* Don't use the previous data to write QImode if it is in a
3742 	 vector mode.  */
3743       if (VECTOR_MODE_P (prev_mode) && mode == QImode)
3744 	return target;
3745 
3746       rtx prev_rtx = prev->data;
3747 
3748       if (REG_P (prev_rtx)
3749 	  && HARD_REGISTER_P (prev_rtx)
3750 	  && lowpart_subreg_regno (REGNO (prev_rtx), prev_mode, mode) < 0)
3751 	{
3752 	  /* This case occurs when PREV_MODE is a vector and when
3753 	     MODE is too small to store using vector operations.
3754 	     After register allocation, the code will need to move the
3755 	     lowpart of the vector register into a non-vector register.
3756 
3757 	     Also, the target has chosen to use a hard register
3758 	     instead of going with the default choice of using a
3759 	     pseudo register.  We should respect that choice and try to
3760 	     avoid creating a pseudo register with the same mode as the
3761 	     current hard register.
3762 
3763 	     In principle, we could just use a lowpart MODE subreg of
3764 	     the vector register.  However, the vector register mode might
3765 	     be too wide for non-vector registers, and we already know
3766 	     that the non-vector mode is too small for vector registers.
3767 	     It's therefore likely that we'd need to spill to memory in
3768 	     the vector mode and reload the non-vector value from there.
3769 
3770 	     Try to avoid that by reducing the vector register to the
3771 	     smallest size that it can hold.  This should increase the
3772 	     chances that non-vector registers can hold both the inner
3773 	     and outer modes of the subreg that we generate later.  */
3774 	  machine_mode m;
3775 	  fixed_size_mode candidate;
3776 	  FOR_EACH_MODE_IN_CLASS (m, GET_MODE_CLASS (mode))
3777 	    if (is_a<fixed_size_mode> (m, &candidate))
3778 	      {
3779 		if (GET_MODE_SIZE (candidate)
3780 		    >= GET_MODE_SIZE (prev_mode))
3781 		  break;
3782 		if (GET_MODE_SIZE (candidate) >= GET_MODE_SIZE (mode)
3783 		    && lowpart_subreg_regno (REGNO (prev_rtx),
3784 					     prev_mode, candidate) >= 0)
3785 		  {
3786 		    target = lowpart_subreg (candidate, prev_rtx,
3787 					     prev_mode);
3788 		    prev_rtx = target;
3789 		    prev_mode = candidate;
3790 		    break;
3791 		  }
3792 	      }
3793 	  if (target == nullptr)
3794 	    prev_rtx = copy_to_reg (prev_rtx);
3795 	}
3796 
3797       target = lowpart_subreg (mode, prev_rtx, prev_mode);
3798     }
3799   return target;
3800 }
3801 
3802 /* Callback routine for store_by_pieces.  Read GET_MODE_SIZE (MODE)
3803    bytes from the constant string DATA + OFFSET and return them as a
3804    target constant.  If PREV isn't nullptr, it has the RTL info from the
3805    previous iteration.  */
3806 
3807 rtx
3808 builtin_memset_read_str (void *data, void *prev,
3809 			 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3810 			 fixed_size_mode mode)
3811 {
3812   const char *c = (const char *) data;
3813   unsigned int size = GET_MODE_SIZE (mode);
3814 
3815   rtx target = gen_memset_value_from_prev ((by_pieces_prev *) prev,
3816 					   mode);
3817   if (target != nullptr)
3818     return target;
3819   rtx src = gen_int_mode (*c, QImode);
3820 
3821   if (VECTOR_MODE_P (mode))
3822     {
3823       gcc_assert (GET_MODE_INNER (mode) == QImode);
3824 
3825       rtx const_vec = gen_const_vec_duplicate (mode, src);
3826       if (prev == NULL)
3827 	/* Return CONST_VECTOR when called by a query function.  */
3828 	return const_vec;
3829 
3830       /* Use the move expander with CONST_VECTOR.  */
3831       target = targetm.gen_memset_scratch_rtx (mode);
3832       emit_move_insn (target, const_vec);
3833       return target;
3834     }
3835 
3836   char *p = XALLOCAVEC (char, size);
3837 
3838   memset (p, *c, size);
3839 
3840   /* Vector modes should be handled above.  */
3841   return c_readstr (p, as_a <scalar_int_mode> (mode));
3842 }
3843 
3844 /* Callback routine for store_by_pieces.  Return the RTL of a register
3845    containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3846    char value given in the RTL register data.  For example, if mode is
3847    4 bytes wide, return the RTL for 0x01010101*data.  If PREV isn't
3848    nullptr, it has the RTL info from the previous iteration.  */
3849 
3850 static rtx
3851 builtin_memset_gen_str (void *data, void *prev,
3852 			HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3853 			fixed_size_mode mode)
3854 {
3855   rtx target, coeff;
3856   size_t size;
3857   char *p;
3858 
3859   size = GET_MODE_SIZE (mode);
3860   if (size == 1)
3861     return (rtx) data;
3862 
3863   target = gen_memset_value_from_prev ((by_pieces_prev *) prev, mode);
3864   if (target != nullptr)
3865     return target;
3866 
3867   if (VECTOR_MODE_P (mode))
3868     {
3869       gcc_assert (GET_MODE_INNER (mode) == QImode);
3870 
3871       /* vec_duplicate_optab is a precondition to pick a vector mode for
3872 	 the memset expander.  */
3873       insn_code icode = optab_handler (vec_duplicate_optab, mode);
3874 
3875       target = targetm.gen_memset_scratch_rtx (mode);
3876       class expand_operand ops[2];
3877       create_output_operand (&ops[0], target, mode);
3878       create_input_operand (&ops[1], (rtx) data, QImode);
3879       expand_insn (icode, 2, ops);
3880       if (!rtx_equal_p (target, ops[0].value))
3881 	emit_move_insn (target, ops[0].value);
3882 
3883       return target;
3884     }
3885 
3886   p = XALLOCAVEC (char, size);
3887   memset (p, 1, size);
3888   /* Vector modes should be handled above.  */
3889   coeff = c_readstr (p, as_a <scalar_int_mode> (mode));
3890 
3891   target = convert_to_mode (mode, (rtx) data, 1);
3892   target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3893   return force_reg (mode, target);
3894 }
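
/* A sketch of the scalar path above: for SImode the coefficient read
   back from the "\1\1\1\1" buffer is 0x01010101, so a register holding
   the byte value 0xab is widened and multiplied to give 0xabababab,
   i.e. GET_MODE_SIZE (MODE) copies of the byte.  */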
3895 
3896 /* Expand expression EXP, which is a call to the memset builtin.  Return
3897    NULL_RTX if we failed; the caller should emit a normal call, otherwise
3898    try to get the result in TARGET, if convenient (and in mode MODE if that's
3899    convenient).  */
3900 
3901 rtx
3902 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3903 {
3904   if (!validate_arglist (exp,
3905  			 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3906     return NULL_RTX;
3907 
3908   tree dest = CALL_EXPR_ARG (exp, 0);
3909   tree val = CALL_EXPR_ARG (exp, 1);
3910   tree len = CALL_EXPR_ARG (exp, 2);
3911 
3912   return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3913 }
3914 
3915 /* Try to store VAL (or, if NULL_RTX, VALC) in LEN bytes starting at TO.
3916    Return TRUE if successful, FALSE otherwise.  TO is assumed to be
3917    aligned at an ALIGN-bits boundary.  LEN must be a multiple of
3918    1<<CTZ_LEN between MIN_LEN and MAX_LEN.
3919 
3920    The strategy is to issue one store_by_pieces for each power of two,
3921    from most to least significant, guarded by a test on whether there
3922    are at least that many bytes left to copy in LEN.
3923 
3924    ??? Should we skip some powers of two in favor of loops?  Maybe start
3925    at the max of TO/LEN/word alignment, at least when optimizing for
3926    size, instead of ensuring O(log len) dynamic compares?  */
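
/* As a rough sketch, for a memset whose length is known to be a multiple
   of 4 in [4, 28] with 4-byte alignment, the code below emits
   approximately:

     if (rem >= 16) { store 16 bytes; ptr += 16; rem -= 16; }
     if (rem >= 8)  { store 8 bytes;  ptr += 8;  rem -= 8;  }
     if (rem >= 4)  { store 4 bytes; }

   so any valid length is covered by O(log LEN) compares.  */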
3927 
3928 bool
3929 try_store_by_multiple_pieces (rtx to, rtx len, unsigned int ctz_len,
3930 			      unsigned HOST_WIDE_INT min_len,
3931 			      unsigned HOST_WIDE_INT max_len,
3932 			      rtx val, char valc, unsigned int align)
3933 {
3934   int max_bits = floor_log2 (max_len);
3935   int min_bits = floor_log2 (min_len);
3936   int sctz_len = ctz_len;
3937 
3938   gcc_checking_assert (sctz_len >= 0);
3939 
3940   if (val)
3941     valc = 1;
3942 
3943   /* Bits more significant than TST_BITS are part of the shared prefix
3944      in the binary representation of both min_len and max_len.  Since
3945      they're identical, we don't need to test them in the loop.  */
3946   int tst_bits = (max_bits != min_bits ? max_bits
3947 		  : floor_log2 (max_len ^ min_len));
3948 
3949   /* Check whether it's profitable to start by storing a fixed BLKSIZE
3950      bytes, to lower max_bits.  In the unlikely case of a constant LEN
3951      (implied by identical MAX_LEN and MIN_LEN), we want to issue a
3952      single store_by_pieces, but otherwise, select the minimum multiple
3953      of the ALIGN (in bytes) and of the GCD of the possible LENs that
3954      brings MAX_LEN below 1<<TST_BITS, provided it is no greater than MIN_LEN.  */
3955   unsigned HOST_WIDE_INT blksize;
3956   if (max_len > min_len)
3957     {
3958       unsigned HOST_WIDE_INT alrng = MAX (HOST_WIDE_INT_1U << ctz_len,
3959 					  align / BITS_PER_UNIT);
3960       blksize = max_len - (HOST_WIDE_INT_1U << tst_bits) + alrng;
3961       blksize &= ~(alrng - 1);
3962     }
3963   else if (max_len == min_len)
3964     blksize = max_len;
3965   else
3966     gcc_unreachable ();
3967   if (min_len >= blksize)
3968     {
3969       min_len -= blksize;
3970       min_bits = floor_log2 (min_len);
3971       max_len -= blksize;
3972       max_bits = floor_log2 (max_len);
3973 
3974       tst_bits = (max_bits != min_bits ? max_bits
3975 		 : floor_log2 (max_len ^ min_len));
3976     }
3977   else
3978     blksize = 0;
3979 
3980   /* Check that we can use store by pieces for the maximum store count
3981      we may issue (initial fixed-size block, plus conditional
3982      power-of-two-sized stores from max_bits down to ctz_len).  */
3983   unsigned HOST_WIDE_INT xlenest = blksize;
3984   if (max_bits >= 0)
3985     xlenest += ((HOST_WIDE_INT_1U << max_bits) * 2
3986 		- (HOST_WIDE_INT_1U << ctz_len));
3987   if (!can_store_by_pieces (xlenest, builtin_memset_read_str,
3988 			    &valc, align, true))
3989     return false;
3990 
3991   by_pieces_constfn constfun;
3992   void *constfundata;
3993   if (val)
3994     {
3995       constfun = builtin_memset_gen_str;
3996       constfundata = val = force_reg (TYPE_MODE (unsigned_char_type_node),
3997 				      val);
3998     }
3999   else
4000     {
4001       constfun = builtin_memset_read_str;
4002       constfundata = &valc;
4003     }
4004 
4005   rtx ptr = copy_addr_to_reg (convert_to_mode (ptr_mode, XEXP (to, 0), 0));
4006   rtx rem = copy_to_mode_reg (ptr_mode, convert_to_mode (ptr_mode, len, 0));
4007   to = replace_equiv_address (to, ptr);
4008   set_mem_align (to, align);
4009 
4010   if (blksize)
4011     {
4012       to = store_by_pieces (to, blksize,
4013 			    constfun, constfundata,
4014 			    align, true,
4015 			    max_len != 0 ? RETURN_END : RETURN_BEGIN);
4016       if (max_len == 0)
4017 	return true;
4018 
4019       /* Adjust PTR, TO and REM.  Since TO's address is likely
4020 	 PTR+offset, we have to replace it.  */
4021       emit_move_insn (ptr, force_operand (XEXP (to, 0), NULL_RTX));
4022       to = replace_equiv_address (to, ptr);
4023       rtx rem_minus_blksize = plus_constant (ptr_mode, rem, -blksize);
4024       emit_move_insn (rem, force_operand (rem_minus_blksize, NULL_RTX));
4025     }
4026 
4027   /* Iterate over power-of-two block sizes from the maximum length to
4028      the least significant bit possibly set in the length.  */
4029   for (int i = max_bits; i >= sctz_len; i--)
4030     {
4031       rtx_code_label *label = NULL;
4032       blksize = HOST_WIDE_INT_1U << i;
4033 
4034       /* If we're past the bits shared between min_ and max_len, expand
4035 	 a test on the dynamic length, comparing it with the
4036 	 BLKSIZE.  */
4037       if (i <= tst_bits)
4038 	{
4039 	  label = gen_label_rtx ();
4040 	  emit_cmp_and_jump_insns (rem, GEN_INT (blksize), LT, NULL,
4041 				   ptr_mode, 1, label,
4042 				   profile_probability::even ());
4043 	}
4044       /* If we are at a bit that is in the prefix shared by min_ and
4045 	 max_len, skip this BLKSIZE if the bit is clear.  */
4046       else if ((max_len & blksize) == 0)
4047 	continue;
4048 
4049       /* Issue a store of BLKSIZE bytes.  */
4050       to = store_by_pieces (to, blksize,
4051 			    constfun, constfundata,
4052 			    align, true,
4053 			    i != sctz_len ? RETURN_END : RETURN_BEGIN);
4054 
4055       /* Adjust REM and PTR, unless this is the last iteration.  */
4056       if (i != sctz_len)
4057 	{
4058 	  emit_move_insn (ptr, force_operand (XEXP (to, 0), NULL_RTX));
4059 	  to = replace_equiv_address (to, ptr);
4060 	  rtx rem_minus_blksize = plus_constant (ptr_mode, rem, -blksize);
4061 	  emit_move_insn (rem, force_operand (rem_minus_blksize, NULL_RTX));
4062 	}
4063 
4064       if (label)
4065 	{
4066 	  emit_label (label);
4067 
4068 	  /* Given conditional stores, the offset can no longer be
4069 	     known, so clear it.  */
4070 	  clear_mem_offset (to);
4071 	}
4072     }
4073 
4074   return true;
4075 }
4076 
4077 /* Helper function to do the actual work for expand_builtin_memset.  The
4078    arguments to the builtin_memset call DEST, VAL, and LEN are broken out
4079    so that this can also be called without constructing an actual CALL_EXPR.
4080    The other arguments and return value are the same as for
4081    expand_builtin_memset.  */
4082 
4083 static rtx
4084 expand_builtin_memset_args (tree dest, tree val, tree len,
4085 			    rtx target, machine_mode mode, tree orig_exp)
4086 {
4087   tree fndecl, fn;
4088   enum built_in_function fcode;
4089   machine_mode val_mode;
4090   char c;
4091   unsigned int dest_align;
4092   rtx dest_mem, dest_addr, len_rtx;
4093   HOST_WIDE_INT expected_size = -1;
4094   unsigned int expected_align = 0;
4095   unsigned HOST_WIDE_INT min_size;
4096   unsigned HOST_WIDE_INT max_size;
4097   unsigned HOST_WIDE_INT probable_max_size;
4098 
4099   dest_align = get_pointer_alignment (dest);
4100 
4101   /* If DEST is not a pointer type, don't do this operation in-line.  */
4102   if (dest_align == 0)
4103     return NULL_RTX;
4104 
4105   if (currently_expanding_gimple_stmt)
4106     stringop_block_profile (currently_expanding_gimple_stmt,
4107 			    &expected_align, &expected_size);
4108 
4109   if (expected_align < dest_align)
4110     expected_align = dest_align;
4111 
4112   /* If the LEN parameter is zero, return DEST.  */
4113   if (integer_zerop (len))
4114     {
4115       /* Evaluate and ignore VAL in case it has side-effects.  */
4116       expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
4117       return expand_expr (dest, target, mode, EXPAND_NORMAL);
4118     }
4119 
4120   /* Stabilize the arguments in case we fail.  */
4121   dest = builtin_save_expr (dest);
4122   val = builtin_save_expr (val);
4123   len = builtin_save_expr (len);
4124 
4125   len_rtx = expand_normal (len);
4126   determine_block_size (len, len_rtx, &min_size, &max_size,
4127 			&probable_max_size);
4128   dest_mem = get_memory_rtx (dest, len);
4129   val_mode = TYPE_MODE (unsigned_char_type_node);
4130 
4131   if (TREE_CODE (val) != INTEGER_CST
4132       || target_char_cast (val, &c))
4133     {
4134       rtx val_rtx;
4135 
4136       val_rtx = expand_normal (val);
4137       val_rtx = convert_to_mode (val_mode, val_rtx, 0);
4138 
4139       /* Assume that we can memset by pieces if we can store
4140 	 the coefficients by pieces (in the required modes).
4141 	 We can't pass builtin_memset_gen_str as that emits RTL.  */
4142       c = 1;
4143       if (tree_fits_uhwi_p (len)
4144 	  && can_store_by_pieces (tree_to_uhwi (len),
4145 				  builtin_memset_read_str, &c, dest_align,
4146 				  true))
4147 	{
4148 	  val_rtx = force_reg (val_mode, val_rtx);
4149 	  store_by_pieces (dest_mem, tree_to_uhwi (len),
4150 			   builtin_memset_gen_str, val_rtx, dest_align,
4151 			   true, RETURN_BEGIN);
4152 	}
4153       else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
4154 					dest_align, expected_align,
4155 					expected_size, min_size, max_size,
4156 					probable_max_size)
4157 	       && !try_store_by_multiple_pieces (dest_mem, len_rtx,
4158 						 tree_ctz (len),
4159 						 min_size, max_size,
4160 						 val_rtx, 0,
4161 						 dest_align))
4162 	goto do_libcall;
4163 
4164       dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4165       dest_mem = convert_memory_address (ptr_mode, dest_mem);
4166       return dest_mem;
4167     }
4168 
4169   if (c)
4170     {
4171       if (tree_fits_uhwi_p (len)
4172 	  && can_store_by_pieces (tree_to_uhwi (len),
4173 				  builtin_memset_read_str, &c, dest_align,
4174 				  true))
4175 	store_by_pieces (dest_mem, tree_to_uhwi (len),
4176 			 builtin_memset_read_str, &c, dest_align, true,
4177 			 RETURN_BEGIN);
4178       else if (!set_storage_via_setmem (dest_mem, len_rtx,
4179 					gen_int_mode (c, val_mode),
4180 					dest_align, expected_align,
4181 					expected_size, min_size, max_size,
4182 					probable_max_size)
4183 	       && !try_store_by_multiple_pieces (dest_mem, len_rtx,
4184 						 tree_ctz (len),
4185 						 min_size, max_size,
4186 						 NULL_RTX, c,
4187 						 dest_align))
4188 	goto do_libcall;
4189 
4190       dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4191       dest_mem = convert_memory_address (ptr_mode, dest_mem);
4192       return dest_mem;
4193     }
4194 
4195   set_mem_align (dest_mem, dest_align);
4196   dest_addr = clear_storage_hints (dest_mem, len_rtx,
4197 				   CALL_EXPR_TAILCALL (orig_exp)
4198 				   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
4199 				   expected_align, expected_size,
4200 				   min_size, max_size,
4201 				   probable_max_size, tree_ctz (len));
4202 
4203   if (dest_addr == 0)
4204     {
4205       dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
4206       dest_addr = convert_memory_address (ptr_mode, dest_addr);
4207     }
4208 
4209   return dest_addr;
4210 
4211  do_libcall:
4212   fndecl = get_callee_fndecl (orig_exp);
4213   fcode = DECL_FUNCTION_CODE (fndecl);
4214   if (fcode == BUILT_IN_MEMSET)
4215     fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
4216 				dest, val, len);
4217   else if (fcode == BUILT_IN_BZERO)
4218     fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
4219 				dest, len);
4220   else
4221     gcc_unreachable ();
4222   gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4223   CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
4224   return expand_call (fn, target, target == const0_rtx);
4225 }
4226 
4227 /* Expand expression EXP, which is a call to the bzero builtin.  Return
4228    NULL_RTX if we failed; the caller should then emit a normal call.  */
4229 
4230 static rtx
4231 expand_builtin_bzero (tree exp)
4232 {
4233   if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4234     return NULL_RTX;
4235 
4236   tree dest = CALL_EXPR_ARG (exp, 0);
4237   tree size = CALL_EXPR_ARG (exp, 1);
4238 
4239   /* New argument list transforming bzero(ptr x, int y) to
4240      memset(ptr x, int 0, size_t y).  This is done this way
4241      so that if it isn't expanded inline, we fall back to
4242      calling bzero instead of memset.  */
4243 
4244   location_t loc = EXPR_LOCATION (exp);
4245 
4246   return expand_builtin_memset_args (dest, integer_zero_node,
4247 				     fold_convert_loc (loc,
4248 						       size_type_node, size),
4249 				     const0_rtx, VOIDmode, exp);
4250 }
4251 
4252 /* Try to expand cmpstr operation ICODE with the given operands.
4253    Return the result rtx on success, otherwise return null.  */
4254 
4255 static rtx
4256 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
4257 	       HOST_WIDE_INT align)
4258 {
4259   machine_mode insn_mode = insn_data[icode].operand[0].mode;
4260 
4261   if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
4262     target = NULL_RTX;
4263 
4264   class expand_operand ops[4];
4265   create_output_operand (&ops[0], target, insn_mode);
4266   create_fixed_operand (&ops[1], arg1_rtx);
4267   create_fixed_operand (&ops[2], arg2_rtx);
4268   create_integer_operand (&ops[3], align);
4269   if (maybe_expand_insn (icode, 4, ops))
4270     return ops[0].value;
4271   return NULL_RTX;
4272 }
4273 
4274 /* Expand expression EXP, which is a call to the memcmp built-in function.
4275    Return NULL_RTX if we failed and the caller should emit a normal call,
4276    otherwise try to get the result in TARGET, if convenient.
4277    RESULT_EQ is true if we can relax the returned value to be either zero
4278    or nonzero, without caring about the sign.  */
4279 
4280 static rtx
4281 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
4282 {
4283   if (!validate_arglist (exp,
4284  			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4285     return NULL_RTX;
4286 
4287   tree arg1 = CALL_EXPR_ARG (exp, 0);
4288   tree arg2 = CALL_EXPR_ARG (exp, 1);
4289   tree len = CALL_EXPR_ARG (exp, 2);
4290 
4291   /* Due to the performance benefit, always inline the calls first
4292      when result_eq is false.  */
4293   rtx result = NULL_RTX;
4294   enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
4295   if (!result_eq && fcode != BUILT_IN_BCMP)
4296     {
4297       result = inline_expand_builtin_bytecmp (exp, target);
4298       if (result)
4299 	return result;
4300     }
4301 
4302   machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4303   location_t loc = EXPR_LOCATION (exp);
4304 
4305   unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4306   unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4307 
4308   /* If we don't have POINTER_TYPE, call the function.  */
4309   if (arg1_align == 0 || arg2_align == 0)
4310     return NULL_RTX;
4311 
4312   rtx arg1_rtx = get_memory_rtx (arg1, len);
4313   rtx arg2_rtx = get_memory_rtx (arg2, len);
4314   rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
4315 
4316   /* Set MEM_SIZE as appropriate.  */
4317   if (CONST_INT_P (len_rtx))
4318     {
4319       set_mem_size (arg1_rtx, INTVAL (len_rtx));
4320       set_mem_size (arg2_rtx, INTVAL (len_rtx));
4321     }
4322 
4323   by_pieces_constfn constfn = NULL;
4324 
4325   /* Try to get the byte representation of the constant that ARG2 (or,
4326      only when the function's result is used for equality to zero, ARG1)
4327      points to, with its byte size in NBYTES.  */
4328   unsigned HOST_WIDE_INT nbytes;
4329   const char *rep = getbyterep (arg2, &nbytes);
4330   if (result_eq && rep == NULL)
4331     {
4332       /* For equality to zero the arguments are interchangeable.  */
4333       rep = getbyterep (arg1, &nbytes);
4334       if (rep != NULL)
4335 	std::swap (arg1_rtx, arg2_rtx);
4336     }
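
  /* E.g. for

       if (memcmp (p, "abcd", 4) == 0) ...

     REP is "abcd" and, with the constant bound check below, the compare
     can be emitted by pieces against immediate constants instead of
     loading the literal from memory.  */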
4337 
4338   /* If the function's constant bound LEN_RTX is less than or equal
4339      to the byte size of the representation of the constant argument,
4340      and if block move would be done by pieces, we can avoid loading
4341      the bytes from memory and only store the computed constant result.  */
4342   if (rep
4343       && CONST_INT_P (len_rtx)
4344       && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes)
4345     constfn = builtin_memcpy_read_str;
4346 
4347   result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
4348 				 TREE_TYPE (len), target,
4349 				 result_eq, constfn,
4350 				 CONST_CAST (char *, rep));
4351 
4352   if (result)
4353     {
4354       /* Return the value in the proper mode for this function.  */
4355       if (GET_MODE (result) == mode)
4356 	return result;
4357 
4358       if (target != 0)
4359 	{
4360 	  convert_move (target, result, 0);
4361 	  return target;
4362 	}
4363 
4364       return convert_to_mode (mode, result, 0);
4365     }
4366 
4367   return NULL_RTX;
4368 }
4369 
4370 /* Expand expression EXP, which is a call to the strcmp builtin.  Return NULL_RTX
4371    if we failed; the caller should emit a normal call, otherwise try to get
4372    the result in TARGET, if convenient.  */
4373 
4374 static rtx
4375 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
4376 {
4377   if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4378     return NULL_RTX;
4379 
4380   tree arg1 = CALL_EXPR_ARG (exp, 0);
4381   tree arg2 = CALL_EXPR_ARG (exp, 1);
4382 
4383   /* Due to the performance benefit, always inline the calls first.  */
4384   rtx result = NULL_RTX;
4385   result = inline_expand_builtin_bytecmp (exp, target);
4386   if (result)
4387     return result;
4388 
4389   insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
4390   insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4391   if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
4392     return NULL_RTX;
4393 
4394   unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4395   unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4396 
4397   /* If we don't have POINTER_TYPE, call the function.  */
4398   if (arg1_align == 0 || arg2_align == 0)
4399     return NULL_RTX;
4400 
4401   /* Stabilize the arguments in case gen_cmpstr(n)si fail.  */
4402   arg1 = builtin_save_expr (arg1);
4403   arg2 = builtin_save_expr (arg2);
4404 
4405   rtx arg1_rtx = get_memory_rtx (arg1, NULL);
4406   rtx arg2_rtx = get_memory_rtx (arg2, NULL);
4407 
4408   /* Try to call cmpstrsi.  */
4409   if (cmpstr_icode != CODE_FOR_nothing)
4410     result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
4411 			    MIN (arg1_align, arg2_align));
4412 
4413   /* Try to determine at least one length and call cmpstrnsi.  */
4414   if (!result && cmpstrn_icode != CODE_FOR_nothing)
4415     {
4416       tree len;
4417       rtx arg3_rtx;
4418 
4419       tree len1 = c_strlen (arg1, 1);
4420       tree len2 = c_strlen (arg2, 1);
4421 
4422       if (len1)
4423 	len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
4424       if (len2)
4425 	len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
4426 
4427       /* If we don't have a constant length for the first, use the length
4428 	 of the second, if we know it.  We don't require a constant for
4429 	 this case; some cost analysis could be done if both are available
4430 	 but neither is constant.  For now, assume they're equally cheap,
4431 	 unless one has side effects.  If both strings have constant lengths,
4432 	 use the smaller.  */
4433 
4434       if (!len1)
4435 	len = len2;
4436       else if (!len2)
4437 	len = len1;
4438       else if (TREE_SIDE_EFFECTS (len1))
4439 	len = len2;
4440       else if (TREE_SIDE_EFFECTS (len2))
4441 	len = len1;
4442       else if (TREE_CODE (len1) != INTEGER_CST)
4443 	len = len2;
4444       else if (TREE_CODE (len2) != INTEGER_CST)
4445 	len = len1;
4446       else if (tree_int_cst_lt (len1, len2))
4447 	len = len1;
4448       else
4449 	len = len2;
4450 
4451       /* If both arguments have side effects, we cannot optimize.  */
4452       if (len && !TREE_SIDE_EFFECTS (len))
4453 	{
4454 	  arg3_rtx = expand_normal (len);
4455 	  result = expand_cmpstrn_or_cmpmem
4456 	    (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
4457 	     arg3_rtx, MIN (arg1_align, arg2_align));
4458 	}
4459     }
4460 
4461   tree fndecl = get_callee_fndecl (exp);
4462   if (result)
4463     {
4464       /* Return the value in the proper mode for this function.  */
4465       machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
4466       if (GET_MODE (result) == mode)
4467 	return result;
4468       if (target == 0)
4469 	return convert_to_mode (mode, result, 0);
4470       convert_move (target, result, 0);
4471       return target;
4472     }
4473 
4474   /* Expand the library call ourselves using a stabilized argument
4475      list to avoid re-evaluating the function's arguments twice.  */
4476   tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
4477   copy_warning (fn, exp);
4478   gcc_assert (TREE_CODE (fn) == CALL_EXPR);
4479   CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
4480   return expand_call (fn, target, target == const0_rtx);
4481 }
4482 
4483 /* Expand expression EXP, which is a call to the strncmp builtin. Return
4484    NULL_RTX if we failed; the caller should emit a normal call.  Otherwise
4485    try to get the result in TARGET, if convenient.  */
4486 
4487 static rtx
4488 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
4489 			ATTRIBUTE_UNUSED machine_mode mode)
4490 {
4491   if (!validate_arglist (exp,
4492  			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4493     return NULL_RTX;
4494 
4495   tree arg1 = CALL_EXPR_ARG (exp, 0);
4496   tree arg2 = CALL_EXPR_ARG (exp, 1);
4497   tree arg3 = CALL_EXPR_ARG (exp, 2);
4498 
4499   location_t loc = EXPR_LOCATION (exp);
4500   tree len1 = c_strlen (arg1, 1);
4501   tree len2 = c_strlen (arg2, 1);
4502 
4503   /* Due to the performance benefit, always inline the calls first.  */
4504   rtx result = NULL_RTX;
4505   result = inline_expand_builtin_bytecmp (exp, target);
4506   if (result)
4507     return result;
4508 
4509   /* If c_strlen can determine an expression for one of the string
4510      lengths, and it doesn't have side effects, then emit cmpstrnsi
4511      using length MIN(strlen(string)+1, arg3).  */
4512   insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
4513   if (cmpstrn_icode == CODE_FOR_nothing)
4514     return NULL_RTX;
4515 
4516   tree len;
4517 
4518   unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
4519   unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
4520 
4521   if (len1)
4522     len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
4523   if (len2)
4524     len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
4525 
4526   tree len3 = fold_convert_loc (loc, sizetype, arg3);
4527 
4528   /* If we don't have a constant length for the first, use the length
4529      of the second, if we know it.  If neither string is constant length,
4530      use the given length argument.  We don't require a constant for
4531      this case; some cost analysis could be done if both are available
4532      but neither is constant.  For now, assume they're equally cheap,
4533      unless one has side effects.  If both strings have constant lengths,
4534      use the smaller.  */
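  /* For example, for strncmp ("hello", s, 100) we get len1 == 6 and no len2,
     so LEN becomes 6; since that differs from the given bound, the actual
     comparison length below is MIN (6, 100) == 6.  */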
4535 
4536   if (!len1 && !len2)
4537     len = len3;
4538   else if (!len1)
4539     len = len2;
4540   else if (!len2)
4541     len = len1;
4542   else if (TREE_SIDE_EFFECTS (len1))
4543     len = len2;
4544   else if (TREE_SIDE_EFFECTS (len2))
4545     len = len1;
4546   else if (TREE_CODE (len1) != INTEGER_CST)
4547     len = len2;
4548   else if (TREE_CODE (len2) != INTEGER_CST)
4549     len = len1;
4550   else if (tree_int_cst_lt (len1, len2))
4551     len = len1;
4552   else
4553     len = len2;
4554 
4555   /* If we are not using the given length, we must incorporate it here.
4556      The actual new length parameter will be MIN(len,arg3) in this case.  */
4557   if (len != len3)
4558     {
4559       len = fold_convert_loc (loc, sizetype, len);
4560       len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
4561     }
4562   rtx arg1_rtx = get_memory_rtx (arg1, len);
4563   rtx arg2_rtx = get_memory_rtx (arg2, len);
4564   rtx arg3_rtx = expand_normal (len);
4565   result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
4566 				     arg2_rtx, TREE_TYPE (len), arg3_rtx,
4567 				     MIN (arg1_align, arg2_align));
4568 
4569   tree fndecl = get_callee_fndecl (exp);
4570   if (result)
4571     {
4572       /* Return the value in the proper mode for this function.  */
4573       mode = TYPE_MODE (TREE_TYPE (exp));
4574       if (GET_MODE (result) == mode)
4575 	return result;
4576       if (target == 0)
4577 	return convert_to_mode (mode, result, 0);
4578       convert_move (target, result, 0);
4579       return target;
4580     }
4581 
4582   /* Expand the library call ourselves using a stabilized argument
4583      list to avoid re-evaluating the function's arguments twice.  */
4584   tree call = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
4585   copy_warning (call, exp);
4586   gcc_assert (TREE_CODE (call) == CALL_EXPR);
4587   CALL_EXPR_TAILCALL (call) = CALL_EXPR_TAILCALL (exp);
4588   return expand_call (call, target, target == const0_rtx);
4589 }
4590 
4591 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
4592    if that's convenient.  */
4593 
4594 rtx
4595 expand_builtin_saveregs (void)
4596 {
4597   rtx val;
4598   rtx_insn *seq;
4599 
4600   /* Don't do __builtin_saveregs more than once in a function.
4601      Save the result of the first call and reuse it.  */
4602   if (saveregs_value != 0)
4603     return saveregs_value;
4604 
4605   /* When this function is called, it means that registers must be
4606      saved on entry to this function.  So we migrate the call to the
4607      first insn of this function.  */
4608 
4609   start_sequence ();
4610 
4611   /* Do whatever the machine needs done in this case.  */
4612   val = targetm.calls.expand_builtin_saveregs ();
4613 
4614   seq = get_insns ();
4615   end_sequence ();
4616 
4617   saveregs_value = val;
4618 
4619   /* Put the insns after the NOTE that starts the function.  If this
4620      is inside a start_sequence, make the outer-level insn chain current, so
4621      the code is placed at the start of the function.  */
4622   push_topmost_sequence ();
4623   emit_insn_after (seq, entry_of_function ());
4624   pop_topmost_sequence ();
4625 
4626   return val;
4627 }
4628 
4629 /* Expand a call to __builtin_next_arg.  */
4630 
4631 static rtx
4632 expand_builtin_next_arg (void)
4633 {
4634   /* Checking arguments is already done in fold_builtin_next_arg
4635      that must be called before this function.  */
4636   return expand_binop (ptr_mode, add_optab,
4637 		       crtl->args.internal_arg_pointer,
4638 		       crtl->args.arg_offset_rtx,
4639 		       NULL_RTX, 0, OPTAB_LIB_WIDEN);
4640 }
4641 
4642 /* Make it easier for the backends by protecting the valist argument
4643    from multiple evaluations.  */
4644 
4645 static tree
4646 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4647 {
4648   tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4649 
4650   /* The current way of determining the type of valist is completely
4651      bogus.  We should have the information on the va builtin instead.  */
4652   if (!vatype)
4653     vatype = targetm.fn_abi_va_list (cfun->decl);
4654 
4655   if (TREE_CODE (vatype) == ARRAY_TYPE)
4656     {
4657       if (TREE_SIDE_EFFECTS (valist))
4658 	valist = save_expr (valist);
4659 
4660       /* For this case, the backends will be expecting a pointer to
4661 	 vatype, but it's possible we've actually been given an array
4662 	 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4663 	 So fix it.  */
4664       if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4665 	{
4666 	  tree p1 = build_pointer_type (TREE_TYPE (vatype));
4667 	  valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4668 	}
4669     }
4670   else
4671     {
4672       tree pt = build_pointer_type (vatype);
4673 
4674       if (! needs_lvalue)
4675 	{
4676 	  if (! TREE_SIDE_EFFECTS (valist))
4677 	    return valist;
4678 
4679 	  valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4680 	  TREE_SIDE_EFFECTS (valist) = 1;
4681 	}
4682 
4683       if (TREE_SIDE_EFFECTS (valist))
4684 	valist = save_expr (valist);
4685       valist = fold_build2_loc (loc, MEM_REF,
4686 				vatype, valist, build_int_cst (pt, 0));
4687     }
4688 
4689   return valist;
4690 }
4691 
4692 /* The "standard" definition of va_list is void*.  */
4693 
4694 tree
4695 std_build_builtin_va_list (void)
4696 {
4697   return ptr_type_node;
4698 }
4699 
4700 /* The "standard" abi va_list is va_list_type_node.  */
4701 
4702 tree
4703 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4704 {
4705   return va_list_type_node;
4706 }
4707 
4708 /* The "standard" type of va_list is va_list_type_node.  */
4709 
4710 tree
4711 std_canonical_va_list_type (tree type)
4712 {
4713   tree wtype, htype;
4714 
4715   wtype = va_list_type_node;
4716   htype = type;
4717 
4718   if (TREE_CODE (wtype) == ARRAY_TYPE)
4719     {
4720       /* If va_list is an array type, the argument may have decayed
4721 	 to a pointer type, e.g. by being passed to another function.
4722 	 In that case, unwrap both types so that we can compare the
4723 	 underlying records.  */
4724       if (TREE_CODE (htype) == ARRAY_TYPE
4725 	  || POINTER_TYPE_P (htype))
4726 	{
4727 	  wtype = TREE_TYPE (wtype);
4728 	  htype = TREE_TYPE (htype);
4729 	}
4730     }
4731   if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4732     return va_list_type_node;
4733 
4734   return NULL_TREE;
4735 }
4736 
4737 /* The "standard" implementation of va_start: just assign `nextarg' to
4738    the variable.  */
4739 
4740 void
4741 std_expand_builtin_va_start (tree valist, rtx nextarg)
4742 {
4743   rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4744   convert_move (va_r, nextarg, 0);
4745 }
4746 
4747 /* Expand EXP, a call to __builtin_va_start.  */
4748 
4749 static rtx
4750 expand_builtin_va_start (tree exp)
4751 {
4752   rtx nextarg;
4753   tree valist;
4754   location_t loc = EXPR_LOCATION (exp);
4755 
4756   if (call_expr_nargs (exp) < 2)
4757     {
4758       error_at (loc, "too few arguments to function %<va_start%>");
4759       return const0_rtx;
4760     }
4761 
4762   if (fold_builtin_next_arg (exp, true))
4763     return const0_rtx;
4764 
4765   nextarg = expand_builtin_next_arg ();
4766   valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4767 
4768   if (targetm.expand_builtin_va_start)
4769     targetm.expand_builtin_va_start (valist, nextarg);
4770   else
4771     std_expand_builtin_va_start (valist, nextarg);
4772 
4773   return const0_rtx;
4774 }
4775 
4776 /* Expand EXP, a call to __builtin_va_end.  */
4777 
4778 static rtx
4779 expand_builtin_va_end (tree exp)
4780 {
4781   tree valist = CALL_EXPR_ARG (exp, 0);
4782 
4783   /* Evaluate for side effects, if needed.  I hate macros that don't
4784      do that.  */
4785   if (TREE_SIDE_EFFECTS (valist))
4786     expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4787 
4788   return const0_rtx;
4789 }
4790 
4791 /* Expand EXP, a call to __builtin_va_copy.  We do this as a
4792    builtin rather than just as an assignment in stdarg.h because of the
4793    nastiness of array-type va_list types.  */
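/* For instance, on targets such as x86-64 where va_list is a one-element
   array type, the array branch below copies the underlying structure with a
   block move; on targets where va_list is a plain pointer, the simple
   assignment branch is used instead.  */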
4794 
4795 static rtx
4796 expand_builtin_va_copy (tree exp)
4797 {
4798   tree dst, src, t;
4799   location_t loc = EXPR_LOCATION (exp);
4800 
4801   dst = CALL_EXPR_ARG (exp, 0);
4802   src = CALL_EXPR_ARG (exp, 1);
4803 
4804   dst = stabilize_va_list_loc (loc, dst, 1);
4805   src = stabilize_va_list_loc (loc, src, 0);
4806 
4807   gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4808 
4809   if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4810     {
4811       t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4812       TREE_SIDE_EFFECTS (t) = 1;
4813       expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4814     }
4815   else
4816     {
4817       rtx dstb, srcb, size;
4818 
4819       /* Evaluate to pointers.  */
4820       dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4821       srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4822       size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4823       		  NULL_RTX, VOIDmode, EXPAND_NORMAL);
4824 
4825       dstb = convert_memory_address (Pmode, dstb);
4826       srcb = convert_memory_address (Pmode, srcb);
4827 
4828       /* "Dereference" to BLKmode memories.  */
4829       dstb = gen_rtx_MEM (BLKmode, dstb);
4830       set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4831       set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4832       srcb = gen_rtx_MEM (BLKmode, srcb);
4833       set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4834       set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4835 
4836       /* Copy.  */
4837       emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4838     }
4839 
4840   return const0_rtx;
4841 }
4842 
4843 /* Expand a call to one of the builtin functions __builtin_frame_address or
4844    __builtin_return_address.  */
4845 
4846 static rtx
4847 expand_builtin_frame_address (tree fndecl, tree exp)
4848 {
4849   /* The argument must be a nonnegative integer constant.
4850      It counts the number of frames to scan up the stack.
4851      The value is either the frame pointer value or the return
4852      address saved in that frame.  */
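  /* For example, __builtin_frame_address (0) yields the current function's
     frame pointer, while __builtin_return_address (1) asks for the caller's
     return address and triggers the -Wframe-address warning below.  */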
4853   if (call_expr_nargs (exp) == 0)
4854     /* Warning about missing arg was already issued.  */
4855     return const0_rtx;
4856   else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4857     {
4858       error ("invalid argument to %qD", fndecl);
4859       return const0_rtx;
4860     }
4861   else
4862     {
4863       /* Number of frames to scan up the stack.  */
4864       unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4865 
4866       rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4867 
4868       /* Some ports cannot access arbitrary stack frames.  */
4869       if (tem == NULL)
4870 	{
4871 	  warning (0, "unsupported argument to %qD", fndecl);
4872 	  return const0_rtx;
4873 	}
4874 
4875       if (count)
4876 	{
4877 	  /* Warn since no effort is made to ensure that any frame
4878 	     beyond the current one exists or can be safely reached.  */
4879 	  warning (OPT_Wframe_address, "calling %qD with "
4880 		   "a nonzero argument is unsafe", fndecl);
4881 	}
4882 
4883       /* For __builtin_frame_address, return what we've got.  */
4884       if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4885 	return tem;
4886 
4887       if (!REG_P (tem)
4888 	  && ! CONSTANT_P (tem))
4889 	tem = copy_addr_to_reg (tem);
4890       return tem;
4891     }
4892 }
4893 
4894 /* Expand EXP, a call to the alloca builtin.  Return NULL_RTX if we
4895    failed and the caller should emit a normal call.  */
4896 
4897 static rtx
4898 expand_builtin_alloca (tree exp)
4899 {
4900   rtx op0;
4901   rtx result;
4902   unsigned int align;
4903   tree fndecl = get_callee_fndecl (exp);
4904   HOST_WIDE_INT max_size;
4905   enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4906   bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
4907   bool valid_arglist
4908     = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
4909        ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
4910 			   VOID_TYPE)
4911        : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
4912 	 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4913 	 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4914 
4915   if (!valid_arglist)
4916     return NULL_RTX;
4917 
4918   /* Compute the argument.  */
4919   op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4920 
4921   /* Compute the alignment.  */
4922   align = (fcode == BUILT_IN_ALLOCA
4923 	   ? BIGGEST_ALIGNMENT
4924 	   : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
4925 
4926   /* Compute the maximum size.  */
4927   max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
4928               ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
4929               : -1);
4930 
4931   /* Allocate the desired space.  If the allocation stems from the declaration
4932      of a variable-sized object, it cannot accumulate.  */
4933   result
4934     = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
4935   result = convert_memory_address (ptr_mode, result);
4936 
4937   /* Dynamic allocations for variables are recorded during gimplification.  */
4938   if (!alloca_for_var && (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC))
4939     record_dynamic_alloc (exp);
4940 
4941   return result;
4942 }
4943 
4944 /* Emit a call to __asan_allocas_unpoison for EXP.  Add to the second
4945    argument of the call virtual_stack_dynamic_rtx - stack_pointer_rtx, which
4946    is the STACK_DYNAMIC_OFFSET value.  See the motivation for this in the
4947    comment on the handle_builtin_stack_restore function.  */
4948 
4949 static rtx
4950 expand_asan_emit_allocas_unpoison (tree exp)
4951 {
4952   tree arg0 = CALL_EXPR_ARG (exp, 0);
4953   tree arg1 = CALL_EXPR_ARG (exp, 1);
4954   rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
4955   rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
4956   rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
4957 				 stack_pointer_rtx, NULL_RTX, 0,
4958 				 OPTAB_LIB_WIDEN);
4959   off = convert_modes (ptr_mode, Pmode, off, 0);
4960   bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
4961 			     OPTAB_LIB_WIDEN);
4962   rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
4963   ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
4964 				 top, ptr_mode, bot, ptr_mode);
4965   return ret;
4966 }
4967 
4968 /* Expand a call to bswap builtin in EXP.
4969    Return NULL_RTX if a normal call should be emitted rather than expanding the
4970    function in-line.  If convenient, the result should be placed in TARGET.
4971    SUBTARGET may be used as the target for computing one of EXP's operands.  */
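/* For example, __builtin_bswap32 (0x12345678) evaluates to 0x78563412; the
   expansion below simply applies bswap_optab in TARGET_MODE.  */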
4972 
4973 static rtx
4974 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4975 		      rtx subtarget)
4976 {
4977   tree arg;
4978   rtx op0;
4979 
4980   if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4981     return NULL_RTX;
4982 
4983   arg = CALL_EXPR_ARG (exp, 0);
4984   op0 = expand_expr (arg,
4985 		     subtarget && GET_MODE (subtarget) == target_mode
4986 		     ? subtarget : NULL_RTX,
4987 		     target_mode, EXPAND_NORMAL);
4988   if (GET_MODE (op0) != target_mode)
4989     op0 = convert_to_mode (target_mode, op0, 1);
4990 
4991   target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4992 
4993   gcc_assert (target);
4994 
4995   return convert_to_mode (target_mode, target, 1);
4996 }
4997 
4998 /* Expand a call to a unary builtin in EXP.
4999    Return NULL_RTX if a normal call should be emitted rather than expanding the
5000    function in-line.  If convenient, the result should be placed in TARGET.
5001    SUBTARGET may be used as the target for computing one of EXP's operands.  */
5002 
5003 static rtx
5004 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
5005 		     rtx subtarget, optab op_optab)
5006 {
5007   rtx op0;
5008 
5009   if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
5010     return NULL_RTX;
5011 
5012   /* Compute the argument.  */
5013   op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
5014 		     (subtarget
5015 		      && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
5016 			  == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
5017 		     VOIDmode, EXPAND_NORMAL);
5018   /* Compute op, into TARGET if possible.
5019      Set TARGET to wherever the result comes back.  */
5020   target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
5021 			op_optab, op0, target, op_optab != clrsb_optab);
5022   gcc_assert (target);
5023 
5024   return convert_to_mode (target_mode, target, 0);
5025 }
5026 
5027 /* Expand a call to __builtin_expect.  We just return our argument
5028    as the builtin_expect semantics should have already been handled by
5029    the tree branch prediction pass.  */
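/* For example, by this point __builtin_expect (x, 1) expands to just the
   value of X; the hint was consumed earlier when branch probabilities were
   assigned.  */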
5030 
5031 static rtx
5032 expand_builtin_expect (tree exp, rtx target)
5033 {
5034   tree arg;
5035 
5036   if (call_expr_nargs (exp) < 2)
5037     return const0_rtx;
5038   arg = CALL_EXPR_ARG (exp, 0);
5039 
5040   target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5041   /* When guessing was done, the hints should be already stripped away.  */
5042   gcc_assert (!flag_guess_branch_prob
5043 	      || optimize == 0 || seen_error ());
5044   return target;
5045 }
5046 
5047 /* Expand a call to __builtin_expect_with_probability.  We just return our
5048    argument as the builtin_expect semantics should have already been handled
5049    by the tree branch prediction pass.  */
5050 
5051 static rtx
5052 expand_builtin_expect_with_probability (tree exp, rtx target)
5053 {
5054   tree arg;
5055 
5056   if (call_expr_nargs (exp) < 3)
5057     return const0_rtx;
5058   arg = CALL_EXPR_ARG (exp, 0);
5059 
5060   target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5061   /* When guessing was done, the hints should be already stripped away.  */
5062   gcc_assert (!flag_guess_branch_prob
5063 	      || optimize == 0 || seen_error ());
5064   return target;
5065 }
5066 
5067 
5068 /* Expand a call to __builtin_assume_aligned.  We just return our first
5069    argument as the builtin_assume_aligned semantic should've been already
5070    executed by CCP.  */
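/* For example, __builtin_assume_aligned (p, 16) simply returns P here; the
   alignment information was already used during earlier optimization.  */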
5071 
5072 static rtx
5073 expand_builtin_assume_aligned (tree exp, rtx target)
5074 {
5075   if (call_expr_nargs (exp) < 2)
5076     return const0_rtx;
5077   target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
5078 			EXPAND_NORMAL);
5079   gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
5080 	      && (call_expr_nargs (exp) < 3
5081 		  || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
5082   return target;
5083 }
5084 
5085 void
5086 expand_builtin_trap (void)
5087 {
5088   if (targetm.have_trap ())
5089     {
5090       rtx_insn *insn = emit_insn (targetm.gen_trap ());
5091       /* For trap insns when not accumulating outgoing args force
5092 	 REG_ARGS_SIZE note to prevent crossjumping of calls with
5093 	 different args sizes.  */
5094       if (!ACCUMULATE_OUTGOING_ARGS)
5095 	add_args_size_note (insn, stack_pointer_delta);
5096     }
5097   else
5098     {
5099       tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
5100       tree call_expr = build_call_expr (fn, 0);
5101       expand_call (call_expr, NULL_RTX, false);
5102     }
5103 
5104   emit_barrier ();
5105 }
5106 
5107 /* Expand a call to __builtin_unreachable.  We do nothing except emit
5108    a barrier saying that control flow will not pass here.
5109 
5110    It is the responsibility of the program being compiled to ensure
5111    that control flow never reaches __builtin_unreachable.  */
5112 static void
5113 expand_builtin_unreachable (void)
5114 {
5115   emit_barrier ();
5116 }
5117 
5118 /* Expand EXP, a call to fabs, fabsf or fabsl.
5119    Return NULL_RTX if a normal call should be emitted rather than expanding
5120    the function inline.  If convenient, the result should be placed
5121    in TARGET.  SUBTARGET may be used as the target for computing
5122    the operand.  */
5123 
5124 static rtx
5125 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
5126 {
5127   machine_mode mode;
5128   tree arg;
5129   rtx op0;
5130 
5131   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5132     return NULL_RTX;
5133 
5134   arg = CALL_EXPR_ARG (exp, 0);
5135   CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
5136   mode = TYPE_MODE (TREE_TYPE (arg));
5137   op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5138   return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
5139 }
5140 
5141 /* Expand EXP, a call to copysign, copysignf, or copysignl.
5142    Return NULL if a normal call should be emitted rather than expanding the
5143    function inline.  If convenient, the result should be placed in TARGET.
5144    SUBTARGET may be used as the target for computing the operand.  */
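/* For example, copysign (3.0, -0.5) yields -3.0: the magnitude of the first
   argument combined with the sign of the second.  */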
5145 
5146 static rtx
5147 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
5148 {
5149   rtx op0, op1;
5150   tree arg;
5151 
5152   if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
5153     return NULL_RTX;
5154 
5155   arg = CALL_EXPR_ARG (exp, 0);
5156   op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
5157 
5158   arg = CALL_EXPR_ARG (exp, 1);
5159   op1 = expand_normal (arg);
5160 
5161   return expand_copysign (op0, op1, target);
5162 }
5163 
5164 /* Emit a call to __builtin___clear_cache.  */
5165 
5166 void
5167 default_emit_call_builtin___clear_cache (rtx begin, rtx end)
5168 {
5169   rtx callee = gen_rtx_SYMBOL_REF (Pmode,
5170 				   BUILTIN_ASM_NAME_PTR
5171 				   (BUILT_IN_CLEAR_CACHE));
5172 
5173   emit_library_call (callee,
5174 		     LCT_NORMAL, VOIDmode,
5175 		     convert_memory_address (ptr_mode, begin), ptr_mode,
5176 		     convert_memory_address (ptr_mode, end), ptr_mode);
5177 }
5178 
5179 /* Emit a call to __builtin___clear_cache, unless the target specifies
5180    it as do-nothing.  This function can be used by trampoline
5181    finalizers to duplicate the effects of expanding a call to the
5182    clear_cache builtin.  */
5183 
5184 void
5185 maybe_emit_call_builtin___clear_cache (rtx begin, rtx end)
5186 {
5187   gcc_assert ((GET_MODE (begin) == ptr_mode || GET_MODE (begin) == Pmode
5188 	       || CONST_INT_P (begin))
5189 	      && (GET_MODE (end) == ptr_mode || GET_MODE (end) == Pmode
5190 		  || CONST_INT_P (end)));
5191 
5192   if (targetm.have_clear_cache ())
5193     {
5194       /* We have a "clear_cache" insn, and it will handle everything.  */
5195       class expand_operand ops[2];
5196 
5197       create_address_operand (&ops[0], begin);
5198       create_address_operand (&ops[1], end);
5199 
5200       if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
5201 	return;
5202     }
5203   else
5204     {
5205 #ifndef CLEAR_INSN_CACHE
5206       /* There is no "clear_cache" insn, and __clear_cache() in libgcc
5207 	 does nothing.  There is no need to call it.  Do nothing.  */
5208       return;
5209 #endif /* CLEAR_INSN_CACHE */
5210     }
5211 
5212   targetm.calls.emit_call_builtin___clear_cache (begin, end);
5213 }
5214 
5215 /* Expand a call to __builtin___clear_cache.  */
5216 
5217 static void
5218 expand_builtin___clear_cache (tree exp)
5219 {
5220   tree begin, end;
5221   rtx begin_rtx, end_rtx;
5222 
5223   /* We must not expand to a library call.  If we did, any
5224      fallback library function in libgcc that might contain a call to
5225      __builtin___clear_cache() would recurse infinitely.  */
5226   if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
5227     {
5228       error ("both arguments to %<__builtin___clear_cache%> must be pointers");
5229       return;
5230     }
5231 
5232   begin = CALL_EXPR_ARG (exp, 0);
5233   begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
5234 
5235   end = CALL_EXPR_ARG (exp, 1);
5236   end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
5237 
5238   maybe_emit_call_builtin___clear_cache (begin_rtx, end_rtx);
5239 }
5240 
5241 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT.  */
5242 
5243 static rtx
5244 round_trampoline_addr (rtx tramp)
5245 {
5246   rtx temp, addend, mask;
5247 
5248   /* If we don't need too much alignment, we'll have been guaranteed
5249      proper alignment by get_trampoline_type.  */
5250   if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
5251     return tramp;
5252 
5253   /* Round address up to desired boundary.  */
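  /* For example, with a 64-bit TRAMPOLINE_ALIGNMENT this computes
     (TRAMP + 7) & -8, rounding TRAMP up to the next multiple of 8 bytes.  */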
5254   temp = gen_reg_rtx (Pmode);
5255   addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
5256   mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
5257 
5258   temp  = expand_simple_binop (Pmode, PLUS, tramp, addend,
5259 			       temp, 0, OPTAB_LIB_WIDEN);
5260   tramp = expand_simple_binop (Pmode, AND, temp, mask,
5261 			       temp, 0, OPTAB_LIB_WIDEN);
5262 
5263   return tramp;
5264 }
5265 
5266 static rtx
5267 expand_builtin_init_trampoline (tree exp, bool onstack)
5268 {
5269   tree t_tramp, t_func, t_chain;
5270   rtx m_tramp, r_tramp, r_chain, tmp;
5271 
5272   if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
5273 			 POINTER_TYPE, VOID_TYPE))
5274     return NULL_RTX;
5275 
5276   t_tramp = CALL_EXPR_ARG (exp, 0);
5277   t_func = CALL_EXPR_ARG (exp, 1);
5278   t_chain = CALL_EXPR_ARG (exp, 2);
5279 
5280   r_tramp = expand_normal (t_tramp);
5281   m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
5282   MEM_NOTRAP_P (m_tramp) = 1;
5283 
5284   /* If ONSTACK, the TRAMP argument should be the address of a field
5285      within the local function's FRAME decl.  Either way, let's see if
5286      we can fill in the MEM_ATTRs for this memory.  */
5287   if (TREE_CODE (t_tramp) == ADDR_EXPR)
5288     set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
5289 
5290   /* Creator of a heap trampoline is responsible for making sure the
5291      address is aligned to at least STACK_BOUNDARY.  Normally malloc
5292      will ensure this anyhow.  */
5293   tmp = round_trampoline_addr (r_tramp);
5294   if (tmp != r_tramp)
5295     {
5296       m_tramp = change_address (m_tramp, BLKmode, tmp);
5297       set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
5298       set_mem_size (m_tramp, TRAMPOLINE_SIZE);
5299     }
5300 
5301   /* The FUNC argument should be the address of the nested function.
5302      Extract the actual function decl to pass to the hook.  */
5303   gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
5304   t_func = TREE_OPERAND (t_func, 0);
5305   gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
5306 
5307   r_chain = expand_normal (t_chain);
5308 
5309   /* Generate insns to initialize the trampoline.  */
5310   targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
5311 
5312   if (onstack)
5313     {
5314       trampolines_created = 1;
5315 
5316       if (targetm.calls.custom_function_descriptors != 0)
5317 	warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
5318 		    "trampoline generated for nested function %qD", t_func);
5319     }
5320 
5321   return const0_rtx;
5322 }
5323 
5324 static rtx
5325 expand_builtin_adjust_trampoline (tree exp)
5326 {
5327   rtx tramp;
5328 
5329   if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5330     return NULL_RTX;
5331 
5332   tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5333   tramp = round_trampoline_addr (tramp);
5334   if (targetm.calls.trampoline_adjust_address)
5335     tramp = targetm.calls.trampoline_adjust_address (tramp);
5336 
5337   return tramp;
5338 }
5339 
5340 /* Expand a call to the builtin descriptor initialization routine.
5341    A descriptor is made up of a pair of pointers: the static
5342    chain and the code entry, in this order.  */
5343 
5344 static rtx
5345 expand_builtin_init_descriptor (tree exp)
5346 {
5347   tree t_descr, t_func, t_chain;
5348   rtx m_descr, r_descr, r_func, r_chain;
5349 
5350   if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
5351 			 VOID_TYPE))
5352     return NULL_RTX;
5353 
5354   t_descr = CALL_EXPR_ARG (exp, 0);
5355   t_func = CALL_EXPR_ARG (exp, 1);
5356   t_chain = CALL_EXPR_ARG (exp, 2);
5357 
5358   r_descr = expand_normal (t_descr);
5359   m_descr = gen_rtx_MEM (BLKmode, r_descr);
5360   MEM_NOTRAP_P (m_descr) = 1;
5361   set_mem_align (m_descr, GET_MODE_ALIGNMENT (ptr_mode));
5362 
5363   r_func = expand_normal (t_func);
5364   r_chain = expand_normal (t_chain);
5365 
5366   /* Generate insns to initialize the descriptor.  */
5367   emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
5368   emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
5369 				     POINTER_SIZE / BITS_PER_UNIT), r_func);
5370 
5371   return const0_rtx;
5372 }
5373 
5374 /* Expand a call to the builtin descriptor adjustment routine.  */
5375 
5376 static rtx
5377 expand_builtin_adjust_descriptor (tree exp)
5378 {
5379   rtx tramp;
5380 
5381   if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5382     return NULL_RTX;
5383 
5384   tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
5385 
5386   /* Unalign the descriptor to allow runtime identification.  */
5387   tramp = plus_constant (ptr_mode, tramp,
5388 			 targetm.calls.custom_function_descriptors);
5389 
5390   return force_operand (tramp, NULL_RTX);
5391 }
5392 
5393 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
5394    function.  The function first checks whether the back end provides
5395    an insn to implement signbit for the respective mode.  If not, it
5396    checks whether the floating point format of the value is such that
5397    the sign bit can be extracted.  If that is not the case, error out.
5398    EXP is the expression that is a call to the builtin function; if
5399    convenient, the result should be placed in TARGET.  */
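/* For IEEE double, for instance, the sign bit is bit 63; since that lies
   outside the lowpart of the (typically 32-bit) integer result mode, the
   shift-and-AND path below is used rather than a single AND.  */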
5400 static rtx
5401 expand_builtin_signbit (tree exp, rtx target)
5402 {
5403   const struct real_format *fmt;
5404   scalar_float_mode fmode;
5405   scalar_int_mode rmode, imode;
5406   tree arg;
5407   int word, bitpos;
5408   enum insn_code icode;
5409   rtx temp;
5410   location_t loc = EXPR_LOCATION (exp);
5411 
5412   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
5413     return NULL_RTX;
5414 
5415   arg = CALL_EXPR_ARG (exp, 0);
5416   fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
5417   rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
5418   fmt = REAL_MODE_FORMAT (fmode);
5419 
5420   arg = builtin_save_expr (arg);
5421 
5422   /* Expand the argument, yielding an RTX expression.  */
5423   temp = expand_normal (arg);
5424 
5425   /* Check if the back end provides an insn that handles signbit for the
5426      argument's mode. */
5427   icode = optab_handler (signbit_optab, fmode);
5428   if (icode != CODE_FOR_nothing)
5429     {
5430       rtx_insn *last = get_last_insn ();
5431       target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5432       if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
5433 	return target;
5434       delete_insns_since (last);
5435     }
5436 
5437   /* For floating point formats without a sign bit, implement signbit
5438      as "ARG < 0.0".  */
5439   bitpos = fmt->signbit_ro;
5440   if (bitpos < 0)
5441   {
5442     /* But we can't do this if the format supports signed zero.  */
5443     gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
5444 
5445     arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
5446 		       build_real (TREE_TYPE (arg), dconst0));
5447     return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
5448   }
5449 
5450   if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
5451     {
5452       imode = int_mode_for_mode (fmode).require ();
5453       temp = gen_lowpart (imode, temp);
5454     }
5455   else
5456     {
5457       imode = word_mode;
5458       /* Handle targets with different FP word orders.  */
5459       if (FLOAT_WORDS_BIG_ENDIAN)
5460 	word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
5461       else
5462 	word = bitpos / BITS_PER_WORD;
5463       temp = operand_subword_force (temp, word, fmode);
5464       bitpos = bitpos % BITS_PER_WORD;
5465     }
5466 
5467   /* Force the intermediate word_mode (or narrower) result into a
5468      register.  This avoids attempting to create paradoxical SUBREGs
5469      of floating point modes below.  */
5470   temp = force_reg (imode, temp);
5471 
5472   /* If the bitpos is within the "result mode" lowpart, the operation
5473      can be implemented with a single bitwise AND.  Otherwise, we need
5474      a right shift and an AND.  */
5475 
5476   if (bitpos < GET_MODE_BITSIZE (rmode))
5477     {
5478       wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
5479 
5480       if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
5481 	temp = gen_lowpart (rmode, temp);
5482       temp = expand_binop (rmode, and_optab, temp,
5483 			   immed_wide_int_const (mask, rmode),
5484 			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
5485     }
5486   else
5487     {
5488       /* Perform a logical right shift to place the signbit in the least
5489 	 significant bit, then truncate the result to the desired mode
5490 	 and mask just this bit.  */
5491       temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
5492       temp = gen_lowpart (rmode, temp);
5493       temp = expand_binop (rmode, and_optab, temp, const1_rtx,
5494 			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
5495     }
5496 
5497   return temp;
5498 }
5499 
5500 /* Expand fork or exec calls.  TARGET is the desired target of the
5501    call.  EXP is the call. FN is the
5502    identifier of the actual function.  IGNORE is nonzero if the
5503    value is to be ignored.  */
5504 
5505 static rtx
5506 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
5507 {
5508   tree id, decl;
5509   tree call;
5510 
5511   /* If we are not profiling, just call the function.  */
5512   if (!profile_arc_flag)
5513     return NULL_RTX;
5514 
5515   /* Otherwise call the wrapper.  This should be equivalent for the rest of
5516      the compiler, so the code does not diverge, and the wrapper may run the
5517      code necessary for keeping the profiling sane.  */
5518 
5519   switch (DECL_FUNCTION_CODE (fn))
5520     {
5521     case BUILT_IN_FORK:
5522       id = get_identifier ("__gcov_fork");
5523       break;
5524 
5525     case BUILT_IN_EXECL:
5526       id = get_identifier ("__gcov_execl");
5527       break;
5528 
5529     case BUILT_IN_EXECV:
5530       id = get_identifier ("__gcov_execv");
5531       break;
5532 
5533     case BUILT_IN_EXECLP:
5534       id = get_identifier ("__gcov_execlp");
5535       break;
5536 
5537     case BUILT_IN_EXECLE:
5538       id = get_identifier ("__gcov_execle");
5539       break;
5540 
5541     case BUILT_IN_EXECVP:
5542       id = get_identifier ("__gcov_execvp");
5543       break;
5544 
5545     case BUILT_IN_EXECVE:
5546       id = get_identifier ("__gcov_execve");
5547       break;
5548 
5549     default:
5550       gcc_unreachable ();
5551     }
5552 
5553   decl = build_decl (DECL_SOURCE_LOCATION (fn),
5554 		     FUNCTION_DECL, id, TREE_TYPE (fn));
5555   DECL_EXTERNAL (decl) = 1;
5556   TREE_PUBLIC (decl) = 1;
5557   DECL_ARTIFICIAL (decl) = 1;
5558   TREE_NOTHROW (decl) = 1;
5559   DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
5560   DECL_VISIBILITY_SPECIFIED (decl) = 1;
5561   call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
5562   return expand_call (call, target, ignore);
5563  }
5564 
5565 
5566 
5567 /* Reconstitute a mode for a __sync intrinsic operation.  Since the type of
5568    the pointer in these functions is void*, the tree optimizers may remove
5569    casts.  The mode computed in expand_builtin isn't reliable either, due
5570    to __sync_bool_compare_and_swap.
5571 
5572    FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
5573    group of builtins.  This gives us log2 of the mode size.  */
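/* For example, __sync_fetch_and_add_4 gives FCODE_DIFF == 2, so with the
   usual 8-bit unit this requests a 32-bit (8 << 2) integer mode, SImode on
   most targets.  */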
5574 
5575 static inline machine_mode
5576 get_builtin_sync_mode (int fcode_diff)
5577 {
5578   /* The size is not negotiable, so ask not to get BLKmode in return
5579      if the target indicates that a smaller size would be better.  */
5580   return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
5581 }
5582 
5583 /* Expand the memory expression LOC and return the appropriate memory operand
5584    for the builtin_sync operations.  */
5585 
5586 static rtx
5587 get_builtin_sync_mem (tree loc, machine_mode mode)
5588 {
5589   rtx addr, mem;
5590   int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
5591 				    ? TREE_TYPE (TREE_TYPE (loc))
5592 				    : TREE_TYPE (loc));
5593   scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);
5594 
5595   addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
5596   addr = convert_memory_address (addr_mode, addr);
5597 
5598   /* Note that we explicitly do not want any alias information for this
5599      memory, so that we kill all other live memories.  Otherwise we don't
5600      satisfy the full barrier semantics of the intrinsic.  */
5601   mem = gen_rtx_MEM (mode, addr);
5602 
5603   set_mem_addr_space (mem, addr_space);
5604 
5605   mem = validize_mem (mem);
5606 
5607   /* The alignment needs to be at least according to that of the mode.  */
5608   set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
5609 			   get_pointer_alignment (loc)));
5610   set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
5611   MEM_VOLATILE_P (mem) = 1;
5612 
5613   return mem;
5614 }
5615 
5616 /* Make sure an argument is in the right mode.
5617    EXP is the tree argument.
5618    MODE is the mode it should be in.  */
5619 
5620 static rtx
5621 expand_expr_force_mode (tree exp, machine_mode mode)
5622 {
5623   rtx val;
5624   machine_mode old_mode;
5625 
5626   if (TREE_CODE (exp) == SSA_NAME
5627       && TYPE_MODE (TREE_TYPE (exp)) != mode)
5628     {
5629       /* Undo argument promotion if possible, as combine might not
5630 	 be able to do it later due to MEM_VOLATILE_P uses in the
5631 	 patterns.  */
5632       gimple *g = get_gimple_for_ssa_name (exp);
5633       if (g && gimple_assign_cast_p (g))
5634 	{
5635 	  tree rhs = gimple_assign_rhs1 (g);
5636 	  tree_code code = gimple_assign_rhs_code (g);
5637 	  if (CONVERT_EXPR_CODE_P (code)
5638 	      && TYPE_MODE (TREE_TYPE (rhs)) == mode
5639 	      && INTEGRAL_TYPE_P (TREE_TYPE (exp))
5640 	      && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
5641 	      && (TYPE_PRECISION (TREE_TYPE (exp))
5642 		  > TYPE_PRECISION (TREE_TYPE (rhs))))
5643 	    exp = rhs;
5644 	}
5645     }
5646 
5647   val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
5648   /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
5649      of CONST_INTs, where we know the old_mode only from the call argument.  */
5650 
5651   old_mode = GET_MODE (val);
5652   if (old_mode == VOIDmode)
5653     old_mode = TYPE_MODE (TREE_TYPE (exp));
5654   val = convert_modes (mode, old_mode, val, 1);
5655   return val;
5656 }
5657 
5658 
5659 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
5660    EXP is the CALL_EXPR.  CODE is the rtx code
5661    that corresponds to the arithmetic or logical operation from the name;
5662    an exception here is that NOT actually means NAND.  TARGET is an optional
5663    place for us to store the results; AFTER is true if this is the
5664    xxx_and_fetch form (i.e. the value after the operation is returned).  */
5665 
5666 static rtx
5667 expand_builtin_sync_operation (machine_mode mode, tree exp,
5668 			       enum rtx_code code, bool after,
5669 			       rtx target)
5670 {
5671   rtx val, mem;
5672   location_t loc = EXPR_LOCATION (exp);
5673 
5674   if (code == NOT && warn_sync_nand)
5675     {
5676       tree fndecl = get_callee_fndecl (exp);
5677       enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5678 
5679       static bool warned_f_a_n, warned_n_a_f;
5680 
5681       switch (fcode)
5682 	{
5683 	case BUILT_IN_SYNC_FETCH_AND_NAND_1:
5684 	case BUILT_IN_SYNC_FETCH_AND_NAND_2:
5685 	case BUILT_IN_SYNC_FETCH_AND_NAND_4:
5686 	case BUILT_IN_SYNC_FETCH_AND_NAND_8:
5687 	case BUILT_IN_SYNC_FETCH_AND_NAND_16:
5688 	  if (warned_f_a_n)
5689 	    break;
5690 
5691 	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
5692 	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5693 	  warned_f_a_n = true;
5694 	  break;
5695 
5696 	case BUILT_IN_SYNC_NAND_AND_FETCH_1:
5697 	case BUILT_IN_SYNC_NAND_AND_FETCH_2:
5698 	case BUILT_IN_SYNC_NAND_AND_FETCH_4:
5699 	case BUILT_IN_SYNC_NAND_AND_FETCH_8:
5700 	case BUILT_IN_SYNC_NAND_AND_FETCH_16:
5701 	  if (warned_n_a_f)
5702 	    break;
5703 
5704 	 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
5705 	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
5706 	  warned_n_a_f = true;
5707 	  break;
5708 
5709 	default:
5710 	  gcc_unreachable ();
5711 	}
5712     }
5713 
5714   /* Expand the operands.  */
5715   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5716   val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5717 
5718   return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
5719 				 after);
5720 }
5721 
5722 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
5723    intrinsics. EXP is the CALL_EXPR.  IS_BOOL is
5724    true if this is the boolean form.  TARGET is a place for us to store the
5725    results; this is NOT optional if IS_BOOL is true.  */
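/* For example, __sync_bool_compare_and_swap (p, old, new) returns the
   success flag (PBOOL below), while __sync_val_compare_and_swap returns the
   prior contents of *P (POVAL).  */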
5726 
5727 static rtx
5728 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
5729 				 bool is_bool, rtx target)
5730 {
5731   rtx old_val, new_val, mem;
5732   rtx *pbool, *poval;
5733 
5734   /* Expand the operands.  */
5735   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5736   old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5737   new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5738 
5739   pbool = poval = NULL;
5740   if (target != const0_rtx)
5741     {
5742       if (is_bool)
5743 	pbool = &target;
5744       else
5745 	poval = &target;
5746     }
5747   if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
5748 				       false, MEMMODEL_SYNC_SEQ_CST,
5749 				       MEMMODEL_SYNC_SEQ_CST))
5750     return NULL_RTX;
5751 
5752   return target;
5753 }
5754 
5755 /* Expand the __sync_lock_test_and_set intrinsic.  Note that the most
5756    general form is actually an atomic exchange, and some targets only
5757    support a reduced form with the second argument being a constant 1.
5758    EXP is the CALL_EXPR; TARGET is an optional place for us to store
5759    the results.  */
5760 
5761 static rtx
5762 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5763 				       rtx target)
5764 {
5765   rtx val, mem;
5766 
5767   /* Expand the operands.  */
5768   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5769   val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5770 
5771   return expand_sync_lock_test_and_set (target, mem, val);
5772 }
5773 
5774 /* Expand the __sync_lock_release intrinsic.  EXP is the CALL_EXPR.  */
5775 
5776 static void
5777 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5778 {
5779   rtx mem;
5780 
5781   /* Expand the operands.  */
5782   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5783 
5784   expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5785 }
5786 
5787 /* Given an integer representing an ``enum memmodel'', verify its
5788    correctness and return the memory model enum.  */
5789 
5790 static enum memmodel
5791 get_memmodel (tree exp)
5792 {
5793   rtx op;
5794   unsigned HOST_WIDE_INT val;
5795   location_t loc
5796     = expansion_point_location_if_in_system_header (input_location);
5797 
5798   /* If the parameter is not a constant, it's a run time value so we'll just
5799      convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking.  */
5800   if (TREE_CODE (exp) != INTEGER_CST)
5801     return MEMMODEL_SEQ_CST;
5802 
5803   op = expand_normal (exp);
5804 
5805   val = INTVAL (op);
5806   if (targetm.memmodel_check)
5807     val = targetm.memmodel_check (val);
5808   else if (val & ~MEMMODEL_MASK)
5809     {
5810       warning_at (loc, OPT_Winvalid_memory_model,
5811 		  "unknown architecture specifier in memory model to builtin");
5812       return MEMMODEL_SEQ_CST;
5813     }
5814 
5815   /* Should never see a user explicit SYNC memory model, so >= LAST works.  */
5816   if (memmodel_base (val) >= MEMMODEL_LAST)
5817     {
5818       warning_at (loc, OPT_Winvalid_memory_model,
5819 		  "invalid memory model argument to builtin");
5820       return MEMMODEL_SEQ_CST;
5821     }
5822 
5823   /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5824      be conservative and promote consume to acquire.  */
5825   if (val == MEMMODEL_CONSUME)
5826     val = MEMMODEL_ACQUIRE;
5827 
5828   return (enum memmodel) val;
5829 }
5830 
5831 /* Expand the __atomic_exchange intrinsic:
5832    	TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5833    EXP is the CALL_EXPR.
5834    TARGET is an optional place for us to store the results.  */
5835 
5836 static rtx
5837 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5838 {
5839   rtx val, mem;
5840   enum memmodel model;
5841 
5842   model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5843 
5844   if (!flag_inline_atomics)
5845     return NULL_RTX;
5846 
5847   /* Expand the operands.  */
5848   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5849   val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5850 
5851   return expand_atomic_exchange (target, mem, val, model);
5852 }
5853 
5854 /* Expand the __atomic_compare_exchange intrinsic:
5855    	bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5856 					TYPE desired, BOOL weak,
5857 					enum memmodel success,
5858 					enum memmodel failure)
5859    EXP is the CALL_EXPR.
5860    TARGET is an optional place for us to store the results.  */
5861 
5862 static rtx
5863 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5864 					rtx target)
5865 {
5866   rtx expect, desired, mem, oldval;
5867   rtx_code_label *label;
5868   enum memmodel success, failure;
5869   tree weak;
5870   bool is_weak;
5871   location_t loc
5872     = expansion_point_location_if_in_system_header (input_location);
5873 
5874   success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5875   failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5876 
5877   if (failure > success)
5878     {
5879       warning_at (loc, OPT_Winvalid_memory_model,
5880 		  "failure memory model cannot be stronger than success "
5881 		  "memory model for %<__atomic_compare_exchange%>");
5882       success = MEMMODEL_SEQ_CST;
5883     }
5884 
5885   if (is_mm_release (failure) || is_mm_acq_rel (failure))
5886     {
5887       warning_at (loc, OPT_Winvalid_memory_model,
5888 		  "invalid failure memory model for "
5889 		  "%<__atomic_compare_exchange%>");
5890       failure = MEMMODEL_SEQ_CST;
5891       success = MEMMODEL_SEQ_CST;
5892     }
5893 
5894 
5895   if (!flag_inline_atomics)
5896     return NULL_RTX;
5897 
5898   /* Expand the operands.  */
5899   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5900 
5901   expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5902   expect = convert_memory_address (Pmode, expect);
5903   expect = gen_rtx_MEM (mode, expect);
5904   desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5905 
5906   weak = CALL_EXPR_ARG (exp, 3);
5907   is_weak = false;
5908   if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5909     is_weak = true;
5910 
5911   if (target == const0_rtx)
5912     target = NULL;
5913 
5914   /* Lest the rtl backend create a race condition with an improper store
5915      to memory, always create a new pseudo for OLDVAL.  */
5916   oldval = NULL;
5917 
5918   if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5919 				       is_weak, success, failure))
5920     return NULL_RTX;
5921 
5922   /* Conditionally store back to EXPECT, lest we create a race condition
5923      with an improper store to memory.  */
5924   /* ??? With a rearrangement of atomics at the gimple level, we can handle
5925      the normal case where EXPECT is totally private, i.e. a register.  At
5926      which point the store can be unconditional.  */
5927   label = gen_label_rtx ();
5928   emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5929 			   GET_MODE (target), 1, label);
5930   emit_move_insn (expect, oldval);
5931   emit_label (label);
5932 
5933   return target;
5934 }
5935 
5936 /* Helper function for expand_ifn_atomic_compare_exchange - expand
5937    internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
5938    call.  The weak parameter must be dropped to match the expected parameter
5939    list and the expected argument changed from value to pointer to memory
5940    slot.  */
5941 
5942 static void
5943 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
5944 {
5945   unsigned int z;
5946   vec<tree, va_gc> *vec;
5947 
5948   vec_alloc (vec, 5);
5949   vec->quick_push (gimple_call_arg (call, 0));
5950   tree expected = gimple_call_arg (call, 1);
5951   rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
5952 				      TREE_TYPE (expected));
5953   rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
5954   if (expd != x)
5955     emit_move_insn (x, expd);
5956   tree v = make_tree (TREE_TYPE (expected), x);
5957   vec->quick_push (build1 (ADDR_EXPR,
5958 			   build_pointer_type (TREE_TYPE (expected)), v));
5959   vec->quick_push (gimple_call_arg (call, 2));
5960   /* Skip the boolean weak parameter.  */
5961   for (z = 4; z < 6; z++)
5962     vec->quick_push (gimple_call_arg (call, z));
5963   /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}.  */
5964   unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
5965   gcc_assert (bytes_log2 < 5);
5966   built_in_function fncode
5967     = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
5968 			   + bytes_log2);
5969   tree fndecl = builtin_decl_explicit (fncode);
5970   tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
5971 		    fndecl);
5972   tree exp = build_call_vec (boolean_type_node, fn, vec);
5973   tree lhs = gimple_call_lhs (call);
5974   rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
5975   if (lhs)
5976     {
5977       rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
5978       if (GET_MODE (boolret) != mode)
5979 	boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
5980       x = force_reg (mode, x);
5981       write_complex_part (target, boolret, true);
5982       write_complex_part (target, x, false);
5983     }
5984 }
5985 
5986 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function.  */
5987 
5988 void
5989 expand_ifn_atomic_compare_exchange (gcall *call)
5990 {
5991   int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
5992   gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
5993   machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
5994   rtx expect, desired, mem, oldval, boolret;
5995   enum memmodel success, failure;
5996   tree lhs;
5997   bool is_weak;
5998   location_t loc
5999     = expansion_point_location_if_in_system_header (gimple_location (call));
6000 
6001   success = get_memmodel (gimple_call_arg (call, 4));
6002   failure = get_memmodel (gimple_call_arg (call, 5));
6003 
6004   if (failure > success)
6005     {
6006       warning_at (loc, OPT_Winvalid_memory_model,
6007 		  "failure memory model cannot be stronger than success "
6008 		  "memory model for %<__atomic_compare_exchange%>");
6009       success = MEMMODEL_SEQ_CST;
6010     }
6011 
6012   if (is_mm_release (failure) || is_mm_acq_rel (failure))
6013     {
6014       warning_at (loc, OPT_Winvalid_memory_model,
6015 		  "invalid failure memory model for "
6016 		  "%<__atomic_compare_exchange%>");
6017       failure = MEMMODEL_SEQ_CST;
6018       success = MEMMODEL_SEQ_CST;
6019     }
6020 
6021   if (!flag_inline_atomics)
6022     {
6023       expand_ifn_atomic_compare_exchange_into_call (call, mode);
6024       return;
6025     }
6026 
6027   /* Expand the operands.  */
6028   mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
6029 
6030   expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
6031   desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
6032 
6033   is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
6034 
6035   boolret = NULL;
6036   oldval = NULL;
6037 
6038   if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
6039 				       is_weak, success, failure))
6040     {
6041       expand_ifn_atomic_compare_exchange_into_call (call, mode);
6042       return;
6043     }
6044 
6045   lhs = gimple_call_lhs (call);
6046   if (lhs)
6047     {
6048       rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6049       if (GET_MODE (boolret) != mode)
6050 	boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
6051       write_complex_part (target, boolret, true);
6052       write_complex_part (target, oldval, false);
6053     }
6054 }
6055 
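/* Illustrative note (not part of the GCC sources): judging from the
   decoding above, argument 3 of the internal function packs the access
   size in its low 8 bits and the weak flag in bit 8.  For example

       __atomic_compare_exchange_n (&u32, &exp, des, true,
				    __ATOMIC_ACQ_REL, __ATOMIC_ACQUIRE);

   would reach this expander with argument 3 equal to 4 | 256 and the two
   memory models as arguments 4 and 5.  */
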
6056 /* Expand the __atomic_load intrinsic:
6057    	TYPE __atomic_load (TYPE *object, enum memmodel)
6058    EXP is the CALL_EXPR.
6059    TARGET is an optional place for us to store the results.  */
6060 
6061 static rtx
6062 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
6063 {
6064   rtx mem;
6065   enum memmodel model;
6066 
6067   model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6068   if (is_mm_release (model) || is_mm_acq_rel (model))
6069     {
6070       location_t loc
6071 	= expansion_point_location_if_in_system_header (input_location);
6072       warning_at (loc, OPT_Winvalid_memory_model,
6073 		  "invalid memory model for %<__atomic_load%>");
6074       model = MEMMODEL_SEQ_CST;
6075     }
6076 
6077   if (!flag_inline_atomics)
6078     return NULL_RTX;
6079 
6080   /* Expand the operand.  */
6081   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6082 
6083   return expand_atomic_load (target, mem, model);
6084 }
6085 
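/* Illustrative note (not part of the GCC sources): only relaxed, consume,
   acquire and seq_cst orders make sense for a load, so a call such as

       v = __atomic_load_n (&x, __ATOMIC_RELEASE);

   is diagnosed above with -Winvalid-memory-model and expanded as if
   seq_cst had been requested.  */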
6086 
6087 /* Expand the __atomic_store intrinsic:
6088    	void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
6089    EXP is the CALL_EXPR.
6090    TARGET is an optional place for us to store the results.  */
6091 
6092 static rtx
6093 expand_builtin_atomic_store (machine_mode mode, tree exp)
6094 {
6095   rtx mem, val;
6096   enum memmodel model;
6097 
6098   model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6099   if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
6100 	|| is_mm_release (model)))
6101     {
6102       location_t loc
6103 	= expansion_point_location_if_in_system_header (input_location);
6104       warning_at (loc, OPT_Winvalid_memory_model,
6105 		  "invalid memory model for %<__atomic_store%>");
6106       model = MEMMODEL_SEQ_CST;
6107     }
6108 
6109   if (!flag_inline_atomics)
6110     return NULL_RTX;
6111 
6112   /* Expand the operands.  */
6113   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6114   val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6115 
6116   return expand_atomic_store (mem, val, model, false);
6117 }
6118 
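/* Illustrative note (not part of the GCC sources): the dual restriction
   applies to stores, where only relaxed, release and seq_cst are valid, so

       __atomic_store_n (&x, 0, __ATOMIC_ACQUIRE);

   is likewise diagnosed above and expanded with seq_cst instead.  */
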
6119 /* Expand the __atomic_fetch_XXX intrinsic:
6120    	TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
6121    EXP is the CALL_EXPR.
6122    TARGET is an optional place for us to store the results.
6123    CODE is the operation: PLUS, MINUS, AND, IOR, XOR, or NOT (for the nand variants).
6124    FETCH_AFTER is true if returning the result of the operation.
6125    FETCH_AFTER is false if returning the value before the operation.
6126    IGNORE is true if the result is not used.
6127    EXT_CALL is the correct builtin for an external call if this cannot be
6128    resolved to an instruction sequence.  */
6129 
6130 static rtx
6131 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
6132 				enum rtx_code code, bool fetch_after,
6133 				bool ignore, enum built_in_function ext_call)
6134 {
6135   rtx val, mem, ret;
6136   enum memmodel model;
6137   tree fndecl;
6138   tree addr;
6139 
6140   model = get_memmodel (CALL_EXPR_ARG (exp, 2));
6141 
6142   /* Expand the operands.  */
6143   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6144   val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
6145 
6146   /* Only try generating instructions if inlining is turned on.  */
6147   if (flag_inline_atomics)
6148     {
6149       ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
6150       if (ret)
6151 	return ret;
6152     }
6153 
6154   /* Return if a different routine isn't needed for the library call.  */
6155   if (ext_call == BUILT_IN_NONE)
6156     return NULL_RTX;
6157 
6158   /* Change the call to the specified function.  */
6159   fndecl = get_callee_fndecl (exp);
6160   addr = CALL_EXPR_FN (exp);
6161   STRIP_NOPS (addr);
6162 
6163   gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
6164   TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
6165 
6166   /* If we will emit code after the call, the call cannot be a tail call.
6167      If it is emitted as a tail call, a barrier is emitted after it, and
6168      then all trailing code is removed.  */
6169   if (!ignore)
6170     CALL_EXPR_TAILCALL (exp) = 0;
6171 
6172   /* Expand the call here so we can emit trailing code.  */
6173   ret = expand_call (exp, target, ignore);
6174 
6175   /* Replace the original function just in case it matters.  */
6176   TREE_OPERAND (addr, 0) = fndecl;
6177 
6178   /* Then issue the arithmetic correction to return the right result.  */
6179   if (!ignore)
6180     {
6181       if (code == NOT)
6182 	{
6183 	  ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
6184 				     OPTAB_LIB_WIDEN);
6185 	  ret = expand_simple_unop (mode, NOT, ret, target, true);
6186 	}
6187       else
6188 	ret = expand_simple_binop (mode, code, ret, val, target, true,
6189 				   OPTAB_LIB_WIDEN);
6190     }
6191   return ret;
6192 }
6193 
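/* Illustrative sketch (not part of the GCC sources): when no inline
   sequence exists and the post-operation value is wanted, the code above
   calls the fetch-before library routine named by EXT_CALL and then
   redoes the operation on its return value.  For __atomic_nand_fetch this
   is roughly

       old = __atomic_fetch_nand (p, val, order);
       result = ~(old & val);

   which is the NOT special case in the arithmetic correction above.  */
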
6194 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function.  */
6195 
6196 void
6197 expand_ifn_atomic_bit_test_and (gcall *call)
6198 {
6199   tree ptr = gimple_call_arg (call, 0);
6200   tree bit = gimple_call_arg (call, 1);
6201   tree flag = gimple_call_arg (call, 2);
6202   tree lhs = gimple_call_lhs (call);
6203   enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
6204   machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
6205   enum rtx_code code;
6206   optab optab;
6207   class expand_operand ops[5];
6208 
6209   gcc_assert (flag_inline_atomics);
6210 
6211   if (gimple_call_num_args (call) == 4)
6212     model = get_memmodel (gimple_call_arg (call, 3));
6213 
6214   rtx mem = get_builtin_sync_mem (ptr, mode);
6215   rtx val = expand_expr_force_mode (bit, mode);
6216 
6217   switch (gimple_call_internal_fn (call))
6218     {
6219     case IFN_ATOMIC_BIT_TEST_AND_SET:
6220       code = IOR;
6221       optab = atomic_bit_test_and_set_optab;
6222       break;
6223     case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
6224       code = XOR;
6225       optab = atomic_bit_test_and_complement_optab;
6226       break;
6227     case IFN_ATOMIC_BIT_TEST_AND_RESET:
6228       code = AND;
6229       optab = atomic_bit_test_and_reset_optab;
6230       break;
6231     default:
6232       gcc_unreachable ();
6233     }
6234 
6235   if (lhs == NULL_TREE)
6236     {
6237       val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6238 				 val, NULL_RTX, true, OPTAB_DIRECT);
6239       if (code == AND)
6240 	val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6241       expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
6242       return;
6243     }
6244 
6245   rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
6246   enum insn_code icode = direct_optab_handler (optab, mode);
6247   gcc_assert (icode != CODE_FOR_nothing);
6248   create_output_operand (&ops[0], target, mode);
6249   create_fixed_operand (&ops[1], mem);
6250   create_convert_operand_to (&ops[2], val, mode, true);
6251   create_integer_operand (&ops[3], model);
6252   create_integer_operand (&ops[4], integer_onep (flag));
6253   if (maybe_expand_insn (icode, 5, ops))
6254     return;
6255 
6256   rtx bitval = val;
6257   val = expand_simple_binop (mode, ASHIFT, const1_rtx,
6258 			     val, NULL_RTX, true, OPTAB_DIRECT);
6259   rtx maskval = val;
6260   if (code == AND)
6261     val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
6262   rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
6263 				       code, model, false);
6264   if (integer_onep (flag))
6265     {
6266       result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
6267 				    NULL_RTX, true, OPTAB_DIRECT);
6268       result = expand_simple_binop (mode, AND, result, const1_rtx, target,
6269 				    true, OPTAB_DIRECT);
6270     }
6271   else
6272     result = expand_simple_binop (mode, AND, result, maskval, target, true,
6273 				  OPTAB_DIRECT);
6274   if (result != target)
6275     emit_move_insn (target, result);
6276 }
6277 
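/* Illustrative sketch (not part of the GCC sources): when the target has
   no atomic_bit_test_and_* pattern, the fallback above expands, e.g.,
   IFN_ATOMIC_BIT_TEST_AND_SET (p, bit, 1) roughly as

       mask = (TYPE) 1 << bit;
       old = __atomic_fetch_or (p, mask, order);
       lhs = (old >> bit) & 1;

   and, when the flag argument is 0, as "lhs = old & mask" instead.  */
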
6278 /* Expand an atomic clear operation.
6279 	void _atomic_clear (BOOL *obj, enum memmodel)
6280    EXP is the call expression.  */
6281 
6282 static rtx
6283 expand_builtin_atomic_clear (tree exp)
6284 {
6285   machine_mode mode;
6286   rtx mem, ret;
6287   enum memmodel model;
6288 
6289   mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6290   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6291   model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6292 
6293   if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
6294     {
6295       location_t loc
6296 	= expansion_point_location_if_in_system_header (input_location);
6297       warning_at (loc, OPT_Winvalid_memory_model,
6298 		  "invalid memory model for %<__atomic_store%>");
6299       model = MEMMODEL_SEQ_CST;
6300     }
6301 
6302   /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
6303      Failing that, a store is issued by __atomic_store.  The only way this can
6304      fail is if the bool type is larger than a word size.  Unlikely, but
6305      handle it anyway for completeness.  Assume a single threaded model since
6306      there is no atomic support in this case, and no barriers are required.  */
6307   ret = expand_atomic_store (mem, const0_rtx, model, true);
6308   if (!ret)
6309     emit_move_insn (mem, const0_rtx);
6310   return const0_rtx;
6311 }
6312 
6313 /* Expand an atomic test_and_set operation.
6314 	bool _atomic_test_and_set (BOOL *obj, enum memmodel)
6315    EXP is the call expression.  */
6316 
6317 static rtx
6318 expand_builtin_atomic_test_and_set (tree exp, rtx target)
6319 {
6320   rtx mem;
6321   enum memmodel model;
6322   machine_mode mode;
6323 
6324   mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
6325   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
6326   model = get_memmodel (CALL_EXPR_ARG (exp, 1));
6327 
6328   return expand_atomic_test_and_set (target, mem, model);
6329 }
6330 
6331 
6332 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
6333    this architecture.  If ARG1 is NULL, use typical alignment for size ARG0.  */
6334 
6335 static tree
6336 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
6337 {
6338   int size;
6339   machine_mode mode;
6340   unsigned int mode_align, type_align;
6341 
6342   if (TREE_CODE (arg0) != INTEGER_CST)
6343     return NULL_TREE;
6344 
6345   /* We need a corresponding integer mode for the access to be lock-free.  */
6346   size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
6347   if (!int_mode_for_size (size, 0).exists (&mode))
6348     return boolean_false_node;
6349 
6350   mode_align = GET_MODE_ALIGNMENT (mode);
6351 
6352   if (TREE_CODE (arg1) == INTEGER_CST)
6353     {
6354       unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
6355 
6356       /* Either this argument is null, or it's a fake pointer encoding
6357          the alignment of the object.  */
6358       val = least_bit_hwi (val);
6359       val *= BITS_PER_UNIT;
6360 
6361       if (val == 0 || mode_align < val)
6362         type_align = mode_align;
6363       else
6364         type_align = val;
6365     }
6366   else
6367     {
6368       tree ttype = TREE_TYPE (arg1);
6369 
6370       /* This function is usually invoked and folded immediately by the front
6371 	 end before anything else has a chance to look at it.  The pointer
6372 	 parameter at this point is usually cast to a void *, so check for that
6373 	 and look past the cast.  */
6374       if (CONVERT_EXPR_P (arg1)
6375 	  && POINTER_TYPE_P (ttype)
6376 	  && VOID_TYPE_P (TREE_TYPE (ttype))
6377 	  && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
6378 	arg1 = TREE_OPERAND (arg1, 0);
6379 
6380       ttype = TREE_TYPE (arg1);
6381       gcc_assert (POINTER_TYPE_P (ttype));
6382 
6383       /* Get the underlying type of the object.  */
6384       ttype = TREE_TYPE (ttype);
6385       type_align = TYPE_ALIGN (ttype);
6386     }
6387 
6388   /* If the object has smaller alignment, the lock free routines cannot
6389      be used.  */
6390   if (type_align < mode_align)
6391     return boolean_false_node;
6392 
6393   /* Check if a compare_and_swap pattern exists for the mode which represents
6394      the required size.  The pattern is not allowed to fail, so the existence
6395      of the pattern indicates support is present.  Also require that an
6396      atomic load exists for the required size.  */
6397   if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
6398     return boolean_true_node;
6399   else
6400     return boolean_false_node;
6401 }
6402 
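/* Illustrative note (not part of the GCC sources): the second argument is
   usually a null pointer or the address of the object, but the folding
   above also accepts a "fake" pointer whose value encodes an alignment.
   For instance

       __atomic_always_lock_free (sizeof (long long), (void *) 4);

   asks whether an 8-byte access guaranteed only 4-byte alignment is
   always lock free; the least significant set bit of the constant is
   taken as the alignment in bytes.  */
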
6403 /* Return true if the parameters to call EXP represent an object which will
6404    always generate lock free instructions.  The first argument represents the
6405    size of the object, and the second parameter is a pointer to the object
6406    itself.  If NULL is passed for the object, then the result is based on
6407    typical alignment for an object of the specified size.  Otherwise return
6408    false.  */
6409 
6410 static rtx
6411 expand_builtin_atomic_always_lock_free (tree exp)
6412 {
6413   tree size;
6414   tree arg0 = CALL_EXPR_ARG (exp, 0);
6415   tree arg1 = CALL_EXPR_ARG (exp, 1);
6416 
6417   if (TREE_CODE (arg0) != INTEGER_CST)
6418     {
6419       error ("non-constant argument 1 to %qs", "__atomic_always_lock_free");
6420       return const0_rtx;
6421     }
6422 
6423   size = fold_builtin_atomic_always_lock_free (arg0, arg1);
6424   if (size == boolean_true_node)
6425     return const1_rtx;
6426   return const0_rtx;
6427 }
6428 
6429 /* Return one or zero if it can be determined that object ARG1 of size ARG0
6430    is lock free on this architecture.  */
6431 
6432 static tree
6433 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
6434 {
6435   if (!flag_inline_atomics)
6436     return NULL_TREE;
6437 
6438   /* If it isn't always lock free, don't generate a result.  */
6439   if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
6440     return boolean_true_node;
6441 
6442   return NULL_TREE;
6443 }
6444 
6445 /* Return true if the parameters to call EXP represent an object which will
6446    always generate lock free instructions.  The first argument represents the
6447    size of the object, and the second parameter is a pointer to the object
6448    itself.  If NULL is passed for the object, then the result is based on
6449    typical alignment for an object of the specified size.  Otherwise return
6450    NULL.  */
6451 
6452 static rtx
6453 expand_builtin_atomic_is_lock_free (tree exp)
6454 {
6455   tree size;
6456   tree arg0 = CALL_EXPR_ARG (exp, 0);
6457   tree arg1 = CALL_EXPR_ARG (exp, 1);
6458 
6459   if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6460     {
6461       error ("non-integer argument 1 to %qs", "__atomic_is_lock_free");
6462       return NULL_RTX;
6463     }
6464 
6465   if (!flag_inline_atomics)
6466     return NULL_RTX;
6467 
6468   /* If the value is known at compile time, return the RTX for it.  */
6469   size = fold_builtin_atomic_is_lock_free (arg0, arg1);
6470   if (size == boolean_true_node)
6471     return const1_rtx;
6472 
6473   return NULL_RTX;
6474 }
6475 
6476 /* Expand the __atomic_thread_fence intrinsic:
6477    	void __atomic_thread_fence (enum memmodel)
6478    EXP is the CALL_EXPR.  */
6479 
6480 static void
6481 expand_builtin_atomic_thread_fence (tree exp)
6482 {
6483   enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6484   expand_mem_thread_fence (model);
6485 }
6486 
6487 /* Expand the __atomic_signal_fence intrinsic:
6488    	void __atomic_signal_fence (enum memmodel)
6489    EXP is the CALL_EXPR.  */
6490 
6491 static void
6492 expand_builtin_atomic_signal_fence (tree exp)
6493 {
6494   enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
6495   expand_mem_signal_fence (model);
6496 }
6497 
6498 /* Expand the __sync_synchronize intrinsic.  */
6499 
6500 static void
6501 expand_builtin_sync_synchronize (void)
6502 {
6503   expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
6504 }
6505 
6506 static rtx
6507 expand_builtin_thread_pointer (tree exp, rtx target)
6508 {
6509   enum insn_code icode;
6510   if (!validate_arglist (exp, VOID_TYPE))
6511     return const0_rtx;
6512   icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
6513   if (icode != CODE_FOR_nothing)
6514     {
6515       class expand_operand op;
6516       /* If the target is not suitable then create a new target.  */
6517       if (target == NULL_RTX
6518 	  || !REG_P (target)
6519 	  || GET_MODE (target) != Pmode)
6520 	target = gen_reg_rtx (Pmode);
6521       create_output_operand (&op, target, Pmode);
6522       expand_insn (icode, 1, &op);
6523       return target;
6524     }
6525   error ("%<__builtin_thread_pointer%> is not supported on this target");
6526   return const0_rtx;
6527 }
6528 
6529 static void
6530 expand_builtin_set_thread_pointer (tree exp)
6531 {
6532   enum insn_code icode;
6533   if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6534     return;
6535   icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
6536   if (icode != CODE_FOR_nothing)
6537     {
6538       class expand_operand op;
6539       rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
6540 			     Pmode, EXPAND_NORMAL);
6541       create_input_operand (&op, val, Pmode);
6542       expand_insn (icode, 1, &op);
6543       return;
6544     }
6545   error ("%<__builtin_set_thread_pointer%> is not supported on this target");
6546 }
6547 
6548 
6549 /* Emit code to restore the current value of the stack.  */
6550 
6551 static void
6552 expand_stack_restore (tree var)
6553 {
6554   rtx_insn *prev;
6555   rtx sa = expand_normal (var);
6556 
6557   sa = convert_memory_address (Pmode, sa);
6558 
6559   prev = get_last_insn ();
6560   emit_stack_restore (SAVE_BLOCK, sa);
6561 
6562   record_new_stack_level ();
6563 
6564   fixup_args_size_notes (prev, get_last_insn (), 0);
6565 }
6566 
6567 /* Emit code to save the current value of the stack.  */
6568 
6569 static rtx
6570 expand_stack_save (void)
6571 {
6572   rtx ret = NULL_RTX;
6573 
6574   emit_stack_save (SAVE_BLOCK, &ret);
6575   return ret;
6576 }
6577 
6578 /* Emit code to get the openacc gang, worker or vector id or size.  */
6579 
6580 static rtx
6581 expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
6582 {
6583   const char *name;
6584   rtx fallback_retval;
6585   rtx_insn *(*gen_fn) (rtx, rtx);
6586   switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
6587     {
6588     case BUILT_IN_GOACC_PARLEVEL_ID:
6589       name = "__builtin_goacc_parlevel_id";
6590       fallback_retval = const0_rtx;
6591       gen_fn = targetm.gen_oacc_dim_pos;
6592       break;
6593     case BUILT_IN_GOACC_PARLEVEL_SIZE:
6594       name = "__builtin_goacc_parlevel_size";
6595       fallback_retval = const1_rtx;
6596       gen_fn = targetm.gen_oacc_dim_size;
6597       break;
6598     default:
6599       gcc_unreachable ();
6600     }
6601 
6602   if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
6603     {
6604       error ("%qs only supported in OpenACC code", name);
6605       return const0_rtx;
6606     }
6607 
6608   tree arg = CALL_EXPR_ARG (exp, 0);
6609   if (TREE_CODE (arg) != INTEGER_CST)
6610     {
6611       error ("non-constant argument 0 to %qs", name);
6612       return const0_rtx;
6613     }
6614 
6615   int dim = TREE_INT_CST_LOW (arg);
6616   switch (dim)
6617     {
6618     case GOMP_DIM_GANG:
6619     case GOMP_DIM_WORKER:
6620     case GOMP_DIM_VECTOR:
6621       break;
6622     default:
6623       error ("illegal argument 0 to %qs", name);
6624       return const0_rtx;
6625     }
6626 
6627   if (ignore)
6628     return target;
6629 
6630   if (target == NULL_RTX)
6631     target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6632 
6633   if (!targetm.have_oacc_dim_size ())
6634     {
6635       emit_move_insn (target, fallback_retval);
6636       return target;
6637     }
6638 
6639   rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
6640   emit_insn (gen_fn (reg, GEN_INT (dim)));
6641   if (reg != target)
6642     emit_move_insn (target, reg);
6643 
6644   return target;
6645 }
6646 
6647 /* Expand a string compare operation using a sequence of char comparisons
6648    to get rid of the calling overhead, with result going to TARGET if
6649    that's convenient.
6650 
6651    VAR_STR is the variable string source;
6652    CONST_STR is the constant string source;
6653    LENGTH is the number of chars to compare;
6654    CONST_STR_N indicates which source string is the constant string;
6655    IS_MEMCMP indicates whether it's a memcmp or strcmp.
6656 
6657    Expand to: (assuming const_str_n is 2, i.e., arg2 is a constant string)
6658 
6659    target = (int) (unsigned char) var_str[0]
6660 	    - (int) (unsigned char) const_str[0];
6661    if (target != 0)
6662      goto ne_label;
6663      ...
6664    target = (int) (unsigned char) var_str[length - 2]
6665 	    - (int) (unsigned char) const_str[length - 2];
6666    if (target != 0)
6667      goto ne_label;
6668    target = (int) (unsigned char) var_str[length - 1]
6669 	    - (int) (unsigned char) const_str[length - 1];
6670    ne_label:
6671   */
6672 
6673 static rtx
6674 inline_string_cmp (rtx target, tree var_str, const char *const_str,
6675 		   unsigned HOST_WIDE_INT length,
6676 		   int const_str_n, machine_mode mode)
6677 {
6678   HOST_WIDE_INT offset = 0;
6679   rtx var_rtx_array
6680     = get_memory_rtx (var_str, build_int_cst (unsigned_type_node,length));
6681   rtx var_rtx = NULL_RTX;
6682   rtx const_rtx = NULL_RTX;
6683   rtx result = target ? target : gen_reg_rtx (mode);
6684   rtx_code_label *ne_label = gen_label_rtx ();
6685   tree unit_type_node = unsigned_char_type_node;
6686   scalar_int_mode unit_mode
6687     = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);
6688 
6689   start_sequence ();
6690 
6691   for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
6692     {
6693       var_rtx
6694 	= adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
6695       const_rtx = c_readstr (const_str + offset, unit_mode);
6696       rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
6697       rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
6698 
6699       op0 = convert_modes (mode, unit_mode, op0, 1);
6700       op1 = convert_modes (mode, unit_mode, op1, 1);
6701       result = expand_simple_binop (mode, MINUS, op0, op1,
6702 				    result, 1, OPTAB_WIDEN);
6703       if (i < length - 1)
6704 	emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
6705 	    			 mode, true, ne_label);
6706       offset += GET_MODE_SIZE (unit_mode);
6707     }
6708 
6709   emit_label (ne_label);
6710   rtx_insn *insns = get_insns ();
6711   end_sequence ();
6712   emit_insn (insns);
6713 
6714   return result;
6715 }
6716 
6717 /* Inline expansion of a call to str(n)cmp or memcmp, with the result going
6718    to TARGET if that's convenient.
6719    If the call cannot be inlined, return NULL_RTX.  */
6720 
6721 static rtx
6722 inline_expand_builtin_bytecmp (tree exp, rtx target)
6723 {
6724   tree fndecl = get_callee_fndecl (exp);
6725   enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6726   bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
6727 
6728   /* Do NOT apply this inlining expansion when optimizing for size or
6729      optimization level below 2.  */
6730   if (optimize < 2 || optimize_insn_for_size_p ())
6731     return NULL_RTX;
6732 
6733   gcc_checking_assert (fcode == BUILT_IN_STRCMP
6734 		       || fcode == BUILT_IN_STRNCMP
6735 		       || fcode == BUILT_IN_MEMCMP);
6736 
6737   /* On a target where the type of the call (int) has the same or narrower
6738      precision than unsigned char, give up on the inline expansion.  */
6739   if (TYPE_PRECISION (unsigned_char_type_node)
6740       >= TYPE_PRECISION (TREE_TYPE (exp)))
6741     return NULL_RTX;
6742 
6743   tree arg1 = CALL_EXPR_ARG (exp, 0);
6744   tree arg2 = CALL_EXPR_ARG (exp, 1);
6745   tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
6746 
6747   unsigned HOST_WIDE_INT len1 = 0;
6748   unsigned HOST_WIDE_INT len2 = 0;
6749   unsigned HOST_WIDE_INT len3 = 0;
6750 
6751   /* Get the object representation of the initializers of ARG1 and ARG2
6752      as strings, provided they refer to constant objects, with their byte
6753      sizes in LEN1 and LEN2, respectively.  */
6754   const char *bytes1 = getbyterep (arg1, &len1);
6755   const char *bytes2 = getbyterep (arg2, &len2);
6756 
6757   /* Fail if neither argument refers to an initialized constant.  */
6758   if (!bytes1 && !bytes2)
6759     return NULL_RTX;
6760 
6761   if (is_ncmp)
6762     {
6763       /* Fail if the memcmp/strncmp bound is not a constant.  */
6764       if (!tree_fits_uhwi_p (len3_tree))
6765 	return NULL_RTX;
6766 
6767       len3 = tree_to_uhwi (len3_tree);
6768 
6769       if (fcode == BUILT_IN_MEMCMP)
6770 	{
6771 	  /* Fail if the memcmp bound is greater than the size of either
6772 	     of the two constant objects.  */
6773 	  if ((bytes1 && len1 < len3)
6774 	      || (bytes2 && len2 < len3))
6775 	    return NULL_RTX;
6776 	}
6777     }
6778 
6779   if (fcode != BUILT_IN_MEMCMP)
6780     {
6781       /* For string functions (i.e., strcmp and strncmp) reduce LEN1
6782 	 and LEN2 to the length of the nul-terminated string stored
6783 	 in each.  */
6784       if (bytes1 != NULL)
6785 	len1 = strnlen (bytes1, len1) + 1;
6786       if (bytes2 != NULL)
6787 	len2 = strnlen (bytes2, len2) + 1;
6788     }
6789 
6790   /* See inline_string_cmp.  */
6791   int const_str_n;
6792   if (!len1)
6793     const_str_n = 2;
6794   else if (!len2)
6795     const_str_n = 1;
6796   else if (len2 > len1)
6797     const_str_n = 1;
6798   else
6799     const_str_n = 2;
6800 
6801   /* For strncmp only, compute the new bound as the smallest of
6802      the lengths of the two strings (plus 1) and the bound provided
6803      to the function.  */
6804   unsigned HOST_WIDE_INT bound = (const_str_n == 1) ? len1 : len2;
6805   if (is_ncmp && len3 < bound)
6806     bound = len3;
6807 
6808   /* If the bound of the comparison is larger than the threshold,
6809      do nothing.  */
6810   if (bound > (unsigned HOST_WIDE_INT) param_builtin_string_cmp_inline_length)
6811     return NULL_RTX;
6812 
6813   machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6814 
6815   /* Now, start inline expansion of the call.  */
6816   return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
6817 			    (const_str_n == 1) ? bytes1 : bytes2, bound,
6818 			    const_str_n, mode);
6819 }
6820 
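/* Illustrative note (not part of the GCC sources): for a call like

       strncmp (s, "hello", 10)

   the code above finds LEN2 == 6 (string length plus the nul), computes
   BOUND = MIN (6, 10) = 6, and inlines the comparison only when that
   bound does not exceed the builtin-string-cmp-inline-length parameter;
   otherwise the library call is kept.  */
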
6821 /* Expand a call to __builtin_speculation_safe_value_<N>.  MODE
6822    represents the size of the first argument to that call, or VOIDmode
6823    if the argument is a pointer.  IGNORE will be true if the result
6824    isn't used.  */
6825 static rtx
6826 expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
6827 			       bool ignore)
6828 {
6829   rtx val, failsafe;
6830   unsigned nargs = call_expr_nargs (exp);
6831 
6832   tree arg0 = CALL_EXPR_ARG (exp, 0);
6833 
6834   if (mode == VOIDmode)
6835     {
6836       mode = TYPE_MODE (TREE_TYPE (arg0));
6837       gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
6838     }
6839 
6840   val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
6841 
6842   /* An optional second argument can be used as a failsafe value on
6843      some machines.  If it isn't present, then the failsafe value is
6844      assumed to be 0.  */
6845   if (nargs > 1)
6846     {
6847       tree arg1 = CALL_EXPR_ARG (exp, 1);
6848       failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
6849     }
6850   else
6851     failsafe = const0_rtx;
6852 
6853   /* If the result isn't used, the behavior is undefined.  It would be
6854      nice to emit a warning here, but path splitting means this might
6855      happen with legitimate code.  So simply drop the builtin
6856      expansion in that case; we've handled any side-effects above.  */
6857   if (ignore)
6858     return const0_rtx;
6859 
6860   /* If we don't have a suitable target, create one to hold the result.  */
6861   if (target == NULL || GET_MODE (target) != mode)
6862     target = gen_reg_rtx (mode);
6863 
6864   if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
6865     val = convert_modes (mode, VOIDmode, val, false);
6866 
6867   return targetm.speculation_safe_value (mode, target, val, failsafe);
6868 }
6869 
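/* Illustrative note (not part of the GCC sources): a typical use of this
   builtin is to sanitize an index after a bounds check so that it cannot
   be used speculatively past the check, e.g.

       if (i < len)
	 {
	   i = __builtin_speculation_safe_value (i);
	   v = array[i];
	 }

   The optional second argument supplies the failsafe value substituted
   under misspeculation on targets that support it; as handled above it
   defaults to zero.  */
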
6870 /* Expand an expression EXP that calls a built-in function,
6871    with result going to TARGET if that's convenient
6872    (and in mode MODE if that's convenient).
6873    SUBTARGET may be used as the target for computing one of EXP's operands.
6874    IGNORE is nonzero if the value is to be ignored.  */
6875 
6876 rtx
6877 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
6878 		int ignore)
6879 {
6880   tree fndecl = get_callee_fndecl (exp);
6881   machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
6882   int flags;
6883 
6884   if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6885     return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6886 
6887   /* When ASan is enabled, we don't want to expand some memory/string
6888      builtins and rely on libsanitizer's hooks.  This allows us to avoid
6889      redundant checks and be sure, that possible overflow will be detected
6890      by ASan.  */
6891 
6892   enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6893   if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
6894     return expand_call (exp, target, ignore);
6895 
6896   /* When not optimizing, generate calls to library functions for a certain
6897      set of builtins.  */
6898   if (!optimize
6899       && !called_as_built_in (fndecl)
6900       && fcode != BUILT_IN_FORK
6901       && fcode != BUILT_IN_EXECL
6902       && fcode != BUILT_IN_EXECV
6903       && fcode != BUILT_IN_EXECLP
6904       && fcode != BUILT_IN_EXECLE
6905       && fcode != BUILT_IN_EXECVP
6906       && fcode != BUILT_IN_EXECVE
6907       && fcode != BUILT_IN_CLEAR_CACHE
6908       && !ALLOCA_FUNCTION_CODE_P (fcode)
6909       && fcode != BUILT_IN_FREE)
6910     return expand_call (exp, target, ignore);
6911 
6912   /* The built-in function expanders test for target == const0_rtx
6913      to determine whether the function's result will be ignored.  */
6914   if (ignore)
6915     target = const0_rtx;
6916 
6917   /* If the result of a pure or const built-in function is ignored, and
6918      none of its arguments are volatile, we can avoid expanding the
6919      built-in call and just evaluate the arguments for side-effects.  */
6920   if (target == const0_rtx
6921       && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
6922       && !(flags & ECF_LOOPING_CONST_OR_PURE))
6923     {
6924       bool volatilep = false;
6925       tree arg;
6926       call_expr_arg_iterator iter;
6927 
6928       FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6929 	if (TREE_THIS_VOLATILE (arg))
6930 	  {
6931 	    volatilep = true;
6932 	    break;
6933 	  }
6934 
6935       if (! volatilep)
6936 	{
6937 	  FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
6938 	    expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
6939 	  return const0_rtx;
6940 	}
6941     }
6942 
6943   switch (fcode)
6944     {
6945     CASE_FLT_FN (BUILT_IN_FABS):
6946     CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
6947     case BUILT_IN_FABSD32:
6948     case BUILT_IN_FABSD64:
6949     case BUILT_IN_FABSD128:
6950       target = expand_builtin_fabs (exp, target, subtarget);
6951       if (target)
6952 	return target;
6953       break;
6954 
6955     CASE_FLT_FN (BUILT_IN_COPYSIGN):
6956     CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
6957       target = expand_builtin_copysign (exp, target, subtarget);
6958       if (target)
6959 	return target;
6960       break;
6961 
6962       /* Just do a normal library call if we were unable to fold
6963 	 the values.  */
6964     CASE_FLT_FN (BUILT_IN_CABS):
6965       break;
6966 
6967     CASE_FLT_FN (BUILT_IN_FMA):
6968     CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
6969       target = expand_builtin_mathfn_ternary (exp, target, subtarget);
6970       if (target)
6971 	return target;
6972       break;
6973 
6974     CASE_FLT_FN (BUILT_IN_ILOGB):
6975       if (! flag_unsafe_math_optimizations)
6976 	break;
6977       gcc_fallthrough ();
6978     CASE_FLT_FN (BUILT_IN_ISINF):
6979     CASE_FLT_FN (BUILT_IN_FINITE):
6980     case BUILT_IN_ISFINITE:
6981     case BUILT_IN_ISNORMAL:
6982       target = expand_builtin_interclass_mathfn (exp, target);
6983       if (target)
6984 	return target;
6985       break;
6986 
6987     CASE_FLT_FN (BUILT_IN_ICEIL):
6988     CASE_FLT_FN (BUILT_IN_LCEIL):
6989     CASE_FLT_FN (BUILT_IN_LLCEIL):
6990     CASE_FLT_FN (BUILT_IN_LFLOOR):
6991     CASE_FLT_FN (BUILT_IN_IFLOOR):
6992     CASE_FLT_FN (BUILT_IN_LLFLOOR):
6993       target = expand_builtin_int_roundingfn (exp, target);
6994       if (target)
6995 	return target;
6996       break;
6997 
6998     CASE_FLT_FN (BUILT_IN_IRINT):
6999     CASE_FLT_FN (BUILT_IN_LRINT):
7000     CASE_FLT_FN (BUILT_IN_LLRINT):
7001     CASE_FLT_FN (BUILT_IN_IROUND):
7002     CASE_FLT_FN (BUILT_IN_LROUND):
7003     CASE_FLT_FN (BUILT_IN_LLROUND):
7004       target = expand_builtin_int_roundingfn_2 (exp, target);
7005       if (target)
7006 	return target;
7007       break;
7008 
7009     CASE_FLT_FN (BUILT_IN_POWI):
7010       target = expand_builtin_powi (exp, target);
7011       if (target)
7012 	return target;
7013       break;
7014 
7015     CASE_FLT_FN (BUILT_IN_CEXPI):
7016       target = expand_builtin_cexpi (exp, target);
7017       gcc_assert (target);
7018       return target;
7019 
7020     CASE_FLT_FN (BUILT_IN_SIN):
7021     CASE_FLT_FN (BUILT_IN_COS):
7022       if (! flag_unsafe_math_optimizations)
7023 	break;
7024       target = expand_builtin_mathfn_3 (exp, target, subtarget);
7025       if (target)
7026 	return target;
7027       break;
7028 
7029     CASE_FLT_FN (BUILT_IN_SINCOS):
7030       if (! flag_unsafe_math_optimizations)
7031 	break;
7032       target = expand_builtin_sincos (exp);
7033       if (target)
7034 	return target;
7035       break;
7036 
7037     case BUILT_IN_APPLY_ARGS:
7038       return expand_builtin_apply_args ();
7039 
7040       /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7041 	 FUNCTION with a copy of the parameters described by
7042 	 ARGUMENTS, and ARGSIZE.  It returns a block of memory
7043 	 allocated on the stack into which is stored all the registers
7044 	 that might possibly be used for returning the result of a
7045 	 function.  ARGUMENTS is the value returned by
7046 	 __builtin_apply_args.  ARGSIZE is the number of bytes of
7047 	 arguments that must be copied.  ??? How should this value be
7048 	 computed?  We'll also need a safe worst case value for varargs
7049 	 functions.  */
7050     case BUILT_IN_APPLY:
7051       if (!validate_arglist (exp, POINTER_TYPE,
7052 			     POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
7053 	  && !validate_arglist (exp, REFERENCE_TYPE,
7054 				POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7055 	return const0_rtx;
7056       else
7057 	{
7058 	  rtx ops[3];
7059 
7060 	  ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
7061 	  ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
7062 	  ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
7063 
7064 	  return expand_builtin_apply (ops[0], ops[1], ops[2]);
7065 	}
7066 
7067       /* __builtin_return (RESULT) causes the function to return the
7068 	 value described by RESULT.  RESULT is address of the block of
7069 	 memory returned by __builtin_apply.  */
7070     case BUILT_IN_RETURN:
7071       if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7072 	expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
7073       return const0_rtx;
7074 
7075     case BUILT_IN_SAVEREGS:
7076       return expand_builtin_saveregs ();
7077 
7078     case BUILT_IN_VA_ARG_PACK:
7079       /* All valid uses of __builtin_va_arg_pack () are removed during
7080 	 inlining.  */
7081       error ("invalid use of %<__builtin_va_arg_pack ()%>");
7082       return const0_rtx;
7083 
7084     case BUILT_IN_VA_ARG_PACK_LEN:
7085       /* All valid uses of __builtin_va_arg_pack_len () are removed during
7086 	 inlining.  */
7087       error ("invalid use of %<__builtin_va_arg_pack_len ()%>");
7088       return const0_rtx;
7089 
7090       /* Return the address of the first anonymous stack arg.  */
7091     case BUILT_IN_NEXT_ARG:
7092       if (fold_builtin_next_arg (exp, false))
7093 	return const0_rtx;
7094       return expand_builtin_next_arg ();
7095 
7096     case BUILT_IN_CLEAR_CACHE:
7097       expand_builtin___clear_cache (exp);
7098       return const0_rtx;
7099 
7100     case BUILT_IN_CLASSIFY_TYPE:
7101       return expand_builtin_classify_type (exp);
7102 
7103     case BUILT_IN_CONSTANT_P:
7104       return const0_rtx;
7105 
7106     case BUILT_IN_FRAME_ADDRESS:
7107     case BUILT_IN_RETURN_ADDRESS:
7108       return expand_builtin_frame_address (fndecl, exp);
7109 
7110     /* Returns the address of the area where the structure is returned.
7111        0 otherwise.  */
7112     case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
7113       if (call_expr_nargs (exp) != 0
7114 	  || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
7115 	  || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
7116 	return const0_rtx;
7117       else
7118 	return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
7119 
7120     CASE_BUILT_IN_ALLOCA:
7121       target = expand_builtin_alloca (exp);
7122       if (target)
7123 	return target;
7124       break;
7125 
7126     case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
7127       return expand_asan_emit_allocas_unpoison (exp);
7128 
7129     case BUILT_IN_STACK_SAVE:
7130       return expand_stack_save ();
7131 
7132     case BUILT_IN_STACK_RESTORE:
7133       expand_stack_restore (CALL_EXPR_ARG (exp, 0));
7134       return const0_rtx;
7135 
7136     case BUILT_IN_BSWAP16:
7137     case BUILT_IN_BSWAP32:
7138     case BUILT_IN_BSWAP64:
7139     case BUILT_IN_BSWAP128:
7140       target = expand_builtin_bswap (target_mode, exp, target, subtarget);
7141       if (target)
7142 	return target;
7143       break;
7144 
7145     CASE_INT_FN (BUILT_IN_FFS):
7146       target = expand_builtin_unop (target_mode, exp, target,
7147 				    subtarget, ffs_optab);
7148       if (target)
7149 	return target;
7150       break;
7151 
7152     CASE_INT_FN (BUILT_IN_CLZ):
7153       target = expand_builtin_unop (target_mode, exp, target,
7154 				    subtarget, clz_optab);
7155       if (target)
7156 	return target;
7157       break;
7158 
7159     CASE_INT_FN (BUILT_IN_CTZ):
7160       target = expand_builtin_unop (target_mode, exp, target,
7161 				    subtarget, ctz_optab);
7162       if (target)
7163 	return target;
7164       break;
7165 
7166     CASE_INT_FN (BUILT_IN_CLRSB):
7167       target = expand_builtin_unop (target_mode, exp, target,
7168 				    subtarget, clrsb_optab);
7169       if (target)
7170 	return target;
7171       break;
7172 
7173     CASE_INT_FN (BUILT_IN_POPCOUNT):
7174       target = expand_builtin_unop (target_mode, exp, target,
7175 				    subtarget, popcount_optab);
7176       if (target)
7177 	return target;
7178       break;
7179 
7180     CASE_INT_FN (BUILT_IN_PARITY):
7181       target = expand_builtin_unop (target_mode, exp, target,
7182 				    subtarget, parity_optab);
7183       if (target)
7184 	return target;
7185       break;
7186 
7187     case BUILT_IN_STRLEN:
7188       target = expand_builtin_strlen (exp, target, target_mode);
7189       if (target)
7190 	return target;
7191       break;
7192 
7193     case BUILT_IN_STRNLEN:
7194       target = expand_builtin_strnlen (exp, target, target_mode);
7195       if (target)
7196 	return target;
7197       break;
7198 
7199     case BUILT_IN_STRCPY:
7200       target = expand_builtin_strcpy (exp, target);
7201       if (target)
7202 	return target;
7203       break;
7204 
7205     case BUILT_IN_STRNCPY:
7206       target = expand_builtin_strncpy (exp, target);
7207       if (target)
7208 	return target;
7209       break;
7210 
7211     case BUILT_IN_STPCPY:
7212       target = expand_builtin_stpcpy (exp, target, mode);
7213       if (target)
7214 	return target;
7215       break;
7216 
7217     case BUILT_IN_MEMCPY:
7218       target = expand_builtin_memcpy (exp, target);
7219       if (target)
7220 	return target;
7221       break;
7222 
7223     case BUILT_IN_MEMMOVE:
7224       target = expand_builtin_memmove (exp, target);
7225       if (target)
7226 	return target;
7227       break;
7228 
7229     case BUILT_IN_MEMPCPY:
7230       target = expand_builtin_mempcpy (exp, target);
7231       if (target)
7232 	return target;
7233       break;
7234 
7235     case BUILT_IN_MEMSET:
7236       target = expand_builtin_memset (exp, target, mode);
7237       if (target)
7238 	return target;
7239       break;
7240 
7241     case BUILT_IN_BZERO:
7242       target = expand_builtin_bzero (exp);
7243       if (target)
7244 	return target;
7245       break;
7246 
7247     /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7248        back to a BUILT_IN_STRCMP. Remember to delete the 3rd parameter
7249        when changing it to a strcmp call.  */
7250     case BUILT_IN_STRCMP_EQ:
7251       target = expand_builtin_memcmp (exp, target, true);
7252       if (target)
7253 	return target;
7254 
7255       /* Change this call back to a BUILT_IN_STRCMP.  */
7256       TREE_OPERAND (exp, 1)
7257 	= build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
7258 
7259       /* Delete the last parameter.  */
7260       unsigned int i;
7261       vec<tree, va_gc> *arg_vec;
7262       vec_alloc (arg_vec, 2);
7263       for (i = 0; i < 2; i++)
7264 	arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
7265       exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
7266       /* FALLTHROUGH */
7267 
7268     case BUILT_IN_STRCMP:
7269       target = expand_builtin_strcmp (exp, target);
7270       if (target)
7271 	return target;
7272       break;
7273 
7274     /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
7275        back to a BUILT_IN_STRNCMP.  */
7276     case BUILT_IN_STRNCMP_EQ:
7277       target = expand_builtin_memcmp (exp, target, true);
7278       if (target)
7279 	return target;
7280 
7281       /* Change it back to a BUILT_IN_STRNCMP.  */
7282       TREE_OPERAND (exp, 1)
7283 	= build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
7284       /* FALLTHROUGH */
7285 
7286     case BUILT_IN_STRNCMP:
7287       target = expand_builtin_strncmp (exp, target, mode);
7288       if (target)
7289 	return target;
7290       break;
7291 
7292     case BUILT_IN_BCMP:
7293     case BUILT_IN_MEMCMP:
7294     case BUILT_IN_MEMCMP_EQ:
7295       target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
7296       if (target)
7297 	return target;
7298       if (fcode == BUILT_IN_MEMCMP_EQ)
7299 	{
7300 	  tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
7301 	  TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
7302 	}
7303       break;
7304 
7305     case BUILT_IN_SETJMP:
7306       /* This should have been lowered to the builtins below.  */
7307       gcc_unreachable ();
7308 
7309     case BUILT_IN_SETJMP_SETUP:
7310       /* __builtin_setjmp_setup is passed a pointer to an array of five words
7311           and the receiver label.  */
7312       if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7313 	{
7314 	  rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7315 				      VOIDmode, EXPAND_NORMAL);
7316 	  tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
7317 	  rtx_insn *label_r = label_rtx (label);
7318 
7319 	  /* This is copied from the handling of non-local gotos.  */
7320 	  expand_builtin_setjmp_setup (buf_addr, label_r);
7321 	  nonlocal_goto_handler_labels
7322 	    = gen_rtx_INSN_LIST (VOIDmode, label_r,
7323 				 nonlocal_goto_handler_labels);
7324 	  /* ??? Do not let expand_label treat us as such since we would
7325 	     not want to be both on the list of non-local labels and on
7326 	     the list of forced labels.  */
7327 	  FORCED_LABEL (label) = 0;
7328 	  return const0_rtx;
7329 	}
7330       break;
7331 
7332     case BUILT_IN_SETJMP_RECEIVER:
7333        /* __builtin_setjmp_receiver is passed the receiver label.  */
7334       if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7335 	{
7336 	  tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
7337 	  rtx_insn *label_r = label_rtx (label);
7338 
7339 	  expand_builtin_setjmp_receiver (label_r);
7340 	  return const0_rtx;
7341 	}
7342       break;
7343 
7344       /* __builtin_longjmp is passed a pointer to an array of five words.
7345 	 It's similar to the C library longjmp function but works with
7346 	 __builtin_setjmp above.  */
7347     case BUILT_IN_LONGJMP:
7348       if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7349 	{
7350 	  rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
7351 				      VOIDmode, EXPAND_NORMAL);
7352 	  rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
7353 
7354 	  if (value != const1_rtx)
7355 	    {
7356 	      error ("%<__builtin_longjmp%> second argument must be 1");
7357 	      return const0_rtx;
7358 	    }
7359 
7360 	  expand_builtin_longjmp (buf_addr, value);
7361 	  return const0_rtx;
7362 	}
7363       break;
7364 
7365     case BUILT_IN_NONLOCAL_GOTO:
7366       target = expand_builtin_nonlocal_goto (exp);
7367       if (target)
7368 	return target;
7369       break;
7370 
7371       /* This updates the setjmp buffer that is its argument with the value
7372 	 of the current stack pointer.  */
7373     case BUILT_IN_UPDATE_SETJMP_BUF:
7374       if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
7375 	{
7376 	  rtx buf_addr
7377 	    = expand_normal (CALL_EXPR_ARG (exp, 0));
7378 
7379 	  expand_builtin_update_setjmp_buf (buf_addr);
7380 	  return const0_rtx;
7381 	}
7382       break;
7383 
7384     case BUILT_IN_TRAP:
7385       expand_builtin_trap ();
7386       return const0_rtx;
7387 
7388     case BUILT_IN_UNREACHABLE:
7389       expand_builtin_unreachable ();
7390       return const0_rtx;
7391 
7392     CASE_FLT_FN (BUILT_IN_SIGNBIT):
7393     case BUILT_IN_SIGNBITD32:
7394     case BUILT_IN_SIGNBITD64:
7395     case BUILT_IN_SIGNBITD128:
7396       target = expand_builtin_signbit (exp, target);
7397       if (target)
7398 	return target;
7399       break;
7400 
7401       /* Various hooks for the DWARF 2 __throw routine.  */
7402     case BUILT_IN_UNWIND_INIT:
7403       expand_builtin_unwind_init ();
7404       return const0_rtx;
7405     case BUILT_IN_DWARF_CFA:
7406       return virtual_cfa_rtx;
7407 #ifdef DWARF2_UNWIND_INFO
7408     case BUILT_IN_DWARF_SP_COLUMN:
7409       return expand_builtin_dwarf_sp_column ();
7410     case BUILT_IN_INIT_DWARF_REG_SIZES:
7411       expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
7412       return const0_rtx;
7413 #endif
7414     case BUILT_IN_FROB_RETURN_ADDR:
7415       return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
7416     case BUILT_IN_EXTRACT_RETURN_ADDR:
7417       return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
7418     case BUILT_IN_EH_RETURN:
7419       expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
7420 				CALL_EXPR_ARG (exp, 1));
7421       return const0_rtx;
7422     case BUILT_IN_EH_RETURN_DATA_REGNO:
7423       return expand_builtin_eh_return_data_regno (exp);
7424     case BUILT_IN_EXTEND_POINTER:
7425       return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
7426     case BUILT_IN_EH_POINTER:
7427       return expand_builtin_eh_pointer (exp);
7428     case BUILT_IN_EH_FILTER:
7429       return expand_builtin_eh_filter (exp);
7430     case BUILT_IN_EH_COPY_VALUES:
7431       return expand_builtin_eh_copy_values (exp);
7432 
7433     case BUILT_IN_VA_START:
7434       return expand_builtin_va_start (exp);
7435     case BUILT_IN_VA_END:
7436       return expand_builtin_va_end (exp);
7437     case BUILT_IN_VA_COPY:
7438       return expand_builtin_va_copy (exp);
7439     case BUILT_IN_EXPECT:
7440       return expand_builtin_expect (exp, target);
7441     case BUILT_IN_EXPECT_WITH_PROBABILITY:
7442       return expand_builtin_expect_with_probability (exp, target);
7443     case BUILT_IN_ASSUME_ALIGNED:
7444       return expand_builtin_assume_aligned (exp, target);
7445     case BUILT_IN_PREFETCH:
7446       expand_builtin_prefetch (exp);
7447       return const0_rtx;
7448 
7449     case BUILT_IN_INIT_TRAMPOLINE:
7450       return expand_builtin_init_trampoline (exp, true);
7451     case BUILT_IN_INIT_HEAP_TRAMPOLINE:
7452       return expand_builtin_init_trampoline (exp, false);
7453     case BUILT_IN_ADJUST_TRAMPOLINE:
7454       return expand_builtin_adjust_trampoline (exp);
7455 
7456     case BUILT_IN_INIT_DESCRIPTOR:
7457       return expand_builtin_init_descriptor (exp);
7458     case BUILT_IN_ADJUST_DESCRIPTOR:
7459       return expand_builtin_adjust_descriptor (exp);
7460 
7461     case BUILT_IN_FORK:
7462     case BUILT_IN_EXECL:
7463     case BUILT_IN_EXECV:
7464     case BUILT_IN_EXECLP:
7465     case BUILT_IN_EXECLE:
7466     case BUILT_IN_EXECVP:
7467     case BUILT_IN_EXECVE:
7468       target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
7469       if (target)
7470 	return target;
7471       break;
7472 
7473     case BUILT_IN_SYNC_FETCH_AND_ADD_1:
7474     case BUILT_IN_SYNC_FETCH_AND_ADD_2:
7475     case BUILT_IN_SYNC_FETCH_AND_ADD_4:
7476     case BUILT_IN_SYNC_FETCH_AND_ADD_8:
7477     case BUILT_IN_SYNC_FETCH_AND_ADD_16:
7478       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
7479       target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
7480       if (target)
7481 	return target;
7482       break;
7483 
7484     case BUILT_IN_SYNC_FETCH_AND_SUB_1:
7485     case BUILT_IN_SYNC_FETCH_AND_SUB_2:
7486     case BUILT_IN_SYNC_FETCH_AND_SUB_4:
7487     case BUILT_IN_SYNC_FETCH_AND_SUB_8:
7488     case BUILT_IN_SYNC_FETCH_AND_SUB_16:
7489       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
7490       target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
7491       if (target)
7492 	return target;
7493       break;
7494 
7495     case BUILT_IN_SYNC_FETCH_AND_OR_1:
7496     case BUILT_IN_SYNC_FETCH_AND_OR_2:
7497     case BUILT_IN_SYNC_FETCH_AND_OR_4:
7498     case BUILT_IN_SYNC_FETCH_AND_OR_8:
7499     case BUILT_IN_SYNC_FETCH_AND_OR_16:
7500       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
7501       target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
7502       if (target)
7503 	return target;
7504       break;
7505 
7506     case BUILT_IN_SYNC_FETCH_AND_AND_1:
7507     case BUILT_IN_SYNC_FETCH_AND_AND_2:
7508     case BUILT_IN_SYNC_FETCH_AND_AND_4:
7509     case BUILT_IN_SYNC_FETCH_AND_AND_8:
7510     case BUILT_IN_SYNC_FETCH_AND_AND_16:
7511       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
7512       target = expand_builtin_sync_operation (mode, exp, AND, false, target);
7513       if (target)
7514 	return target;
7515       break;
7516 
7517     case BUILT_IN_SYNC_FETCH_AND_XOR_1:
7518     case BUILT_IN_SYNC_FETCH_AND_XOR_2:
7519     case BUILT_IN_SYNC_FETCH_AND_XOR_4:
7520     case BUILT_IN_SYNC_FETCH_AND_XOR_8:
7521     case BUILT_IN_SYNC_FETCH_AND_XOR_16:
7522       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
7523       target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
7524       if (target)
7525 	return target;
7526       break;
7527 
7528     case BUILT_IN_SYNC_FETCH_AND_NAND_1:
7529     case BUILT_IN_SYNC_FETCH_AND_NAND_2:
7530     case BUILT_IN_SYNC_FETCH_AND_NAND_4:
7531     case BUILT_IN_SYNC_FETCH_AND_NAND_8:
7532     case BUILT_IN_SYNC_FETCH_AND_NAND_16:
7533       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
7534       target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
7535       if (target)
7536 	return target;
7537       break;
7538 
7539     case BUILT_IN_SYNC_ADD_AND_FETCH_1:
7540     case BUILT_IN_SYNC_ADD_AND_FETCH_2:
7541     case BUILT_IN_SYNC_ADD_AND_FETCH_4:
7542     case BUILT_IN_SYNC_ADD_AND_FETCH_8:
7543     case BUILT_IN_SYNC_ADD_AND_FETCH_16:
7544       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
7545       target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
7546       if (target)
7547 	return target;
7548       break;
7549 
7550     case BUILT_IN_SYNC_SUB_AND_FETCH_1:
7551     case BUILT_IN_SYNC_SUB_AND_FETCH_2:
7552     case BUILT_IN_SYNC_SUB_AND_FETCH_4:
7553     case BUILT_IN_SYNC_SUB_AND_FETCH_8:
7554     case BUILT_IN_SYNC_SUB_AND_FETCH_16:
7555       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
7556       target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
7557       if (target)
7558 	return target;
7559       break;
7560 
7561     case BUILT_IN_SYNC_OR_AND_FETCH_1:
7562     case BUILT_IN_SYNC_OR_AND_FETCH_2:
7563     case BUILT_IN_SYNC_OR_AND_FETCH_4:
7564     case BUILT_IN_SYNC_OR_AND_FETCH_8:
7565     case BUILT_IN_SYNC_OR_AND_FETCH_16:
7566       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
7567       target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
7568       if (target)
7569 	return target;
7570       break;
7571 
7572     case BUILT_IN_SYNC_AND_AND_FETCH_1:
7573     case BUILT_IN_SYNC_AND_AND_FETCH_2:
7574     case BUILT_IN_SYNC_AND_AND_FETCH_4:
7575     case BUILT_IN_SYNC_AND_AND_FETCH_8:
7576     case BUILT_IN_SYNC_AND_AND_FETCH_16:
7577       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
7578       target = expand_builtin_sync_operation (mode, exp, AND, true, target);
7579       if (target)
7580 	return target;
7581       break;
7582 
7583     case BUILT_IN_SYNC_XOR_AND_FETCH_1:
7584     case BUILT_IN_SYNC_XOR_AND_FETCH_2:
7585     case BUILT_IN_SYNC_XOR_AND_FETCH_4:
7586     case BUILT_IN_SYNC_XOR_AND_FETCH_8:
7587     case BUILT_IN_SYNC_XOR_AND_FETCH_16:
7588       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
7589       target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
7590       if (target)
7591 	return target;
7592       break;
7593 
7594     case BUILT_IN_SYNC_NAND_AND_FETCH_1:
7595     case BUILT_IN_SYNC_NAND_AND_FETCH_2:
7596     case BUILT_IN_SYNC_NAND_AND_FETCH_4:
7597     case BUILT_IN_SYNC_NAND_AND_FETCH_8:
7598     case BUILT_IN_SYNC_NAND_AND_FETCH_16:
7599       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
7600       target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
7601       if (target)
7602 	return target;
7603       break;
7604 
7605     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
7606     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
7607     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
7608     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
7609     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
7610       if (mode == VOIDmode)
7611 	mode = TYPE_MODE (boolean_type_node);
7612       if (!target || !register_operand (target, mode))
7613 	target = gen_reg_rtx (mode);
7614 
7615       mode = get_builtin_sync_mode
7616 				(fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
7617       target = expand_builtin_compare_and_swap (mode, exp, true, target);
7618       if (target)
7619 	return target;
7620       break;
7621 
7622     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
7623     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
7624     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
7625     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
7626     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
7627       mode = get_builtin_sync_mode
7628 				(fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
7629       target = expand_builtin_compare_and_swap (mode, exp, false, target);
7630       if (target)
7631 	return target;
7632       break;
7633 
7634     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
7635     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
7636     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
7637     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
7638     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
7639       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
7640       target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
7641       if (target)
7642 	return target;
7643       break;
7644 
7645     case BUILT_IN_SYNC_LOCK_RELEASE_1:
7646     case BUILT_IN_SYNC_LOCK_RELEASE_2:
7647     case BUILT_IN_SYNC_LOCK_RELEASE_4:
7648     case BUILT_IN_SYNC_LOCK_RELEASE_8:
7649     case BUILT_IN_SYNC_LOCK_RELEASE_16:
7650       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
7651       expand_builtin_sync_lock_release (mode, exp);
7652       return const0_rtx;
7653 
7654     case BUILT_IN_SYNC_SYNCHRONIZE:
7655       expand_builtin_sync_synchronize ();
7656       return const0_rtx;
7657 
7658     case BUILT_IN_ATOMIC_EXCHANGE_1:
7659     case BUILT_IN_ATOMIC_EXCHANGE_2:
7660     case BUILT_IN_ATOMIC_EXCHANGE_4:
7661     case BUILT_IN_ATOMIC_EXCHANGE_8:
7662     case BUILT_IN_ATOMIC_EXCHANGE_16:
7663       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
7664       target = expand_builtin_atomic_exchange (mode, exp, target);
7665       if (target)
7666 	return target;
7667       break;
7668 
7669     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
7670     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
7671     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
7672     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
7673     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
7674       {
7675 	unsigned int nargs, z;
7676 	vec<tree, va_gc> *vec;
7677 
7678 	mode =
7679 	    get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
7680 	target = expand_builtin_atomic_compare_exchange (mode, exp, target);
7681 	if (target)
7682 	  return target;
7683 
7684 	/* If this is turned into an external library call, the weak parameter
7685 	   must be dropped to match the expected parameter list.  */
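	/* Sketch of the remapping done below (argument positions taken
	   from this code, not from any particular library header):
	     __atomic_compare_exchange_N (ptr, expected, desired,
					  weak, success, failure)
	   is rebuilt as
	     __atomic_compare_exchange_N (ptr, expected, desired,
					  success, failure)
	   i.e. arguments 0-2 and 4-5 are kept and argument 3 (WEAK) is
	   dropped.  */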
7686 	nargs = call_expr_nargs (exp);
7687 	vec_alloc (vec, nargs - 1);
7688 	for (z = 0; z < 3; z++)
7689 	  vec->quick_push (CALL_EXPR_ARG (exp, z));
7690 	/* Skip the boolean weak parameter.  */
7691 	for (z = 4; z < 6; z++)
7692 	  vec->quick_push (CALL_EXPR_ARG (exp, z));
7693 	exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
7694 	break;
7695       }
7696 
7697     case BUILT_IN_ATOMIC_LOAD_1:
7698     case BUILT_IN_ATOMIC_LOAD_2:
7699     case BUILT_IN_ATOMIC_LOAD_4:
7700     case BUILT_IN_ATOMIC_LOAD_8:
7701     case BUILT_IN_ATOMIC_LOAD_16:
7702       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
7703       target = expand_builtin_atomic_load (mode, exp, target);
7704       if (target)
7705 	return target;
7706       break;
7707 
7708     case BUILT_IN_ATOMIC_STORE_1:
7709     case BUILT_IN_ATOMIC_STORE_2:
7710     case BUILT_IN_ATOMIC_STORE_4:
7711     case BUILT_IN_ATOMIC_STORE_8:
7712     case BUILT_IN_ATOMIC_STORE_16:
7713       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
7714       target = expand_builtin_atomic_store (mode, exp);
7715       if (target)
7716 	return const0_rtx;
7717       break;
7718 
7719     case BUILT_IN_ATOMIC_ADD_FETCH_1:
7720     case BUILT_IN_ATOMIC_ADD_FETCH_2:
7721     case BUILT_IN_ATOMIC_ADD_FETCH_4:
7722     case BUILT_IN_ATOMIC_ADD_FETCH_8:
7723     case BUILT_IN_ATOMIC_ADD_FETCH_16:
7724       {
7725 	enum built_in_function lib;
7726 	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
7727 	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
7728 				       (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
7729 	target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
7730 						 ignore, lib);
7731 	if (target)
7732 	  return target;
7733 	break;
7734       }
7735     case BUILT_IN_ATOMIC_SUB_FETCH_1:
7736     case BUILT_IN_ATOMIC_SUB_FETCH_2:
7737     case BUILT_IN_ATOMIC_SUB_FETCH_4:
7738     case BUILT_IN_ATOMIC_SUB_FETCH_8:
7739     case BUILT_IN_ATOMIC_SUB_FETCH_16:
7740       {
7741 	enum built_in_function lib;
7742 	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
7743 	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
7744 				       (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
7745 	target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
7746 						 ignore, lib);
7747 	if (target)
7748 	  return target;
7749 	break;
7750       }
7751     case BUILT_IN_ATOMIC_AND_FETCH_1:
7752     case BUILT_IN_ATOMIC_AND_FETCH_2:
7753     case BUILT_IN_ATOMIC_AND_FETCH_4:
7754     case BUILT_IN_ATOMIC_AND_FETCH_8:
7755     case BUILT_IN_ATOMIC_AND_FETCH_16:
7756       {
7757 	enum built_in_function lib;
7758 	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
7759 	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
7760 				       (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
7761 	target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
7762 						 ignore, lib);
7763 	if (target)
7764 	  return target;
7765 	break;
7766       }
7767     case BUILT_IN_ATOMIC_NAND_FETCH_1:
7768     case BUILT_IN_ATOMIC_NAND_FETCH_2:
7769     case BUILT_IN_ATOMIC_NAND_FETCH_4:
7770     case BUILT_IN_ATOMIC_NAND_FETCH_8:
7771     case BUILT_IN_ATOMIC_NAND_FETCH_16:
7772       {
7773 	enum built_in_function lib;
7774 	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
7775 	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
7776 				       (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
7777 	target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
7778 						 ignore, lib);
7779 	if (target)
7780 	  return target;
7781 	break;
7782       }
7783     case BUILT_IN_ATOMIC_XOR_FETCH_1:
7784     case BUILT_IN_ATOMIC_XOR_FETCH_2:
7785     case BUILT_IN_ATOMIC_XOR_FETCH_4:
7786     case BUILT_IN_ATOMIC_XOR_FETCH_8:
7787     case BUILT_IN_ATOMIC_XOR_FETCH_16:
7788       {
7789 	enum built_in_function lib;
7790 	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
7791 	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
7792 				       (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
7793 	target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
7794 						 ignore, lib);
7795 	if (target)
7796 	  return target;
7797 	break;
7798       }
7799     case BUILT_IN_ATOMIC_OR_FETCH_1:
7800     case BUILT_IN_ATOMIC_OR_FETCH_2:
7801     case BUILT_IN_ATOMIC_OR_FETCH_4:
7802     case BUILT_IN_ATOMIC_OR_FETCH_8:
7803     case BUILT_IN_ATOMIC_OR_FETCH_16:
7804       {
7805 	enum built_in_function lib;
7806 	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
7807 	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
7808 				       (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
7809 	target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
7810 						 ignore, lib);
7811 	if (target)
7812 	  return target;
7813 	break;
7814       }
7815     case BUILT_IN_ATOMIC_FETCH_ADD_1:
7816     case BUILT_IN_ATOMIC_FETCH_ADD_2:
7817     case BUILT_IN_ATOMIC_FETCH_ADD_4:
7818     case BUILT_IN_ATOMIC_FETCH_ADD_8:
7819     case BUILT_IN_ATOMIC_FETCH_ADD_16:
7820       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
7821       target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
7822 					       ignore, BUILT_IN_NONE);
7823       if (target)
7824 	return target;
7825       break;
7826 
7827     case BUILT_IN_ATOMIC_FETCH_SUB_1:
7828     case BUILT_IN_ATOMIC_FETCH_SUB_2:
7829     case BUILT_IN_ATOMIC_FETCH_SUB_4:
7830     case BUILT_IN_ATOMIC_FETCH_SUB_8:
7831     case BUILT_IN_ATOMIC_FETCH_SUB_16:
7832       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
7833       target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
7834 					       ignore, BUILT_IN_NONE);
7835       if (target)
7836 	return target;
7837       break;
7838 
7839     case BUILT_IN_ATOMIC_FETCH_AND_1:
7840     case BUILT_IN_ATOMIC_FETCH_AND_2:
7841     case BUILT_IN_ATOMIC_FETCH_AND_4:
7842     case BUILT_IN_ATOMIC_FETCH_AND_8:
7843     case BUILT_IN_ATOMIC_FETCH_AND_16:
7844       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
7845       target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
7846 					       ignore, BUILT_IN_NONE);
7847       if (target)
7848 	return target;
7849       break;
7850 
7851     case BUILT_IN_ATOMIC_FETCH_NAND_1:
7852     case BUILT_IN_ATOMIC_FETCH_NAND_2:
7853     case BUILT_IN_ATOMIC_FETCH_NAND_4:
7854     case BUILT_IN_ATOMIC_FETCH_NAND_8:
7855     case BUILT_IN_ATOMIC_FETCH_NAND_16:
7856       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
7857       target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
7858 					       ignore, BUILT_IN_NONE);
7859       if (target)
7860 	return target;
7861       break;
7862 
7863     case BUILT_IN_ATOMIC_FETCH_XOR_1:
7864     case BUILT_IN_ATOMIC_FETCH_XOR_2:
7865     case BUILT_IN_ATOMIC_FETCH_XOR_4:
7866     case BUILT_IN_ATOMIC_FETCH_XOR_8:
7867     case BUILT_IN_ATOMIC_FETCH_XOR_16:
7868       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
7869       target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
7870 					       ignore, BUILT_IN_NONE);
7871       if (target)
7872 	return target;
7873       break;
7874 
7875     case BUILT_IN_ATOMIC_FETCH_OR_1:
7876     case BUILT_IN_ATOMIC_FETCH_OR_2:
7877     case BUILT_IN_ATOMIC_FETCH_OR_4:
7878     case BUILT_IN_ATOMIC_FETCH_OR_8:
7879     case BUILT_IN_ATOMIC_FETCH_OR_16:
7880       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
7881       target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
7882 					       ignore, BUILT_IN_NONE);
7883       if (target)
7884 	return target;
7885       break;
7886 
7887     case BUILT_IN_ATOMIC_TEST_AND_SET:
7888       return expand_builtin_atomic_test_and_set (exp, target);
7889 
7890     case BUILT_IN_ATOMIC_CLEAR:
7891       return expand_builtin_atomic_clear (exp);
7892 
7893     case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
7894       return expand_builtin_atomic_always_lock_free (exp);
7895 
7896     case BUILT_IN_ATOMIC_IS_LOCK_FREE:
7897       target = expand_builtin_atomic_is_lock_free (exp);
7898       if (target)
7899         return target;
7900       break;
7901 
7902     case BUILT_IN_ATOMIC_THREAD_FENCE:
7903       expand_builtin_atomic_thread_fence (exp);
7904       return const0_rtx;
7905 
7906     case BUILT_IN_ATOMIC_SIGNAL_FENCE:
7907       expand_builtin_atomic_signal_fence (exp);
7908       return const0_rtx;
7909 
7910     case BUILT_IN_OBJECT_SIZE:
7911       return expand_builtin_object_size (exp);
7912 
7913     case BUILT_IN_MEMCPY_CHK:
7914     case BUILT_IN_MEMPCPY_CHK:
7915     case BUILT_IN_MEMMOVE_CHK:
7916     case BUILT_IN_MEMSET_CHK:
7917       target = expand_builtin_memory_chk (exp, target, mode, fcode);
7918       if (target)
7919 	return target;
7920       break;
7921 
7922     case BUILT_IN_STRCPY_CHK:
7923     case BUILT_IN_STPCPY_CHK:
7924     case BUILT_IN_STRNCPY_CHK:
7925     case BUILT_IN_STPNCPY_CHK:
7926     case BUILT_IN_STRCAT_CHK:
7927     case BUILT_IN_STRNCAT_CHK:
7928     case BUILT_IN_SNPRINTF_CHK:
7929     case BUILT_IN_VSNPRINTF_CHK:
7930       maybe_emit_chk_warning (exp, fcode);
7931       break;
7932 
7933     case BUILT_IN_SPRINTF_CHK:
7934     case BUILT_IN_VSPRINTF_CHK:
7935       maybe_emit_sprintf_chk_warning (exp, fcode);
7936       break;
7937 
7938     case BUILT_IN_THREAD_POINTER:
7939       return expand_builtin_thread_pointer (exp, target);
7940 
7941     case BUILT_IN_SET_THREAD_POINTER:
7942       expand_builtin_set_thread_pointer (exp);
7943       return const0_rtx;
7944 
7945     case BUILT_IN_ACC_ON_DEVICE:
7946       /* Fall back to a library call if we failed to expand the builtin
7947 	 when folding.  */
7948       break;
7949 
7950     case BUILT_IN_GOACC_PARLEVEL_ID:
7951     case BUILT_IN_GOACC_PARLEVEL_SIZE:
7952       return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
7953 
7954     case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
7955       return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
7956 
7957     case BUILT_IN_SPECULATION_SAFE_VALUE_1:
7958     case BUILT_IN_SPECULATION_SAFE_VALUE_2:
7959     case BUILT_IN_SPECULATION_SAFE_VALUE_4:
7960     case BUILT_IN_SPECULATION_SAFE_VALUE_8:
7961     case BUILT_IN_SPECULATION_SAFE_VALUE_16:
7962       mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
7963       return expand_speculation_safe_value (mode, exp, target, ignore);
7964 
7965     default:	/* Just do a library call for any unknown builtin.  */
7966       break;
7967     }
7968 
7969   /* The switch statement above can drop through to cause the function
7970      to be called normally.  */
7971   return expand_call (exp, target, ignore);
7972 }
7973 
7974 /* Determine whether a tree node represents a call to a built-in
7975    function.  If the tree T is a call to a built-in function with
7976    the right number of arguments of the appropriate types, return
7977    the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
7978    Otherwise the return value is END_BUILTINS.  */
7979 
7980 enum built_in_function
7981 builtin_mathfn_code (const_tree t)
7982 {
7983   const_tree fndecl, arg, parmlist;
7984   const_tree argtype, parmtype;
7985   const_call_expr_arg_iterator iter;
7986 
7987   if (TREE_CODE (t) != CALL_EXPR)
7988     return END_BUILTINS;
7989 
7990   fndecl = get_callee_fndecl (t);
7991   if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
7992       return END_BUILTINS;
7993 
7994   parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
7995   init_const_call_expr_arg_iterator (t, &iter);
7996   for (; parmlist; parmlist = TREE_CHAIN (parmlist))
7997     {
7998       /* If a function doesn't take a variable number of arguments,
7999 	 the last element in the list will have type `void'.  */
8000       parmtype = TREE_VALUE (parmlist);
8001       if (VOID_TYPE_P (parmtype))
8002 	{
8003 	  if (more_const_call_expr_args_p (&iter))
8004 	    return END_BUILTINS;
8005 	  return DECL_FUNCTION_CODE (fndecl);
8006 	}
8007 
8008       if (! more_const_call_expr_args_p (&iter))
8009 	return END_BUILTINS;
8010 
8011       arg = next_const_call_expr_arg (&iter);
8012       argtype = TREE_TYPE (arg);
8013 
8014       if (SCALAR_FLOAT_TYPE_P (parmtype))
8015 	{
8016 	  if (! SCALAR_FLOAT_TYPE_P (argtype))
8017 	    return END_BUILTINS;
8018 	}
8019       else if (COMPLEX_FLOAT_TYPE_P (parmtype))
8020 	{
8021 	  if (! COMPLEX_FLOAT_TYPE_P (argtype))
8022 	    return END_BUILTINS;
8023 	}
8024       else if (POINTER_TYPE_P (parmtype))
8025 	{
8026 	  if (! POINTER_TYPE_P (argtype))
8027 	    return END_BUILTINS;
8028 	}
8029       else if (INTEGRAL_TYPE_P (parmtype))
8030 	{
8031 	  if (! INTEGRAL_TYPE_P (argtype))
8032 	    return END_BUILTINS;
8033 	}
8034       else
8035 	return END_BUILTINS;
8036     }
8037 
8038   /* Variable-length argument list.  */
8039   return DECL_FUNCTION_CODE (fndecl);
8040 }
8041 
8042 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
8043    evaluate to a constant.  */
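
/* A few illustrative cases for the folding below (a sketch, not an
   exhaustive list):
     __builtin_constant_p (42)     -> 1          constant class node
     __builtin_constant_p ("abc")  -> 1          address of a STRING_CST
     __builtin_constant_p (x++)    -> 0          has side effects
     __builtin_constant_p (x + y)  -> NULL_TREE  deferred; it may still
						 fold to a constant later.  */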
8044 
8045 static tree
8046 fold_builtin_constant_p (tree arg)
8047 {
8048   /* We return 1 for a numeric type that's known to be a constant
8049      value at compile-time or for an aggregate type that's a
8050      literal constant.  */
8051   STRIP_NOPS (arg);
8052 
8053   /* If we know this is a constant, return the constant one.  */
8054   if (CONSTANT_CLASS_P (arg)
8055       || (TREE_CODE (arg) == CONSTRUCTOR
8056 	  && TREE_CONSTANT (arg)))
8057     return integer_one_node;
8058   if (TREE_CODE (arg) == ADDR_EXPR)
8059     {
8060        tree op = TREE_OPERAND (arg, 0);
8061        if (TREE_CODE (op) == STRING_CST
8062 	   || (TREE_CODE (op) == ARRAY_REF
8063 	       && integer_zerop (TREE_OPERAND (op, 1))
8064 	       && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
8065 	 return integer_one_node;
8066     }
8067 
8068   /* If this expression has side effects, show we don't know it to be a
8069      constant.  Likewise if it's a pointer or aggregate type since in
8070      those cases we only want literals, since those are only optimized
8071      when generating RTL, not later.
8072      And finally, if we are compiling an initializer, not code, we
8073      need to return a definite result now; there's not going to be any
8074      more optimization done.  */
8075   if (TREE_SIDE_EFFECTS (arg)
8076       || AGGREGATE_TYPE_P (TREE_TYPE (arg))
8077       || POINTER_TYPE_P (TREE_TYPE (arg))
8078       || cfun == 0
8079       || folding_initializer
8080       || force_folding_builtin_constant_p)
8081     return integer_zero_node;
8082 
8083   return NULL_TREE;
8084 }
8085 
8086 /* Create builtin_expect or builtin_expect_with_probability
8087    with PRED and EXPECTED as its arguments and return it as a truthvalue.
8088    The Fortran FE can also produce builtin_expect with PREDICTOR as its third
8089    argument; builtin_expect_with_probability instead uses the third argument
8090    as the PROBABILITY value.  */
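
/* For example (a sketch of the tree this helper builds), for PRED and
   EXPECTED the result is roughly
     __builtin_expect ((long) PRED, (long) EXPECTED) != 0
   or the __builtin_expect_with_probability variant when PROBABILITY is
   given, so the caller can use it directly as a truthvalue.  */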
8091 
8092 static tree
8093 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
8094 				tree predictor, tree probability)
8095 {
8096   tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
8097 
8098   fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
8099 			      : BUILT_IN_EXPECT_WITH_PROBABILITY);
8100   arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
8101   ret_type = TREE_TYPE (TREE_TYPE (fn));
8102   pred_type = TREE_VALUE (arg_types);
8103   expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
8104 
8105   pred = fold_convert_loc (loc, pred_type, pred);
8106   expected = fold_convert_loc (loc, expected_type, expected);
8107 
8108   if (probability)
8109     call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
8110   else
8111     call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
8112 				     predictor);
8113 
8114   return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
8115 		 build_int_cst (ret_type, 0));
8116 }
8117 
8118 /* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3.  Return
8119    NULL_TREE if no simplification is possible.  */
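
/* The key distribution step below, sketched at the source level:
     __builtin_expect (a && b, 1)
   becomes roughly
     __builtin_expect (a, 1) && __builtin_expect (b, 1)
   so the expectation reaches both operands of the short-circuiting
   operator.  */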
8120 
8121 tree
8122 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
8123 		     tree arg3)
8124 {
8125   tree inner, fndecl, inner_arg0;
8126   enum tree_code code;
8127 
8128   /* Distribute the expected value over short-circuiting operators.
8129      See through the cast from truthvalue_type_node to long.  */
8130   inner_arg0 = arg0;
8131   while (CONVERT_EXPR_P (inner_arg0)
8132 	 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
8133 	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
8134     inner_arg0 = TREE_OPERAND (inner_arg0, 0);
8135 
8136   /* If this is a builtin_expect within a builtin_expect keep the
8137      inner one.  See through a comparison against a constant.  It
8138      might have been added to create a truthvalue.
8139   inner = inner_arg0;
8140 
8141   if (COMPARISON_CLASS_P (inner)
8142       && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
8143     inner = TREE_OPERAND (inner, 0);
8144 
8145   if (TREE_CODE (inner) == CALL_EXPR
8146       && (fndecl = get_callee_fndecl (inner))
8147       && (fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
8148 	  || fndecl_built_in_p (fndecl, BUILT_IN_EXPECT_WITH_PROBABILITY)))
8149     return arg0;
8150 
8151   inner = inner_arg0;
8152   code = TREE_CODE (inner);
8153   if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
8154     {
8155       tree op0 = TREE_OPERAND (inner, 0);
8156       tree op1 = TREE_OPERAND (inner, 1);
8157       arg1 = save_expr (arg1);
8158 
8159       op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
8160       op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
8161       inner = build2 (code, TREE_TYPE (inner), op0, op1);
8162 
8163       return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
8164     }
8165 
8166   /* If the argument isn't invariant then there's nothing else we can do.  */
8167   if (!TREE_CONSTANT (inner_arg0))
8168     return NULL_TREE;
8169 
8170   /* If we expect that a comparison against the argument will fold to
8171      a constant return the constant.  In practice, this means a true
8172      constant or the address of a non-weak symbol.  */
8173   inner = inner_arg0;
8174   STRIP_NOPS (inner);
8175   if (TREE_CODE (inner) == ADDR_EXPR)
8176     {
8177       do
8178 	{
8179 	  inner = TREE_OPERAND (inner, 0);
8180 	}
8181       while (TREE_CODE (inner) == COMPONENT_REF
8182 	     || TREE_CODE (inner) == ARRAY_REF);
8183       if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
8184 	return NULL_TREE;
8185     }
8186 
8187   /* Otherwise, ARG0 already has the proper type for the return value.  */
8188   return arg0;
8189 }
8190 
8191 /* Fold a call to __builtin_classify_type with argument ARG.  */
8192 
8193 static tree
8194 fold_builtin_classify_type (tree arg)
8195 {
8196   if (arg == 0)
8197     return build_int_cst (integer_type_node, no_type_class);
8198 
8199   return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
8200 }
8201 
8202 /* Fold a call EXPR (which may be null) to __builtin_strlen with argument
8203    ARG.  */
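
/* For instance, strlen ("hello") folds to 5 here via c_strlen; when the
   length is not a compile-time constant, the code below only tries to
   diagnose unterminated string arguments and returns NULL_TREE.  */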
8204 
8205 static tree
8206 fold_builtin_strlen (location_t loc, tree expr, tree type, tree arg)
8207 {
8208   if (!validate_arg (arg, POINTER_TYPE))
8209     return NULL_TREE;
8210   else
8211     {
8212       c_strlen_data lendata = { };
8213       tree len = c_strlen (arg, 0, &lendata);
8214 
8215       if (len)
8216 	return fold_convert_loc (loc, type, len);
8217 
8218       /* TODO: Move this to gimple-ssa-warn-access once the pass runs
8219 	 also early enough to detect invalid reads in multidimensional
8220 	 arrays and struct members.  */
8221       if (!lendata.decl)
8222 	 c_strlen (arg, 1, &lendata);
8223 
8224       if (lendata.decl)
8225 	{
8226 	  if (EXPR_HAS_LOCATION (arg))
8227 	    loc = EXPR_LOCATION (arg);
8228 	  else if (loc == UNKNOWN_LOCATION)
8229 	    loc = input_location;
8230 	  warn_string_no_nul (loc, expr, "strlen", arg, lendata.decl);
8231 	}
8232 
8233       return NULL_TREE;
8234     }
8235 }
8236 
8237 /* Fold a call to __builtin_inf or __builtin_huge_val.  */
8238 
8239 static tree
8240 fold_builtin_inf (location_t loc, tree type, int warn)
8241 {
8242   REAL_VALUE_TYPE real;
8243 
8244   /* __builtin_inff is intended to be usable to define INFINITY on all
8245      targets.  If an infinity is not available, INFINITY expands "to a
8246      positive constant of type float that overflows at translation
8247      time", footnote "In this case, using INFINITY will violate the
8248      constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
8249      Thus we pedwarn to ensure this constraint violation is
8250      diagnosed.  */
8251   if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
8252     pedwarn (loc, 0, "target format does not support infinity");
8253 
8254   real_inf (&real);
8255   return build_real (type, real);
8256 }
8257 
8258 /* Fold function call to builtin sincos, sincosf, or sincosl.  Return
8259    NULL_TREE if no simplification can be made.  */
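
/* The transformation below, sketched at the source level (assuming the
   target provides a usable cexpi):
     sincos (x, &s, &c);
   becomes roughly
     tmp = cexpi (x); s = __imag__ tmp; c = __real__ tmp;
   and for a constant X the cexpi call itself is folded to a constant.  */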
8260 
8261 static tree
8262 fold_builtin_sincos (location_t loc,
8263 		     tree arg0, tree arg1, tree arg2)
8264 {
8265   tree type;
8266   tree fndecl, call = NULL_TREE;
8267 
8268   if (!validate_arg (arg0, REAL_TYPE)
8269       || !validate_arg (arg1, POINTER_TYPE)
8270       || !validate_arg (arg2, POINTER_TYPE))
8271     return NULL_TREE;
8272 
8273   type = TREE_TYPE (arg0);
8274 
8275   /* Calculate the result when the argument is a constant.  */
8276   built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
8277   if (fn == END_BUILTINS)
8278     return NULL_TREE;
8279 
8280   /* Canonicalize sincos to cexpi.  */
8281   if (TREE_CODE (arg0) == REAL_CST)
8282     {
8283       tree complex_type = build_complex_type (type);
8284       call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
8285     }
8286   if (!call)
8287     {
8288       if (!targetm.libc_has_function (function_c99_math_complex, type)
8289 	  || !builtin_decl_implicit_p (fn))
8290 	return NULL_TREE;
8291       fndecl = builtin_decl_explicit (fn);
8292       call = build_call_expr_loc (loc, fndecl, 1, arg0);
8293       call = builtin_save_expr (call);
8294     }
8295 
8296   tree ptype = build_pointer_type (type);
8297   arg1 = fold_convert (ptype, arg1);
8298   arg2 = fold_convert (ptype, arg2);
8299   return build2 (COMPOUND_EXPR, void_type_node,
8300 		 build2 (MODIFY_EXPR, void_type_node,
8301 			 build_fold_indirect_ref_loc (loc, arg1),
8302 			 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
8303 		 build2 (MODIFY_EXPR, void_type_node,
8304 			 build_fold_indirect_ref_loc (loc, arg2),
8305 			 fold_build1_loc (loc, REALPART_EXPR, type, call)));
8306 }
8307 
8308 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
8309    Return NULL_TREE if no simplification can be made.  */
8310 
8311 static tree
8312 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
8313 {
8314   if (!validate_arg (arg1, POINTER_TYPE)
8315       || !validate_arg (arg2, POINTER_TYPE)
8316       || !validate_arg (len, INTEGER_TYPE))
8317     return NULL_TREE;
8318 
8319   /* If the LEN parameter is zero, return zero.  */
8320   if (integer_zerop (len))
8321     return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
8322 			      arg1, arg2);
8323 
8324   /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
8325   if (operand_equal_p (arg1, arg2, 0))
8326     return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
8327 
8328   /* If len parameter is one, return an expression corresponding to
8329      (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
8330   if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
8331     {
8332       tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
8333       tree cst_uchar_ptr_node
8334 	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
8335 
8336       tree ind1
8337 	= fold_convert_loc (loc, integer_type_node,
8338 			    build1 (INDIRECT_REF, cst_uchar_node,
8339 				    fold_convert_loc (loc,
8340 						      cst_uchar_ptr_node,
8341 						      arg1)));
8342       tree ind2
8343 	= fold_convert_loc (loc, integer_type_node,
8344 			    build1 (INDIRECT_REF, cst_uchar_node,
8345 				    fold_convert_loc (loc,
8346 						      cst_uchar_ptr_node,
8347 						      arg2)));
8348       return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
8349     }
8350 
8351   return NULL_TREE;
8352 }
8353 
8354 /* Fold a call to builtin isascii with argument ARG.  */
8355 
8356 static tree
8357 fold_builtin_isascii (location_t loc, tree arg)
8358 {
8359   if (!validate_arg (arg, INTEGER_TYPE))
8360     return NULL_TREE;
8361   else
8362     {
8363       /* Transform isascii(c) -> ((c & ~0x7f) == 0).  */
8364       arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
8365 			 build_int_cst (integer_type_node,
8366 					~ (unsigned HOST_WIDE_INT) 0x7f));
8367       return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
8368 			      arg, integer_zero_node);
8369     }
8370 }
8371 
8372 /* Fold a call to builtin toascii with argument ARG.  */
8373 
8374 static tree
8375 fold_builtin_toascii (location_t loc, tree arg)
8376 {
8377   if (!validate_arg (arg, INTEGER_TYPE))
8378     return NULL_TREE;
8379 
8380   /* Transform toascii(c) -> (c & 0x7f).  */
8381   return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
8382 			  build_int_cst (integer_type_node, 0x7f));
8383 }
8384 
8385 /* Fold a call to builtin isdigit with argument ARG.  */
8386 
8387 static tree
8388 fold_builtin_isdigit (location_t loc, tree arg)
8389 {
8390   if (!validate_arg (arg, INTEGER_TYPE))
8391     return NULL_TREE;
8392   else
8393     {
8394       /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9.  */
8395       /* According to the C standard, isdigit is unaffected by locale.
8396 	 However, it definitely is affected by the target character set.  */
8397       unsigned HOST_WIDE_INT target_digit0
8398 	= lang_hooks.to_target_charset ('0');
8399 
8400       if (target_digit0 == 0)
8401 	return NULL_TREE;
8402 
8403       arg = fold_convert_loc (loc, unsigned_type_node, arg);
8404       arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
8405 			 build_int_cst (unsigned_type_node, target_digit0));
8406       return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
8407 			  build_int_cst (unsigned_type_node, 9));
8408     }
8409 }
8410 
8411 /* Fold a call to fabs, fabsf or fabsl with argument ARG.  */
8412 
8413 static tree
8414 fold_builtin_fabs (location_t loc, tree arg, tree type)
8415 {
8416   if (!validate_arg (arg, REAL_TYPE))
8417     return NULL_TREE;
8418 
8419   arg = fold_convert_loc (loc, type, arg);
8420   return fold_build1_loc (loc, ABS_EXPR, type, arg);
8421 }
8422 
8423 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG.  */
8424 
8425 static tree
8426 fold_builtin_abs (location_t loc, tree arg, tree type)
8427 {
8428   if (!validate_arg (arg, INTEGER_TYPE))
8429     return NULL_TREE;
8430 
8431   arg = fold_convert_loc (loc, type, arg);
8432   return fold_build1_loc (loc, ABS_EXPR, type, arg);
8433 }
8434 
8435 /* Fold a call to builtin carg(a+bi) -> atan2(b,a).  */
8436 
8437 static tree
8438 fold_builtin_carg (location_t loc, tree arg, tree type)
8439 {
8440   if (validate_arg (arg, COMPLEX_TYPE)
8441       && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
8442     {
8443       tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
8444 
8445       if (atan2_fn)
8446         {
8447   	  tree new_arg = builtin_save_expr (arg);
8448 	  tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
8449 	  tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
8450 	  return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
8451 	}
8452     }
8453 
8454   return NULL_TREE;
8455 }
8456 
8457 /* Fold a call to builtin frexp, we can assume the base is 2.  */
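
/* Worked example of the folding below: frexp (8.0, &e) becomes
   (*e = 4, 0.5), since 8.0 == 0.5 * 2**4 and GCC keeps normalized
   significands in the range [0.5, 1.0).  */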
8458 
8459 static tree
8460 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
8461 {
8462   if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8463     return NULL_TREE;
8464 
8465   STRIP_NOPS (arg0);
8466 
8467   if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8468     return NULL_TREE;
8469 
8470   arg1 = build_fold_indirect_ref_loc (loc, arg1);
8471 
8472   /* Proceed if a valid pointer type was passed in.  */
8473   if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
8474     {
8475       const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8476       tree frac, exp;
8477 
8478       switch (value->cl)
8479       {
8480       case rvc_zero:
8481 	/* For +-0, return (*exp = 0, +-0).  */
8482 	exp = integer_zero_node;
8483 	frac = arg0;
8484 	break;
8485       case rvc_nan:
8486       case rvc_inf:
8487 	/* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
8488 	return omit_one_operand_loc (loc, rettype, arg0, arg1);
8489       case rvc_normal:
8490 	{
8491 	  /* Since the frexp function always expects base 2, and in
8492 	     GCC normalized significands are already in the range
8493 	     [0.5, 1.0), we have exactly what frexp wants.  */
8494 	  REAL_VALUE_TYPE frac_rvt = *value;
8495 	  SET_REAL_EXP (&frac_rvt, 0);
8496 	  frac = build_real (rettype, frac_rvt);
8497 	  exp = build_int_cst (integer_type_node, REAL_EXP (value));
8498 	}
8499 	break;
8500       default:
8501 	gcc_unreachable ();
8502       }
8503 
8504       /* Create the COMPOUND_EXPR (*arg1 = exp, frac).  */
8505       arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
8506       TREE_SIDE_EFFECTS (arg1) = 1;
8507       return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
8508     }
8509 
8510   return NULL_TREE;
8511 }
8512 
8513 /* Fold a call to builtin modf.  */
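
/* Worked example of the folding below: modf (3.25, &i) becomes
   (*i = 3.0, 0.25); for a negative integral argument the fractional
   part is -0.0 so the sign is preserved.  */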
8514 
8515 static tree
8516 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
8517 {
8518   if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
8519     return NULL_TREE;
8520 
8521   STRIP_NOPS (arg0);
8522 
8523   if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
8524     return NULL_TREE;
8525 
8526   arg1 = build_fold_indirect_ref_loc (loc, arg1);
8527 
8528   /* Proceed if a valid pointer type was passed in.  */
8529   if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
8530     {
8531       const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
8532       REAL_VALUE_TYPE trunc, frac;
8533 
8534       switch (value->cl)
8535       {
8536       case rvc_nan:
8537       case rvc_zero:
8538 	/* For +-NaN or +-0, return (*arg1 = arg0, arg0).  */
8539 	trunc = frac = *value;
8540 	break;
8541       case rvc_inf:
8542 	/* For +-Inf, return (*arg1 = arg0, +-0).  */
8543 	frac = dconst0;
8544 	frac.sign = value->sign;
8545 	trunc = *value;
8546 	break;
8547       case rvc_normal:
8548 	/* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)).  */
8549 	real_trunc (&trunc, VOIDmode, value);
8550 	real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
8551 	/* If the original number was negative and already
8552 	   integral, then the fractional part is -0.0.  */
8553 	if (value->sign && frac.cl == rvc_zero)
8554 	  frac.sign = value->sign;
8555 	break;
8556       }
8557 
8558       /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
8559       arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
8560 			  build_real (rettype, trunc));
8561       TREE_SIDE_EFFECTS (arg1) = 1;
8562       return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
8563 			  build_real (rettype, frac));
8564     }
8565 
8566   return NULL_TREE;
8567 }
8568 
8569 /* Given a location LOC, an interclass builtin function decl FNDECL
8570    and its single argument ARG, return a folded expression computing
8571    the same, or NULL_TREE if we either couldn't or didn't want to fold
8572    (the latter happens if there's an RTL instruction available).  */
8573 
8574 static tree
8575 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
8576 {
8577   machine_mode mode;
8578 
8579   if (!validate_arg (arg, REAL_TYPE))
8580     return NULL_TREE;
8581 
8582   if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
8583     return NULL_TREE;
8584 
8585   mode = TYPE_MODE (TREE_TYPE (arg));
8586 
8587   bool is_ibm_extended = MODE_COMPOSITE_P (mode);
8588 
8589   /* If there is no optab, try generic code.  */
8590   switch (DECL_FUNCTION_CODE (fndecl))
8591     {
8592       tree result;
8593 
8594     CASE_FLT_FN (BUILT_IN_ISINF):
8595       {
8596 	/* isinf(x) -> isgreater(fabs(x),DBL_MAX).  */
8597 	tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8598 	tree type = TREE_TYPE (arg);
8599 	REAL_VALUE_TYPE r;
8600 	char buf[128];
8601 
8602 	if (is_ibm_extended)
8603 	  {
8604 	    /* NaN and Inf are encoded in the high-order double value
8605 	       only.  The low-order value is not significant.  */
8606 	    type = double_type_node;
8607 	    mode = DFmode;
8608 	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8609 	  }
8610 	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
8611 	real_from_string (&r, buf);
8612 	result = build_call_expr (isgr_fn, 2,
8613 				  fold_build1_loc (loc, ABS_EXPR, type, arg),
8614 				  build_real (type, r));
8615 	return result;
8616       }
8617     CASE_FLT_FN (BUILT_IN_FINITE):
8618     case BUILT_IN_ISFINITE:
8619       {
8620 	/* isfinite(x) -> islessequal(fabs(x),DBL_MAX).  */
8621 	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8622 	tree type = TREE_TYPE (arg);
8623 	REAL_VALUE_TYPE r;
8624 	char buf[128];
8625 
8626 	if (is_ibm_extended)
8627 	  {
8628 	    /* NaN and Inf are encoded in the high-order double value
8629 	       only.  The low-order value is not significant.  */
8630 	    type = double_type_node;
8631 	    mode = DFmode;
8632 	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8633 	  }
8634 	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
8635 	real_from_string (&r, buf);
8636 	result = build_call_expr (isle_fn, 2,
8637 				  fold_build1_loc (loc, ABS_EXPR, type, arg),
8638 				  build_real (type, r));
8639 	/*result = fold_build2_loc (loc, UNGT_EXPR,
8640 				  TREE_TYPE (TREE_TYPE (fndecl)),
8641 				  fold_build1_loc (loc, ABS_EXPR, type, arg),
8642 				  build_real (type, r));
8643 	result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
8644 				  TREE_TYPE (TREE_TYPE (fndecl)),
8645 				  result);*/
8646 	return result;
8647       }
8648     case BUILT_IN_ISNORMAL:
8649       {
8650 	/* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
8651 	   islessequal(fabs(x),DBL_MAX).  */
8652 	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
8653 	tree type = TREE_TYPE (arg);
8654 	tree orig_arg, max_exp, min_exp;
8655 	machine_mode orig_mode = mode;
8656 	REAL_VALUE_TYPE rmax, rmin;
8657 	char buf[128];
8658 
8659 	orig_arg = arg = builtin_save_expr (arg);
8660 	if (is_ibm_extended)
8661 	  {
8662 	    /* Use double to test the normal range of IBM extended
8663 	       precision.  Emin for IBM extended precision is
8664 	       different to emin for IEEE double, being 53 higher
8665 	       since the low double exponent is at least 53 lower
8666 	       than the high double exponent.  */
8667 	    type = double_type_node;
8668 	    mode = DFmode;
8669 	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
8670 	  }
8671 	arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
8672 
8673 	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
8674 	real_from_string (&rmax, buf);
8675 	sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
8676 	real_from_string (&rmin, buf);
8677 	max_exp = build_real (type, rmax);
8678 	min_exp = build_real (type, rmin);
8679 
8680 	max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
8681 	if (is_ibm_extended)
8682 	  {
8683 	    /* Testing the high end of the range is done just using
8684 	       the high double, using the same test as isfinite().
8685 	       For the subnormal end of the range we first test the
8686 	       high double, then if its magnitude is equal to the
8687 	       limit of 0x1p-969, we test whether the low double is
8688 	       non-zero and opposite sign to the high double.  */
8689 	    tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
8690 	    tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
8691 	    tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
8692 	    tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
8693 				       arg, min_exp);
8694 	    tree as_complex = build1 (VIEW_CONVERT_EXPR,
8695 				      complex_double_type_node, orig_arg);
8696 	    tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
8697 	    tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
8698 	    tree zero = build_real (type, dconst0);
8699 	    tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
8700 	    tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
8701 	    tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
8702 	    tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
8703 				      fold_build3 (COND_EXPR,
8704 						   integer_type_node,
8705 						   hilt, logt, lolt));
8706 	    eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
8707 				  eq_min, ok_lo);
8708 	    min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
8709 				   gt_min, eq_min);
8710 	  }
8711 	else
8712 	  {
8713 	    tree const isge_fn
8714 	      = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
8715 	    min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
8716 	  }
8717 	result = fold_build2 (BIT_AND_EXPR, integer_type_node,
8718 			      max_exp, min_exp);
8719 	return result;
8720       }
8721     default:
8722       break;
8723     }
8724 
8725   return NULL_TREE;
8726 }
8727 
8728 /* Fold a call to __builtin_isnan(), __builtin_isinf, __builtin_finite.
8729    ARG is the argument for the call.  */
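
/* For instance, when the argument may be a NaN, isnan (x) is lowered at
   the end of this function to the unordered comparison
   UNORDERED_EXPR (x, x), which is true exactly when X is a NaN.  */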
8730 
8731 static tree
8732 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
8733 {
8734   tree type = TREE_TYPE (TREE_TYPE (fndecl));
8735 
8736   if (!validate_arg (arg, REAL_TYPE))
8737     return NULL_TREE;
8738 
8739   switch (builtin_index)
8740     {
8741     case BUILT_IN_ISINF:
8742       if (tree_expr_infinite_p (arg))
8743 	return omit_one_operand_loc (loc, type, integer_one_node, arg);
8744       if (!tree_expr_maybe_infinite_p (arg))
8745 	return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8746       return NULL_TREE;
8747 
8748     case BUILT_IN_ISINF_SIGN:
8749       {
8750 	/* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
8751 	/* In a boolean context, GCC will fold the inner COND_EXPR to
8752 	   1.  So e.g. "if (isinf_sign(x))" would be folded to just
8753 	   "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
8754 	tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
8755 	tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
8756 	tree tmp = NULL_TREE;
8757 
8758 	arg = builtin_save_expr (arg);
8759 
8760 	if (signbit_fn && isinf_fn)
8761 	  {
8762 	    tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
8763 	    tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
8764 
8765 	    signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8766 					signbit_call, integer_zero_node);
8767 	    isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
8768 				      isinf_call, integer_zero_node);
8769 
8770 	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
8771 			       integer_minus_one_node, integer_one_node);
8772 	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8773 			       isinf_call, tmp,
8774 			       integer_zero_node);
8775 	  }
8776 
8777 	return tmp;
8778       }
8779 
8780     case BUILT_IN_ISFINITE:
8781       if (tree_expr_finite_p (arg))
8782 	return omit_one_operand_loc (loc, type, integer_one_node, arg);
8783       if (tree_expr_nan_p (arg) || tree_expr_infinite_p (arg))
8784 	return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8785       return NULL_TREE;
8786 
8787     case BUILT_IN_ISNAN:
8788       if (tree_expr_nan_p (arg))
8789 	return omit_one_operand_loc (loc, type, integer_one_node, arg);
8790       if (!tree_expr_maybe_nan_p (arg))
8791 	return omit_one_operand_loc (loc, type, integer_zero_node, arg);
8792 
8793       {
8794 	bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
8795 	if (is_ibm_extended)
8796 	  {
8797 	    /* NaN and Inf are encoded in the high-order double value
8798 	       only.  The low-order value is not significant.  */
8799 	    arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
8800 	  }
8801       }
8802       arg = builtin_save_expr (arg);
8803       return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
8804 
8805     default:
8806       gcc_unreachable ();
8807     }
8808 }
8809 
8810 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
8811    This builtin will generate code to return the appropriate floating
8812    point classification depending on the value of the floating point
8813    number passed in.  The possible return values must be supplied as
8814    int arguments to the call in the following order: FP_NAN, FP_INFINITE,
8815    FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipsis is for exactly
8816    one floating point argument which is "type generic".
8817 
8818 static tree
8819 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
8820 {
8821   tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
8822     arg, type, res, tmp;
8823   machine_mode mode;
8824   REAL_VALUE_TYPE r;
8825   char buf[128];
8826 
8827   /* Verify the required arguments in the original call.  */
8828   if (nargs != 6
8829       || !validate_arg (args[0], INTEGER_TYPE)
8830       || !validate_arg (args[1], INTEGER_TYPE)
8831       || !validate_arg (args[2], INTEGER_TYPE)
8832       || !validate_arg (args[3], INTEGER_TYPE)
8833       || !validate_arg (args[4], INTEGER_TYPE)
8834       || !validate_arg (args[5], REAL_TYPE))
8835     return NULL_TREE;
8836 
8837   fp_nan = args[0];
8838   fp_infinite = args[1];
8839   fp_normal = args[2];
8840   fp_subnormal = args[3];
8841   fp_zero = args[4];
8842   arg = args[5];
8843   type = TREE_TYPE (arg);
8844   mode = TYPE_MODE (type);
8845   arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
8846 
8847   /* fpclassify(x) ->
8848        isnan(x) ? FP_NAN :
8849          (fabs(x) == Inf ? FP_INFINITE :
8850 	   (fabs(x) >= DBL_MIN ? FP_NORMAL :
8851 	     (x == 0 ? FP_ZERO : FP_SUBNORMAL))).  */
8852 
8853   tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8854 		     build_real (type, dconst0));
8855   res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
8856 		     tmp, fp_zero, fp_subnormal);
8857 
8858   sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
8859   real_from_string (&r, buf);
8860   tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
8861 		     arg, build_real (type, r));
8862   res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
8863 
8864   if (tree_expr_maybe_infinite_p (arg))
8865     {
8866       real_inf (&r);
8867       tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
8868 			 build_real (type, r));
8869       res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
8870 			 fp_infinite, res);
8871     }
8872 
8873   if (tree_expr_maybe_nan_p (arg))
8874     {
8875       tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
8876       res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
8877     }
8878 
8879   return res;
8880 }
8881 
8882 /* Fold a call to an unordered comparison function such as
8883    __builtin_isgreater().  FNDECL is the FUNCTION_DECL for the function
8884    being called and ARG0 and ARG1 are the arguments for the call.
8885    UNORDERED_CODE and ORDERED_CODE are comparison codes that give
8886    the opposite of the desired result.  UNORDERED_CODE is used
8887    for modes that can hold NaNs and ORDERED_CODE is used for
8888    the rest.  */
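
/* For example (a sketch; the comparison codes are supplied by the
   caller): isgreater (x, y) is folded to !(x <= y) when neither operand
   can be a NaN, and to the unordered form !UNLE_EXPR (x, y) otherwise,
   so a NaN operand yields 0 without raising an exception.  */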
8889 
8890 static tree
8891 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
8892 			    enum tree_code unordered_code,
8893 			    enum tree_code ordered_code)
8894 {
8895   tree type = TREE_TYPE (TREE_TYPE (fndecl));
8896   enum tree_code code;
8897   tree type0, type1;
8898   enum tree_code code0, code1;
8899   tree cmp_type = NULL_TREE;
8900 
8901   type0 = TREE_TYPE (arg0);
8902   type1 = TREE_TYPE (arg1);
8903 
8904   code0 = TREE_CODE (type0);
8905   code1 = TREE_CODE (type1);
8906 
8907   if (code0 == REAL_TYPE && code1 == REAL_TYPE)
8908     /* Choose the wider of two real types.  */
8909     cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
8910       ? type0 : type1;
8911   else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
8912     cmp_type = type0;
8913   else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
8914     cmp_type = type1;
8915 
8916   arg0 = fold_convert_loc (loc, cmp_type, arg0);
8917   arg1 = fold_convert_loc (loc, cmp_type, arg1);
8918 
8919   if (unordered_code == UNORDERED_EXPR)
8920     {
8921       if (tree_expr_nan_p (arg0) || tree_expr_nan_p (arg1))
8922 	return omit_two_operands_loc (loc, type, integer_one_node, arg0, arg1);
8923       if (!tree_expr_maybe_nan_p (arg0) && !tree_expr_maybe_nan_p (arg1))
8924 	return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
8925       return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
8926     }
8927 
8928   code = (tree_expr_maybe_nan_p (arg0) || tree_expr_maybe_nan_p (arg1))
8929 	 ? unordered_code : ordered_code;
8930   return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
8931 		      fold_build2_loc (loc, code, type, arg0, arg1));
8932 }
8933 
8934 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
8935    arithmetic if it can never overflow, or into internal functions that
8936    return both the result of the arithmetic and an overflow flag in a
8937    complex integer result, or some other check for overflow.
8938    Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
8939    checking part of that.  */
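
/* Sketch of the generic expansion below (GIMPLE-like pseudo code, not
   exact dump syntax):
     __builtin_add_overflow (a, b, &r)
   becomes roughly
     tmp = .ADD_OVERFLOW (a, b);
     *(&r) = __real__ tmp;
     result = (_Bool) __imag__ tmp;
   The _overflow_p variants keep only the overflow flag, and fully
   constant operands fold to a constant flag.  */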
8940 
8941 static tree
8942 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
8943 			     tree arg0, tree arg1, tree arg2)
8944 {
8945   enum internal_fn ifn = IFN_LAST;
8946   /* The code of the expression corresponding to the built-in.  */
8947   enum tree_code opcode = ERROR_MARK;
8948   bool ovf_only = false;
8949 
8950   switch (fcode)
8951     {
8952     case BUILT_IN_ADD_OVERFLOW_P:
8953       ovf_only = true;
8954       /* FALLTHRU */
8955     case BUILT_IN_ADD_OVERFLOW:
8956     case BUILT_IN_SADD_OVERFLOW:
8957     case BUILT_IN_SADDL_OVERFLOW:
8958     case BUILT_IN_SADDLL_OVERFLOW:
8959     case BUILT_IN_UADD_OVERFLOW:
8960     case BUILT_IN_UADDL_OVERFLOW:
8961     case BUILT_IN_UADDLL_OVERFLOW:
8962       opcode = PLUS_EXPR;
8963       ifn = IFN_ADD_OVERFLOW;
8964       break;
8965     case BUILT_IN_SUB_OVERFLOW_P:
8966       ovf_only = true;
8967       /* FALLTHRU */
8968     case BUILT_IN_SUB_OVERFLOW:
8969     case BUILT_IN_SSUB_OVERFLOW:
8970     case BUILT_IN_SSUBL_OVERFLOW:
8971     case BUILT_IN_SSUBLL_OVERFLOW:
8972     case BUILT_IN_USUB_OVERFLOW:
8973     case BUILT_IN_USUBL_OVERFLOW:
8974     case BUILT_IN_USUBLL_OVERFLOW:
8975       opcode = MINUS_EXPR;
8976       ifn = IFN_SUB_OVERFLOW;
8977       break;
8978     case BUILT_IN_MUL_OVERFLOW_P:
8979       ovf_only = true;
8980       /* FALLTHRU */
8981     case BUILT_IN_MUL_OVERFLOW:
8982     case BUILT_IN_SMUL_OVERFLOW:
8983     case BUILT_IN_SMULL_OVERFLOW:
8984     case BUILT_IN_SMULLL_OVERFLOW:
8985     case BUILT_IN_UMUL_OVERFLOW:
8986     case BUILT_IN_UMULL_OVERFLOW:
8987     case BUILT_IN_UMULLL_OVERFLOW:
8988       opcode = MULT_EXPR;
8989       ifn = IFN_MUL_OVERFLOW;
8990       break;
8991     default:
8992       gcc_unreachable ();
8993     }
8994 
8995   /* For the "generic" overloads, the first two arguments can have different
8996      types and the last argument determines the target type to use to check
8997      for overflow.  The arguments of the other overloads all have the same
8998      type.  */
8999   tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
9000 
9001   /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
9002      arguments are constant, attempt to fold the built-in call into a constant
9003      expression indicating whether or not it detected an overflow.  */
9004   if (ovf_only
9005       && TREE_CODE (arg0) == INTEGER_CST
9006       && TREE_CODE (arg1) == INTEGER_CST)
9007     /* Perform the computation in the target type and check for overflow.  */
9008     return omit_one_operand_loc (loc, boolean_type_node,
9009 				 arith_overflowed_p (opcode, type, arg0, arg1)
9010 				 ? boolean_true_node : boolean_false_node,
9011 				 arg2);
9012 
9013   tree intres, ovfres;
9014   if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9015     {
9016       intres = fold_binary_loc (loc, opcode, type,
9017 				fold_convert_loc (loc, type, arg0),
9018 				fold_convert_loc (loc, type, arg1));
9019       if (TREE_OVERFLOW (intres))
9020 	intres = drop_tree_overflow (intres);
9021       ovfres = (arith_overflowed_p (opcode, type, arg0, arg1)
9022 		? boolean_true_node : boolean_false_node);
9023     }
9024   else
9025     {
9026       tree ctype = build_complex_type (type);
9027       tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
9028 						arg0, arg1);
9029       tree tgt = save_expr (call);
9030       intres = build1_loc (loc, REALPART_EXPR, type, tgt);
9031       ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
9032       ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
9033     }
9034 
9035   if (ovf_only)
9036     return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
9037 
9038   tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
9039   tree store
9040     = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
9041   return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
9042 }
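
/* A worked example of the folding above (illustrative only; the values and
   types are hypothetical).  With constant operands the _p variants fold to a
   boolean constant:

     __builtin_add_overflow_p (INT_MAX, 1, (int) 0)   -->  true

   With non-constant operands the call is lowered to the internal function on
   a complex type, roughly:

     __builtin_mul_overflow (a, b, &res)
       -->  t = .MUL_OVERFLOW (a, b);
            res = REALPART_EXPR <t>, (bool) IMAGPART_EXPR <t>;  */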
9043 
9044 /* Fold a call to __builtin_FILE to a constant string.  */
9045 
9046 static inline tree
9047 fold_builtin_FILE (location_t loc)
9048 {
9049   if (const char *fname = LOCATION_FILE (loc))
9050     {
9051       /* The documentation says this builtin is equivalent to the preprocessor
9052 	 __FILE__ macro so it appears appropriate to use the same file prefix
9053 	 mappings.  */
9054       fname = remap_macro_filename (fname);
9055       return build_string_literal (strlen (fname) + 1, fname);
9056     }
9057 
9058   return build_string_literal (1, "");
9059 }
9060 
9061 /* Fold a call to __builtin_FUNCTION to a constant string.  */
9062 
9063 static inline tree
9064 fold_builtin_FUNCTION ()
9065 {
9066   const char *name = "";
9067 
9068   if (current_function_decl)
9069     name = lang_hooks.decl_printable_name (current_function_decl, 0);
9070 
9071   return build_string_literal (strlen (name) + 1, name);
9072 }
9073 
9074 /* Fold a call to __builtin_LINE to an integer constant.  */
9075 
9076 static inline tree
9077 fold_builtin_LINE (location_t loc, tree type)
9078 {
9079   return build_int_cst (type, LOCATION_LINE (loc));
9080 }
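
/* Taken together, the three folders above turn the source-location builtins
   into compile-time constants.  A hypothetical sketch of what a caller sees:

     void f (void)
     {
       const char *file = __builtin_FILE ();      /+ "dir/foo.c", after any
						      file prefix mapping +/
       const char *func = __builtin_FUNCTION ();  /+ "f" +/
       int line = __builtin_LINE ();              /+ the literal line number +/
     }

   (The /+ ... +/ markers stand in for nested comments.)  */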
9081 
9082 /* Fold a call to built-in function FNDECL with 0 arguments.
9083    This function returns NULL_TREE if no simplification was possible.  */
9084 
9085 static tree
9086 fold_builtin_0 (location_t loc, tree fndecl)
9087 {
9088   tree type = TREE_TYPE (TREE_TYPE (fndecl));
9089   enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9090   switch (fcode)
9091     {
9092     case BUILT_IN_FILE:
9093       return fold_builtin_FILE (loc);
9094 
9095     case BUILT_IN_FUNCTION:
9096       return fold_builtin_FUNCTION ();
9097 
9098     case BUILT_IN_LINE:
9099       return fold_builtin_LINE (loc, type);
9100 
9101     CASE_FLT_FN (BUILT_IN_INF):
9102     CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
9103     case BUILT_IN_INFD32:
9104     case BUILT_IN_INFD64:
9105     case BUILT_IN_INFD128:
9106       return fold_builtin_inf (loc, type, true);
9107 
9108     CASE_FLT_FN (BUILT_IN_HUGE_VAL):
9109     CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
9110       return fold_builtin_inf (loc, type, false);
9111 
9112     case BUILT_IN_CLASSIFY_TYPE:
9113       return fold_builtin_classify_type (NULL_TREE);
9114 
9115     default:
9116       break;
9117     }
9118   return NULL_TREE;
9119 }
9120 
9121 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
9122    This function returns NULL_TREE if no simplification was possible.  */
9123 
9124 static tree
9125 fold_builtin_1 (location_t loc, tree expr, tree fndecl, tree arg0)
9126 {
9127   tree type = TREE_TYPE (TREE_TYPE (fndecl));
9128   enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9129 
9130   if (TREE_CODE (arg0) == ERROR_MARK)
9131     return NULL_TREE;
9132 
9133   if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
9134     return ret;
9135 
9136   switch (fcode)
9137     {
9138     case BUILT_IN_CONSTANT_P:
9139       {
9140 	tree val = fold_builtin_constant_p (arg0);
9141 
9142 	/* Gimplification will pull the CALL_EXPR for the builtin out of
9143 	   an if condition.  When not optimizing, we'll not CSE it back.
9144 	   To avoid regressions such as link errors, return false now.  */
9145 	if (!val && !optimize)
9146 	  val = integer_zero_node;
9147 
9148 	return val;
9149       }
9150 
9151     case BUILT_IN_CLASSIFY_TYPE:
9152       return fold_builtin_classify_type (arg0);
9153 
9154     case BUILT_IN_STRLEN:
9155       return fold_builtin_strlen (loc, expr, type, arg0);
9156 
9157     CASE_FLT_FN (BUILT_IN_FABS):
9158     CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
9159     case BUILT_IN_FABSD32:
9160     case BUILT_IN_FABSD64:
9161     case BUILT_IN_FABSD128:
9162       return fold_builtin_fabs (loc, arg0, type);
9163 
9164     case BUILT_IN_ABS:
9165     case BUILT_IN_LABS:
9166     case BUILT_IN_LLABS:
9167     case BUILT_IN_IMAXABS:
9168       return fold_builtin_abs (loc, arg0, type);
9169 
9170     CASE_FLT_FN (BUILT_IN_CONJ):
9171       if (validate_arg (arg0, COMPLEX_TYPE)
9172 	&& TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9173 	return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
9174     break;
9175 
9176     CASE_FLT_FN (BUILT_IN_CREAL):
9177       if (validate_arg (arg0, COMPLEX_TYPE)
9178 	&& TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9179 	return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
9180     break;
9181 
9182     CASE_FLT_FN (BUILT_IN_CIMAG):
9183       if (validate_arg (arg0, COMPLEX_TYPE)
9184 	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
9185 	return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
9186     break;
9187 
9188     CASE_FLT_FN (BUILT_IN_CARG):
9189       return fold_builtin_carg (loc, arg0, type);
9190 
9191     case BUILT_IN_ISASCII:
9192       return fold_builtin_isascii (loc, arg0);
9193 
9194     case BUILT_IN_TOASCII:
9195       return fold_builtin_toascii (loc, arg0);
9196 
9197     case BUILT_IN_ISDIGIT:
9198       return fold_builtin_isdigit (loc, arg0);
9199 
9200     CASE_FLT_FN (BUILT_IN_FINITE):
9201     case BUILT_IN_FINITED32:
9202     case BUILT_IN_FINITED64:
9203     case BUILT_IN_FINITED128:
9204     case BUILT_IN_ISFINITE:
9205       {
9206 	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
9207 	if (ret)
9208 	  return ret;
9209 	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9210       }
9211 
9212     CASE_FLT_FN (BUILT_IN_ISINF):
9213     case BUILT_IN_ISINFD32:
9214     case BUILT_IN_ISINFD64:
9215     case BUILT_IN_ISINFD128:
9216       {
9217 	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
9218 	if (ret)
9219 	  return ret;
9220 	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9221       }
9222 
9223     case BUILT_IN_ISNORMAL:
9224       return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
9225 
9226     case BUILT_IN_ISINF_SIGN:
9227       return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
9228 
9229     CASE_FLT_FN (BUILT_IN_ISNAN):
9230     case BUILT_IN_ISNAND32:
9231     case BUILT_IN_ISNAND64:
9232     case BUILT_IN_ISNAND128:
9233       return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
9234 
9235     case BUILT_IN_FREE:
9236       if (integer_zerop (arg0))
9237 	return build_empty_stmt (loc);
9238       break;
9239 
9240     default:
9241       break;
9242     }
9243 
9244   return NULL_TREE;
9245 
9246 }
9247 
9248 /* Folds a call EXPR (which may be null) to built-in function FNDECL
9249    with 2 arguments, ARG0 and ARG1.  This function returns NULL_TREE
9250    if no simplification was possible.  */
9251 
9252 static tree
9253 fold_builtin_2 (location_t loc, tree expr, tree fndecl, tree arg0, tree arg1)
9254 {
9255   tree type = TREE_TYPE (TREE_TYPE (fndecl));
9256   enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9257 
9258   if (TREE_CODE (arg0) == ERROR_MARK
9259       || TREE_CODE (arg1) == ERROR_MARK)
9260     return NULL_TREE;
9261 
9262   if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
9263     return ret;
9264 
9265   switch (fcode)
9266     {
9267     CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
9268     CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
9269       if (validate_arg (arg0, REAL_TYPE)
9270 	  && validate_arg (arg1, POINTER_TYPE))
9271 	return do_mpfr_lgamma_r (arg0, arg1, type);
9272     break;
9273 
9274     CASE_FLT_FN (BUILT_IN_FREXP):
9275       return fold_builtin_frexp (loc, arg0, arg1, type);
9276 
9277     CASE_FLT_FN (BUILT_IN_MODF):
9278       return fold_builtin_modf (loc, arg0, arg1, type);
9279 
9280     case BUILT_IN_STRSPN:
9281       return fold_builtin_strspn (loc, expr, arg0, arg1);
9282 
9283     case BUILT_IN_STRCSPN:
9284       return fold_builtin_strcspn (loc, expr, arg0, arg1);
9285 
9286     case BUILT_IN_STRPBRK:
9287       return fold_builtin_strpbrk (loc, expr, arg0, arg1, type);
9288 
9289     case BUILT_IN_EXPECT:
9290       return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);
9291 
9292     case BUILT_IN_ISGREATER:
9293       return fold_builtin_unordered_cmp (loc, fndecl,
9294 					 arg0, arg1, UNLE_EXPR, LE_EXPR);
9295     case BUILT_IN_ISGREATEREQUAL:
9296       return fold_builtin_unordered_cmp (loc, fndecl,
9297 					 arg0, arg1, UNLT_EXPR, LT_EXPR);
9298     case BUILT_IN_ISLESS:
9299       return fold_builtin_unordered_cmp (loc, fndecl,
9300 					 arg0, arg1, UNGE_EXPR, GE_EXPR);
9301     case BUILT_IN_ISLESSEQUAL:
9302       return fold_builtin_unordered_cmp (loc, fndecl,
9303 					 arg0, arg1, UNGT_EXPR, GT_EXPR);
9304     case BUILT_IN_ISLESSGREATER:
9305       return fold_builtin_unordered_cmp (loc, fndecl,
9306 					 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
9307     case BUILT_IN_ISUNORDERED:
9308       return fold_builtin_unordered_cmp (loc, fndecl,
9309 					 arg0, arg1, UNORDERED_EXPR,
9310 					 NOP_EXPR);
9311 
9312       /* We do the folding for va_start in the expander.  */
9313     case BUILT_IN_VA_START:
9314       break;
9315 
9316     case BUILT_IN_OBJECT_SIZE:
9317       return fold_builtin_object_size (arg0, arg1);
9318 
9319     case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
9320       return fold_builtin_atomic_always_lock_free (arg0, arg1);
9321 
9322     case BUILT_IN_ATOMIC_IS_LOCK_FREE:
9323       return fold_builtin_atomic_is_lock_free (arg0, arg1);
9324 
9325     default:
9326       break;
9327     }
9328   return NULL_TREE;
9329 }
9330 
9331 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
9332    and ARG2.
9333    This function returns NULL_TREE if no simplification was possible.  */
9334 
9335 static tree
9336 fold_builtin_3 (location_t loc, tree fndecl,
9337 		tree arg0, tree arg1, tree arg2)
9338 {
9339   tree type = TREE_TYPE (TREE_TYPE (fndecl));
9340   enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9341 
9342   if (TREE_CODE (arg0) == ERROR_MARK
9343       || TREE_CODE (arg1) == ERROR_MARK
9344       || TREE_CODE (arg2) == ERROR_MARK)
9345     return NULL_TREE;
9346 
9347   if (tree ret = fold_const_call (as_combined_fn (fcode), type,
9348 				  arg0, arg1, arg2))
9349     return ret;
9350 
9351   switch (fcode)
9352     {
9353 
9354     CASE_FLT_FN (BUILT_IN_SINCOS):
9355       return fold_builtin_sincos (loc, arg0, arg1, arg2);
9356 
9357     CASE_FLT_FN (BUILT_IN_REMQUO):
9358       if (validate_arg (arg0, REAL_TYPE)
9359 	  && validate_arg (arg1, REAL_TYPE)
9360 	  && validate_arg (arg2, POINTER_TYPE))
9361 	return do_mpfr_remquo (arg0, arg1, arg2);
9362     break;
9363 
9364     case BUILT_IN_MEMCMP:
9365       return fold_builtin_memcmp (loc, arg0, arg1, arg2);
9366 
9367     case BUILT_IN_EXPECT:
9368       return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);
9369 
9370     case BUILT_IN_EXPECT_WITH_PROBABILITY:
9371       return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);
9372 
9373     case BUILT_IN_ADD_OVERFLOW:
9374     case BUILT_IN_SUB_OVERFLOW:
9375     case BUILT_IN_MUL_OVERFLOW:
9376     case BUILT_IN_ADD_OVERFLOW_P:
9377     case BUILT_IN_SUB_OVERFLOW_P:
9378     case BUILT_IN_MUL_OVERFLOW_P:
9379     case BUILT_IN_SADD_OVERFLOW:
9380     case BUILT_IN_SADDL_OVERFLOW:
9381     case BUILT_IN_SADDLL_OVERFLOW:
9382     case BUILT_IN_SSUB_OVERFLOW:
9383     case BUILT_IN_SSUBL_OVERFLOW:
9384     case BUILT_IN_SSUBLL_OVERFLOW:
9385     case BUILT_IN_SMUL_OVERFLOW:
9386     case BUILT_IN_SMULL_OVERFLOW:
9387     case BUILT_IN_SMULLL_OVERFLOW:
9388     case BUILT_IN_UADD_OVERFLOW:
9389     case BUILT_IN_UADDL_OVERFLOW:
9390     case BUILT_IN_UADDLL_OVERFLOW:
9391     case BUILT_IN_USUB_OVERFLOW:
9392     case BUILT_IN_USUBL_OVERFLOW:
9393     case BUILT_IN_USUBLL_OVERFLOW:
9394     case BUILT_IN_UMUL_OVERFLOW:
9395     case BUILT_IN_UMULL_OVERFLOW:
9396     case BUILT_IN_UMULLL_OVERFLOW:
9397       return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
9398 
9399     default:
9400       break;
9401     }
9402   return NULL_TREE;
9403 }
9404 
9405 /* Folds a call EXPR (which may be null) to built-in function FNDECL.
9406    ARGS is an array of NARGS arguments.  IGNORE is true if the result
9407    of the function call is ignored.  This function returns NULL_TREE
9408    if no simplification was possible.  */
9409 
9410 static tree
9411 fold_builtin_n (location_t loc, tree expr, tree fndecl, tree *args,
9412 		int nargs, bool)
9413 {
9414   tree ret = NULL_TREE;
9415 
9416   switch (nargs)
9417     {
9418     case 0:
9419       ret = fold_builtin_0 (loc, fndecl);
9420       break;
9421     case 1:
9422       ret = fold_builtin_1 (loc, expr, fndecl, args[0]);
9423       break;
9424     case 2:
9425       ret = fold_builtin_2 (loc, expr, fndecl, args[0], args[1]);
9426       break;
9427     case 3:
9428       ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
9429       break;
9430     default:
9431       ret = fold_builtin_varargs (loc, fndecl, args, nargs);
9432       break;
9433     }
9434   if (ret)
9435     {
9436       ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
9437       SET_EXPR_LOCATION (ret, loc);
9438       return ret;
9439     }
9440   return NULL_TREE;
9441 }
9442 
9443 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
9444    list ARGS along with N new arguments in NEWARGS.  SKIP is the number
9445    of arguments in ARGS to be omitted.  OLDNARGS is the number of
9446    elements in ARGS.  */
9447 
9448 static tree
9449 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
9450 			  int skip, tree fndecl, int n, va_list newargs)
9451 {
9452   int nargs = oldnargs - skip + n;
9453   tree *buffer;
9454 
9455   if (n > 0)
9456     {
9457       int i, j;
9458 
9459       buffer = XALLOCAVEC (tree, nargs);
9460       for (i = 0; i < n; i++)
9461 	buffer[i] = va_arg (newargs, tree);
9462       for (j = skip; j < oldnargs; j++, i++)
9463 	buffer[i] = args[j];
9464     }
9465   else
9466     buffer = args + skip;
9467 
9468   return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
9469 }
9470 
9471 /* Return true if FNDECL shouldn't be folded right now.
9472    If a built-in function has an inline attribute always_inline
9473    wrapper, defer folding it after always_inline functions have
9474    been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
9475    might not be performed.  */
9476 
9477 bool
9478 avoid_folding_inline_builtin (tree fndecl)
9479 {
9480   return (DECL_DECLARED_INLINE_P (fndecl)
9481 	  && DECL_DISREGARD_INLINE_LIMITS (fndecl)
9482 	  && cfun
9483 	  && !cfun->always_inline_functions_inlined
9484 	  && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
9485 }
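
/* The situation guarded against above typically arises from a fortify
   wrapper of roughly the following (simplified, glibc-style) shape; folding
   a call to the builtin before the always_inline wrapper has been inlined
   would bypass the object-size check:

     extern __inline __attribute__ ((__always_inline__)) char *
     strcpy (char *__dest, const char *__src)
     {
       return __builtin___strcpy_chk (__dest, __src,
				      __builtin_object_size (__dest, 1));
     }
   */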
9486 
9487 /* A wrapper function for builtin folding that prevents warnings for
9488    "statement without effect" and the like, caused by removing the
9489    call node earlier than the warning is generated.  */
9490 
9491 tree
9492 fold_call_expr (location_t loc, tree exp, bool ignore)
9493 {
9494   tree ret = NULL_TREE;
9495   tree fndecl = get_callee_fndecl (exp);
9496   if (fndecl && fndecl_built_in_p (fndecl)
9497       /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
9498 	 yet.  Defer folding until we see all the arguments
9499 	 (after inlining).  */
9500       && !CALL_EXPR_VA_ARG_PACK (exp))
9501     {
9502       int nargs = call_expr_nargs (exp);
9503 
9504       /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
9505 	 instead last argument is __builtin_va_arg_pack ().  Defer folding
9506 	 even in that case, until arguments are finalized.  */
9507       if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
9508 	{
9509 	  tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
9510 	  if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
9511 	    return NULL_TREE;
9512 	}
9513 
9514       if (avoid_folding_inline_builtin (fndecl))
9515 	return NULL_TREE;
9516 
9517       if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9518         return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
9519 				     CALL_EXPR_ARGP (exp), ignore);
9520       else
9521 	{
9522 	  tree *args = CALL_EXPR_ARGP (exp);
9523 	  ret = fold_builtin_n (loc, exp, fndecl, args, nargs, ignore);
9524 	  if (ret)
9525 	    return ret;
9526 	}
9527     }
9528   return NULL_TREE;
9529 }
9530 
9531 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
9532    N arguments are passed in the array ARGARRAY.  Return a folded
9533    expression or NULL_TREE if no simplification was possible.  */
9534 
9535 tree
9536 fold_builtin_call_array (location_t loc, tree,
9537 			 tree fn,
9538 			 int n,
9539 			 tree *argarray)
9540 {
9541   if (TREE_CODE (fn) != ADDR_EXPR)
9542     return NULL_TREE;
9543 
9544   tree fndecl = TREE_OPERAND (fn, 0);
9545   if (TREE_CODE (fndecl) == FUNCTION_DECL
9546       && fndecl_built_in_p (fndecl))
9547     {
9548       /* If last argument is __builtin_va_arg_pack (), arguments to this
9549 	 function are not finalized yet.  Defer folding until they are.  */
9550       if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
9551 	{
9552 	  tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
9553 	  if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
9554 	    return NULL_TREE;
9555 	}
9556       if (avoid_folding_inline_builtin (fndecl))
9557 	return NULL_TREE;
9558       if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9559 	return targetm.fold_builtin (fndecl, n, argarray, false);
9560       else
9561 	return fold_builtin_n (loc, NULL_TREE, fndecl, argarray, n, false);
9562     }
9563 
9564   return NULL_TREE;
9565 }
9566 
9567 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
9568    along with N new arguments specified as the "..." parameters.  SKIP
9569    is the number of arguments in EXP to be omitted.  This function is used
9570    to do varargs-to-varargs transformations.  */
9571 
9572 static tree
9573 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
9574 {
9575   va_list ap;
9576   tree t;
9577 
9578   va_start (ap, n);
9579   t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
9580 				CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
9581   va_end (ap);
9582 
9583   return t;
9584 }
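
/* For example (hypothetical; STRCPY_DECL names the strcpy FUNCTION_DECL), a
   transformation that rewrites sprintf (dest, "%s", str) into
   strcpy (dest, str) would skip the first two arguments of the original call
   and prepend DEST:

     rewrite_call_expr (loc, exp, 2, strcpy_decl, 1, dest);  */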
9585 
9586 /* Validate a single argument ARG against a tree code CODE representing
9587    a type.  Return true when argument is valid.  */
9588 
9589 static bool
9590 validate_arg (const_tree arg, enum tree_code code)
9591 {
9592   if (!arg)
9593     return false;
9594   else if (code == POINTER_TYPE)
9595     return POINTER_TYPE_P (TREE_TYPE (arg));
9596   else if (code == INTEGER_TYPE)
9597     return INTEGRAL_TYPE_P (TREE_TYPE (arg));
9598   return code == TREE_CODE (TREE_TYPE (arg));
9599 }
9600 
9601 /* This function validates the types of a function call argument list
9602    against a specified list of tree_codes.  If the last specifier is a 0,
9603    that represents an ellipsis, otherwise the last specifier must be a
9604    VOID_TYPE.
9605 
9606    This is the GIMPLE version of validate_arglist.  Eventually we want to
9607    completely convert builtins.c to work from GIMPLEs and the tree based
9608    validate_arglist will then be removed.  */
9609 
9610 bool
9611 validate_gimple_arglist (const gcall *call, ...)
9612 {
9613   enum tree_code code;
9614   bool res = false;
9615   va_list ap;
9616   const_tree arg;
9617   size_t i;
9618 
9619   va_start (ap, call);
9620   i = 0;
9621 
9622   do
9623     {
9624       code = (enum tree_code) va_arg (ap, int);
9625       switch (code)
9626 	{
9627 	case 0:
9628 	  /* This signifies an ellipsis; any further arguments are all ok.  */
9629 	  res = true;
9630 	  goto end;
9631 	case VOID_TYPE:
9632 	  /* This signifies an endlink, if no arguments remain, return
9633 	     true, otherwise return false.  */
9634 	  res = (i == gimple_call_num_args (call));
9635 	  goto end;
9636 	default:
9637 	  /* If no parameters remain or the parameter's code does not
9638 	     match the specified code, return false.  Otherwise continue
9639 	     checking any remaining arguments.  */
9640 	  arg = gimple_call_arg (call, i++);
9641 	  if (!validate_arg (arg, code))
9642 	    goto end;
9643 	  break;
9644 	}
9645     }
9646   while (1);
9647 
9648   /* We need gotos here since we can only have one va_end call in a
9649      function.  */
9650  end: ;
9651   va_end (ap);
9652 
9653   return res;
9654 }
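
/* Example use (illustrative): to accept a call such as sincos (x, &s, &c),
   a caller would write

     validate_gimple_arglist (call, REAL_TYPE, POINTER_TYPE, POINTER_TYPE,
			      VOID_TYPE)

   where the trailing VOID_TYPE marks the end of the expected argument list;
   a trailing 0 would instead allow any further arguments.  */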
9655 
9656 /* Default target-specific builtin expander that does nothing.  */
9657 
9658 rtx
9659 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
9660 			rtx target ATTRIBUTE_UNUSED,
9661 			rtx subtarget ATTRIBUTE_UNUSED,
9662 			machine_mode mode ATTRIBUTE_UNUSED,
9663 			int ignore ATTRIBUTE_UNUSED)
9664 {
9665   return NULL_RTX;
9666 }
9667 
9668 /* Returns true if EXP represents data that would potentially reside
9669    in a readonly section.  */
9670 
9671 bool
9672 readonly_data_expr (tree exp)
9673 {
9674   STRIP_NOPS (exp);
9675 
9676   if (TREE_CODE (exp) != ADDR_EXPR)
9677     return false;
9678 
9679   exp = get_base_address (TREE_OPERAND (exp, 0));
9680   if (!exp)
9681     return false;
9682 
9683   /* Make sure we call decl_readonly_section only for trees it
9684      can handle (since it returns true for everything it doesn't
9685      understand).  */
9686   if (TREE_CODE (exp) == STRING_CST
9687       || TREE_CODE (exp) == CONSTRUCTOR
9688       || (VAR_P (exp) && TREE_STATIC (exp)))
9689     return decl_readonly_section (exp, 0);
9690   else
9691     return false;
9692 }
9693 
9694 /* Simplify a call to the strpbrk builtin.  S1 and S2 are the arguments
9695    to the call, and TYPE is its return type.
9696 
9697    Return NULL_TREE if no simplification was possible, otherwise return the
9698    simplified form of the call as a tree.
9699 
9700    The simplified form may be a constant or other expression which
9701    computes the same value, but in a more efficient manner (including
9702    calls to other builtin functions).
9703 
9704    The call may contain arguments which need to be evaluated, but
9705    which are not useful to determine the result of the call.  In
9706    this case we return a chain of COMPOUND_EXPRs.  The LHS of each
9707    COMPOUND_EXPR will be an argument which must be evaluated.
9708    COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
9709    COMPOUND_EXPR in the chain will contain the tree for the simplified
9710    form of the builtin function call.  */
9711 
9712 static tree
9713 fold_builtin_strpbrk (location_t loc, tree, tree s1, tree s2, tree type)
9714 {
9715   if (!validate_arg (s1, POINTER_TYPE)
9716       || !validate_arg (s2, POINTER_TYPE))
9717     return NULL_TREE;
9718 
9719   tree fn;
9720   const char *p1, *p2;
9721 
9722   p2 = c_getstr (s2);
9723   if (p2 == NULL)
9724     return NULL_TREE;
9725 
9726   p1 = c_getstr (s1);
9727   if (p1 != NULL)
9728     {
9729       const char *r = strpbrk (p1, p2);
9730       tree tem;
9731 
9732       if (r == NULL)
9733 	return build_int_cst (TREE_TYPE (s1), 0);
9734 
9735       /* Return an offset into the constant string argument.  */
9736       tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
9737       return fold_convert_loc (loc, type, tem);
9738     }
9739 
9740   if (p2[0] == '\0')
9741     /* strpbrk(x, "") == NULL.
9742        Evaluate and ignore s1 in case it had side-effects.  */
9743     return omit_one_operand_loc (loc, type, integer_zero_node, s1);
9744 
9745   if (p2[1] != '\0')
9746     return NULL_TREE;  /* Really call strpbrk.  */
9747 
9748   fn = builtin_decl_implicit (BUILT_IN_STRCHR);
9749   if (!fn)
9750     return NULL_TREE;
9751 
9752   /* New argument list transforming strpbrk(s1, s2) to
9753      strchr(s1, s2[0]).  */
9754   return build_call_expr_loc (loc, fn, 2, s1,
9755 			      build_int_cst (integer_type_node, p2[0]));
9756 }
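
/* Illustrative results of the folding above (hypothetical calls):

     strpbrk ("hello", "lo")  -->  "hello" + 2      (offset of first match)
     strpbrk (s, "")          -->  (char *) 0       (s still evaluated)
     strpbrk (s, "x")         -->  strchr (s, 'x')  */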
9757 
9758 /* Simplify a call to the strspn builtin.  S1 and S2 are the arguments
9759    to the call.
9760 
9761    Return NULL_TREE if no simplification was possible, otherwise return the
9762    simplified form of the call as a tree.
9763 
9764    The simplified form may be a constant or other expression which
9765    computes the same value, but in a more efficient manner (including
9766    calls to other builtin functions).
9767 
9768    The call may contain arguments which need to be evaluated, but
9769    which are not useful to determine the result of the call.  In
9770    this case we return a chain of COMPOUND_EXPRs.  The LHS of each
9771    COMPOUND_EXPR will be an argument which must be evaluated.
9772    COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
9773    COMPOUND_EXPR in the chain will contain the tree for the simplified
9774    form of the builtin function call.  */
9775 
9776 static tree
9777 fold_builtin_strspn (location_t loc, tree expr, tree s1, tree s2)
9778 {
9779   if (!validate_arg (s1, POINTER_TYPE)
9780       || !validate_arg (s2, POINTER_TYPE))
9781     return NULL_TREE;
9782 
9783   if (!check_nul_terminated_array (expr, s1)
9784       || !check_nul_terminated_array (expr, s2))
9785     return NULL_TREE;
9786 
9787   const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
9788 
9789   /* If either argument is "", return NULL_TREE.  */
9790   if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
9791     /* Evaluate and ignore both arguments in case either one has
9792        side-effects.  */
9793     return omit_two_operands_loc (loc, size_type_node, size_zero_node,
9794 				  s1, s2);
9795   return NULL_TREE;
9796 }
9797 
9798 /* Simplify a call to the strcspn builtin.  S1 and S2 are the arguments
9799    to the call.
9800 
9801    Return NULL_TREE if no simplification was possible, otherwise return the
9802    simplified form of the call as a tree.
9803 
9804    The simplified form may be a constant or other expression which
9805    computes the same value, but in a more efficient manner (including
9806    calls to other builtin functions).
9807 
9808    The call may contain arguments which need to be evaluated, but
9809    which are not useful to determine the result of the call.  In
9810    this case we return a chain of COMPOUND_EXPRs.  The LHS of each
9811    COMPOUND_EXPR will be an argument which must be evaluated.
9812    COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
9813    COMPOUND_EXPR in the chain will contain the tree for the simplified
9814    form of the builtin function call.  */
9815 
9816 static tree
9817 fold_builtin_strcspn (location_t loc, tree expr, tree s1, tree s2)
9818 {
9819   if (!validate_arg (s1, POINTER_TYPE)
9820       || !validate_arg (s2, POINTER_TYPE))
9821     return NULL_TREE;
9822 
9823   if (!check_nul_terminated_array (expr, s1)
9824       || !check_nul_terminated_array (expr, s2))
9825     return NULL_TREE;
9826 
9827   /* If the first argument is "", return NULL_TREE.  */
9828   const char *p1 = c_getstr (s1);
9829   if (p1 && *p1 == '\0')
9830     {
9831       /* Evaluate and ignore argument s2 in case it has
9832 	 side-effects.  */
9833       return omit_one_operand_loc (loc, size_type_node,
9834 				   size_zero_node, s2);
9835     }
9836 
9837   /* If the second argument is "", return __builtin_strlen(s1).  */
9838   const char *p2 = c_getstr (s2);
9839   if (p2 && *p2 == '\0')
9840     {
9841       tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
9842 
9843       /* If the replacement _DECL isn't initialized, don't do the
9844 	 transformation.  */
9845       if (!fn)
9846 	return NULL_TREE;
9847 
9848       return build_call_expr_loc (loc, fn, 1, s1);
9849     }
9850   return NULL_TREE;
9851 }
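
/* Illustrative results of the two folders above (hypothetical calls):

     strspn (s, "") or strspn ("", s)  -->  0  (both arguments evaluated)
     strcspn ("", s)                   -->  0  (s still evaluated)
     strcspn (s, "")                   -->  strlen (s)  */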
9852 
9853 /* Fold the next_arg or va_start call EXP.  Return true if an error was
9854    produced, false otherwise.  This is done so that we don't output the
9855    error or warning more than once.  */
9856 
9857 bool
9858 fold_builtin_next_arg (tree exp, bool va_start_p)
9859 {
9860   tree fntype = TREE_TYPE (current_function_decl);
9861   int nargs = call_expr_nargs (exp);
9862   tree arg;
9863   /* There is a good chance the current input_location points inside the
9864      definition of the va_start macro (perhaps on the token for
9865      builtin) in a system header, so warnings will not be emitted.
9866      Use the location in real source code.  */
9867   location_t current_location =
9868     linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
9869 					      NULL);
9870 
9871   if (!stdarg_p (fntype))
9872     {
9873       error ("%<va_start%> used in function with fixed arguments");
9874       return true;
9875     }
9876 
9877   if (va_start_p)
9878     {
9879       if (nargs != 2)
9880 	{
9881 	  error ("wrong number of arguments to function %<va_start%>");
9882 	  return true;
9883 	}
9884       arg = CALL_EXPR_ARG (exp, 1);
9885     }
9886   /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
9887      when we checked the arguments and if needed issued a warning.  */
9888   else
9889     {
9890       if (nargs == 0)
9891 	{
9892 	  /* Evidently an out of date version of <stdarg.h>; can't validate
9893 	     va_start's second argument, but can still work as intended.  */
9894 	  warning_at (current_location,
9895 		      OPT_Wvarargs,
9896 		      "%<__builtin_next_arg%> called without an argument");
9897 	  return true;
9898 	}
9899       else if (nargs > 1)
9900 	{
9901 	  error ("wrong number of arguments to function %<__builtin_next_arg%>");
9902 	  return true;
9903 	}
9904       arg = CALL_EXPR_ARG (exp, 0);
9905     }
9906 
9907   if (TREE_CODE (arg) == SSA_NAME
9908       && SSA_NAME_VAR (arg))
9909     arg = SSA_NAME_VAR (arg);
9910 
9911   /* We destructively modify the call to be __builtin_va_start (ap, 0)
9912      or __builtin_next_arg (0) the first time we see it, after checking
9913      the arguments and if needed issuing a warning.  */
9914   if (!integer_zerop (arg))
9915     {
9916       tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
9917 
9918       /* Strip off all nops for the sake of the comparison.  This
9919 	 is not quite the same as STRIP_NOPS.  It does more.
9920 	 We must also strip off INDIRECT_EXPR for C++ reference
9921 	 parameters.  */
9922       while (CONVERT_EXPR_P (arg)
9923 	     || TREE_CODE (arg) == INDIRECT_REF)
9924 	arg = TREE_OPERAND (arg, 0);
9925       if (arg != last_parm)
9926 	{
9927 	  /* FIXME: Sometimes with the tree optimizers we can end up with
9928 	     something other than the last argument even though the user
9929 	     used the last argument.  We just warn and set the arg to be
9930 	     the last argument so that we will get wrong-code because of
9931 	     it.  */
9932 	  warning_at (current_location,
9933 		      OPT_Wvarargs,
9934 		      "second parameter of %<va_start%> not last named argument");
9935 	}
9936 
9937       /* Undefined by C99 7.15.1.4p4 (va_start):
9938          "If the parameter parmN is declared with the register storage
9939          class, with a function or array type, or with a type that is
9940          not compatible with the type that results after application of
9941          the default argument promotions, the behavior is undefined."
9942       */
9943       else if (DECL_REGISTER (arg))
9944 	{
9945 	  warning_at (current_location,
9946 		      OPT_Wvarargs,
9947 		      "undefined behavior when second parameter of "
9948 		      "%<va_start%> is declared with %<register%> storage");
9949 	}
9950 
9951       /* We want to verify the second parameter just once before the tree
9952 	 optimizers are run and then avoid keeping it in the tree,
9953 	 as otherwise we could warn even for correct code like:
9954 	 void foo (int i, ...)
9955 	 { va_list ap; i++; va_start (ap, i); va_end (ap); }  */
9956       if (va_start_p)
9957 	CALL_EXPR_ARG (exp, 1) = integer_zero_node;
9958       else
9959 	CALL_EXPR_ARG (exp, 0) = integer_zero_node;
9960     }
9961   return false;
9962 }
9963 
9964 
9965 /* Expand a call EXP to __builtin_object_size.  */
9966 
9967 static rtx
9968 expand_builtin_object_size (tree exp)
9969 {
9970   tree ost;
9971   int object_size_type;
9972   tree fndecl = get_callee_fndecl (exp);
9973 
9974   if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9975     {
9976       error ("first argument of %qD must be a pointer, second integer constant",
9977 	     fndecl);
9978       expand_builtin_trap ();
9979       return const0_rtx;
9980     }
9981 
9982   ost = CALL_EXPR_ARG (exp, 1);
9983   STRIP_NOPS (ost);
9984 
9985   if (TREE_CODE (ost) != INTEGER_CST
9986       || tree_int_cst_sgn (ost) < 0
9987       || compare_tree_int (ost, 3) > 0)
9988     {
9989       error ("last argument of %qD is not integer constant between 0 and 3",
9990 	      fndecl);
9991       expand_builtin_trap ();
9992       return const0_rtx;
9993     }
9994 
9995   object_size_type = tree_to_shwi (ost);
9996 
9997   return object_size_type < 2 ? constm1_rtx : const0_rtx;
9998 }
9999 
10000 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
10001    FCODE is the BUILT_IN_* to use.
10002    Return NULL_RTX if we failed; the caller should emit a normal call,
10003    otherwise try to get the result in TARGET, if convenient (and in
10004    mode MODE if that's convenient).  */
10005 
10006 static rtx
10007 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
10008 			   enum built_in_function fcode)
10009 {
10010   if (!validate_arglist (exp,
10011 			 POINTER_TYPE,
10012 			 fcode == BUILT_IN_MEMSET_CHK
10013 			 ? INTEGER_TYPE : POINTER_TYPE,
10014 			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
10015     return NULL_RTX;
10016 
10017   tree dest = CALL_EXPR_ARG (exp, 0);
10018   tree src = CALL_EXPR_ARG (exp, 1);
10019   tree len = CALL_EXPR_ARG (exp, 2);
10020   tree size = CALL_EXPR_ARG (exp, 3);
10021 
10022   /* FIXME: Set access mode to write only for memset et al.  */
10023   bool sizes_ok = check_access (exp, len, /*maxread=*/NULL_TREE,
10024 				/*srcstr=*/NULL_TREE, size, access_read_write);
10025 
10026   if (!tree_fits_uhwi_p (size))
10027     return NULL_RTX;
10028 
10029   if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
10030     {
10031       /* Avoid transforming the checking call to an ordinary one when
10032 	 an overflow has been detected or when the call couldn't be
10033 	 validated because the size is not constant.  */
10034       if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
10035 	return NULL_RTX;
10036 
10037       tree fn = NULL_TREE;
10038       /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
10039 	 mem{cpy,pcpy,move,set} is available.  */
10040       switch (fcode)
10041 	{
10042 	case BUILT_IN_MEMCPY_CHK:
10043 	  fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
10044 	  break;
10045 	case BUILT_IN_MEMPCPY_CHK:
10046 	  fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
10047 	  break;
10048 	case BUILT_IN_MEMMOVE_CHK:
10049 	  fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
10050 	  break;
10051 	case BUILT_IN_MEMSET_CHK:
10052 	  fn = builtin_decl_explicit (BUILT_IN_MEMSET);
10053 	  break;
10054 	default:
10055 	  break;
10056 	}
10057 
10058       if (! fn)
10059 	return NULL_RTX;
10060 
10061       fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
10062       gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10063       CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10064       return expand_expr (fn, target, mode, EXPAND_NORMAL);
10065     }
10066   else if (fcode == BUILT_IN_MEMSET_CHK)
10067     return NULL_RTX;
10068   else
10069     {
10070       unsigned int dest_align = get_pointer_alignment (dest);
10071 
10072       /* If DEST is not a pointer type, call the normal function.  */
10073       if (dest_align == 0)
10074 	return NULL_RTX;
10075 
10076       /* If SRC and DEST are the same (and not volatile), do nothing.  */
10077       if (operand_equal_p (src, dest, 0))
10078 	{
10079 	  tree expr;
10080 
10081 	  if (fcode != BUILT_IN_MEMPCPY_CHK)
10082 	    {
10083 	      /* Evaluate and ignore LEN in case it has side-effects.  */
10084 	      expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
10085 	      return expand_expr (dest, target, mode, EXPAND_NORMAL);
10086 	    }
10087 
10088 	  expr = fold_build_pointer_plus (dest, len);
10089 	  return expand_expr (expr, target, mode, EXPAND_NORMAL);
10090 	}
10091 
10092       /* __memmove_chk special case.  */
10093       if (fcode == BUILT_IN_MEMMOVE_CHK)
10094 	{
10095 	  unsigned int src_align = get_pointer_alignment (src);
10096 
10097 	  if (src_align == 0)
10098 	    return NULL_RTX;
10099 
10100 	  /* If src is categorized for a readonly section we can use
10101 	     normal __memcpy_chk.  */
10102 	  if (readonly_data_expr (src))
10103 	    {
10104 	      tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
10105 	      if (!fn)
10106 		return NULL_RTX;
10107 	      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
10108 					  dest, src, len, size);
10109 	      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
10110 	      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
10111 	      return expand_expr (fn, target, mode, EXPAND_NORMAL);
10112 	    }
10113 	}
10114       return NULL_RTX;
10115     }
10116 }
10117 
10118 /* Emit warning if a buffer overflow is detected at compile time.  */
10119 
10120 static void
10121 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
10122 {
10123   /* The source string.  */
10124   tree srcstr = NULL_TREE;
10125   /* The size of the destination object returned by __builtin_object_size.  */
10126   tree objsize = NULL_TREE;
10127   /* The string that is being concatenated with (as in __strcat_chk)
10128      or null if it isn't.  */
10129   tree catstr = NULL_TREE;
10130   /* The maximum length of the source sequence in a bounded operation
10131      (such as __strncat_chk) or null if the operation isn't bounded
10132      (such as __strcat_chk).  */
10133   tree maxread = NULL_TREE;
10134   /* The exact size of the access (such as in __strncpy_chk).  */
10135   tree size = NULL_TREE;
10136   /* The access by the function that's checked.  Except for snprintf
10137      both writing and reading are checked.  */
10138   access_mode mode = access_read_write;
10139 
10140   switch (fcode)
10141     {
10142     case BUILT_IN_STRCPY_CHK:
10143     case BUILT_IN_STPCPY_CHK:
10144       srcstr = CALL_EXPR_ARG (exp, 1);
10145       objsize = CALL_EXPR_ARG (exp, 2);
10146       break;
10147 
10148     case BUILT_IN_STRCAT_CHK:
10149       /* For __strcat_chk the warning will be emitted only if overflowing
10150 	 by at least strlen (dest) + 1 bytes.  */
10151       catstr = CALL_EXPR_ARG (exp, 0);
10152       srcstr = CALL_EXPR_ARG (exp, 1);
10153       objsize = CALL_EXPR_ARG (exp, 2);
10154       break;
10155 
10156     case BUILT_IN_STRNCAT_CHK:
10157       catstr = CALL_EXPR_ARG (exp, 0);
10158       srcstr = CALL_EXPR_ARG (exp, 1);
10159       maxread = CALL_EXPR_ARG (exp, 2);
10160       objsize = CALL_EXPR_ARG (exp, 3);
10161       break;
10162 
10163     case BUILT_IN_STRNCPY_CHK:
10164     case BUILT_IN_STPNCPY_CHK:
10165       srcstr = CALL_EXPR_ARG (exp, 1);
10166       size = CALL_EXPR_ARG (exp, 2);
10167       objsize = CALL_EXPR_ARG (exp, 3);
10168       break;
10169 
10170     case BUILT_IN_SNPRINTF_CHK:
10171     case BUILT_IN_VSNPRINTF_CHK:
10172       maxread = CALL_EXPR_ARG (exp, 1);
10173       objsize = CALL_EXPR_ARG (exp, 3);
10174       /* The only checked access is the write to the destination.  */
10175       mode = access_write_only;
10176       break;
10177     default:
10178       gcc_unreachable ();
10179     }
10180 
10181   if (catstr && maxread)
10182     {
10183       /* Check __strncat_chk.  There is no way to determine the length
10184 	 of the string to which the source string is being appended so
10185 	 just warn when the length of the source string is not known.  */
10186       check_strncat_sizes (exp, objsize);
10187       return;
10188     }
10189 
10190   check_access (exp, size, maxread, srcstr, objsize, mode);
10191 }
10192 
10193 /* Emit warning if a buffer overflow is detected at compile time
10194    in __sprintf_chk/__vsprintf_chk calls.  */
10195 
10196 static void
10197 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
10198 {
10199   tree size, len, fmt;
10200   const char *fmt_str;
10201   int nargs = call_expr_nargs (exp);
10202 
10203   /* Verify the required arguments in the original call.  */
10204 
10205   if (nargs < 4)
10206     return;
10207   size = CALL_EXPR_ARG (exp, 2);
10208   fmt = CALL_EXPR_ARG (exp, 3);
10209 
10210   if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
10211     return;
10212 
10213   /* Check whether the format is a literal string constant.  */
10214   fmt_str = c_getstr (fmt);
10215   if (fmt_str == NULL)
10216     return;
10217 
10218   if (!init_target_chars ())
10219     return;
10220 
10221   /* If the format doesn't contain % args or %%, we know its size.  */
10222   if (strchr (fmt_str, target_percent) == 0)
10223     len = build_int_cstu (size_type_node, strlen (fmt_str));
10224   /* If the format is "%s" and first ... argument is a string literal,
10225      we know it too.  */
10226   else if (fcode == BUILT_IN_SPRINTF_CHK
10227 	   && strcmp (fmt_str, target_percent_s) == 0)
10228     {
10229       tree arg;
10230 
10231       if (nargs < 5)
10232 	return;
10233       arg = CALL_EXPR_ARG (exp, 4);
10234       if (! POINTER_TYPE_P (TREE_TYPE (arg)))
10235 	return;
10236 
10237       len = c_strlen (arg, 1);
10238       if (!len || ! tree_fits_uhwi_p (len))
10239 	return;
10240     }
10241   else
10242     return;
10243 
10244   /* Add one for the terminating nul.  */
10245   len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
10246 
10247   check_access (exp, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, len, size,
10248 		access_write_only);
10249 }
10250 
10251 /* Fold a call to __builtin_object_size with arguments PTR and OST,
10252    if possible.  */
10253 
10254 static tree
10255 fold_builtin_object_size (tree ptr, tree ost)
10256 {
10257   unsigned HOST_WIDE_INT bytes;
10258   int object_size_type;
10259 
10260   if (!validate_arg (ptr, POINTER_TYPE)
10261       || !validate_arg (ost, INTEGER_TYPE))
10262     return NULL_TREE;
10263 
10264   STRIP_NOPS (ost);
10265 
10266   if (TREE_CODE (ost) != INTEGER_CST
10267       || tree_int_cst_sgn (ost) < 0
10268       || compare_tree_int (ost, 3) > 0)
10269     return NULL_TREE;
10270 
10271   object_size_type = tree_to_shwi (ost);
10272 
10273   /* __builtin_object_size doesn't evaluate side-effects in its arguments;
10274      if there are any side-effects, it returns (size_t) -1 for types 0 and 1
10275      and (size_t) 0 for types 2 and 3.  */
10276   if (TREE_SIDE_EFFECTS (ptr))
10277     return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
10278 
10279   if (TREE_CODE (ptr) == ADDR_EXPR)
10280     {
10281       compute_builtin_object_size (ptr, object_size_type, &bytes);
10282       if (wi::fits_to_tree_p (bytes, size_type_node))
10283 	return build_int_cstu (size_type_node, bytes);
10284     }
10285   else if (TREE_CODE (ptr) == SSA_NAME)
10286     {
10287       /* If object size is not known yet, delay folding until
10288        later.  Maybe subsequent passes will help determine
10289        it.  */
10290       if (compute_builtin_object_size (ptr, object_size_type, &bytes)
10291 	  && wi::fits_to_tree_p (bytes, size_type_node))
10292 	return build_int_cstu (size_type_node, bytes);
10293     }
10294 
10295   return NULL_TREE;
10296 }
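
/* Illustrative foldings (hypothetical; assuming the sizes are known and
   f () returns a pointer with side effects):

     char buf[8];
     __builtin_object_size (buf, 0)   -->  8
     __builtin_object_size (f (), 1)  -->  (size_t) -1
     __builtin_object_size (f (), 3)  -->  (size_t) 0   */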
10297 
10298 /* Builtins with folding operations that operate on "..." arguments
10299    need special handling; we need to store the arguments in a convenient
10300    data structure before attempting any folding.  Fortunately there are
10301    only a few builtins that fall into this category.  FNDECL is the
10302    function, EXP is the CALL_EXPR for the call.  */
10303 
10304 static tree
10305 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
10306 {
10307   enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
10308   tree ret = NULL_TREE;
10309 
10310   switch (fcode)
10311     {
10312     case BUILT_IN_FPCLASSIFY:
10313       ret = fold_builtin_fpclassify (loc, args, nargs);
10314       break;
10315 
10316     default:
10317       break;
10318     }
10319   if (ret)
10320     {
10321       ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
10322       SET_EXPR_LOCATION (ret, loc);
10323       suppress_warning (ret);
10324       return ret;
10325     }
10326   return NULL_TREE;
10327 }
10328 
10329 /* Initialize format string characters in the target charset.  */
10330 
10331 bool
10332 init_target_chars (void)
10333 {
10334   static bool init;
10335   if (!init)
10336     {
10337       target_newline = lang_hooks.to_target_charset ('\n');
10338       target_percent = lang_hooks.to_target_charset ('%');
10339       target_c = lang_hooks.to_target_charset ('c');
10340       target_s = lang_hooks.to_target_charset ('s');
10341       if (target_newline == 0 || target_percent == 0 || target_c == 0
10342 	  || target_s == 0)
10343 	return false;
10344 
10345       target_percent_c[0] = target_percent;
10346       target_percent_c[1] = target_c;
10347       target_percent_c[2] = '\0';
10348 
10349       target_percent_s[0] = target_percent;
10350       target_percent_s[1] = target_s;
10351       target_percent_s[2] = '\0';
10352 
10353       target_percent_s_newline[0] = target_percent;
10354       target_percent_s_newline[1] = target_s;
10355       target_percent_s_newline[2] = target_newline;
10356       target_percent_s_newline[3] = '\0';
10357 
10358       init = true;
10359     }
10360   return true;
10361 }
10362 
10363 /* Helper function for do_mpfr_arg*().  Ensure M is a normal number
10364    and no overflow/underflow occurred.  INEXACT is true if M was not
10365    exactly calculated.  TYPE is the tree type for the result.  This
10366    function assumes that you cleared the MPFR flags and then
10367    calculated M to see if anything subsequently set a flag prior to
10368    entering this function.  Return NULL_TREE if any checks fail.  */
10369 
10370 static tree
10371 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
10372 {
10373   /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10374      overflow/underflow occurred.  If -frounding-math, proceed iff the
10375      result of calling FUNC was exact.  */
10376   if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
10377       && (!flag_rounding_math || !inexact))
10378     {
10379       REAL_VALUE_TYPE rr;
10380 
10381       real_from_mpfr (&rr, m, type, MPFR_RNDN);
10382       /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
10383 	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
10384 	 but the mpfr_t is not, then we underflowed in the
10385 	 conversion.  */
10386       if (real_isfinite (&rr)
10387 	  && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
10388         {
10389 	  REAL_VALUE_TYPE rmode;
10390 
10391 	  real_convert (&rmode, TYPE_MODE (type), &rr);
10392 	  /* Proceed iff the specified mode can hold the value.  */
10393 	  if (real_identical (&rmode, &rr))
10394 	    return build_real (type, rmode);
10395 	}
10396     }
10397   return NULL_TREE;
10398 }
10399 
10400 /* Helper function for do_mpc_arg*().  Ensure M is a normal complex
10401    number and no overflow/underflow occurred.  INEXACT is true if M
10402    was not exactly calculated.  TYPE is the tree type for the result.
10403    This function assumes that you cleared the MPFR flags and then
10404    calculated M to see if anything subsequently set a flag prior to
10405    entering this function.  Return NULL_TREE if any checks fail; if
10406    FORCE_CONVERT is true, bypass the checks.  */
10407 
10408 static tree
10409 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
10410 {
10411   /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
10412      overflow/underflow occurred.  If -frounding-math, proceed iff the
10413      result of calling FUNC was exact.  */
10414   if (force_convert
10415       || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
10416 	  && !mpfr_overflow_p () && !mpfr_underflow_p ()
10417 	  && (!flag_rounding_math || !inexact)))
10418     {
10419       REAL_VALUE_TYPE re, im;
10420 
10421       real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), MPFR_RNDN);
10422       real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), MPFR_RNDN);
10423       /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
10424 	 check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
10425 	 but the mpfr_t is not, then we underflowed in the
10426 	 conversion.  */
10427       if (force_convert
10428 	  || (real_isfinite (&re) && real_isfinite (&im)
10429 	      && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
10430 	      && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
10431         {
10432 	  REAL_VALUE_TYPE re_mode, im_mode;
10433 
10434 	  real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
10435 	  real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
10436 	  /* Proceed iff the specified mode can hold the value.  */
10437 	  if (force_convert
10438 	      || (real_identical (&re_mode, &re)
10439 		  && real_identical (&im_mode, &im)))
10440 	    return build_complex (type, build_real (TREE_TYPE (type), re_mode),
10441 				  build_real (TREE_TYPE (type), im_mode));
10442 	}
10443     }
10444   return NULL_TREE;
10445 }
10446 
10447 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
10448    the pointer *(ARG_QUO) and return the result.  The type is taken
10449    from the type of ARG0 and is used for setting the precision of the
10450    calculation and results.  */
10451 
10452 static tree
10453 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
10454 {
10455   tree const type = TREE_TYPE (arg0);
10456   tree result = NULL_TREE;
10457 
10458   STRIP_NOPS (arg0);
10459   STRIP_NOPS (arg1);
10460 
10461   /* To proceed, MPFR must exactly represent the target floating point
10462      format, which only happens when the target base equals two.  */
10463   if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10464       && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
10465       && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
10466     {
10467       const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
10468       const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
10469 
10470       if (real_isfinite (ra0) && real_isfinite (ra1))
10471         {
10472 	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10473 	  const int prec = fmt->p;
10474 	  const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
10475 	  tree result_rem;
10476 	  long integer_quo;
10477 	  mpfr_t m0, m1;
10478 
10479 	  mpfr_inits2 (prec, m0, m1, NULL);
10480 	  mpfr_from_real (m0, ra0, MPFR_RNDN);
10481 	  mpfr_from_real (m1, ra1, MPFR_RNDN);
10482 	  mpfr_clear_flags ();
10483 	  mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
10484 	  /* Remquo is independent of the rounding mode, so pass
10485 	     inexact=0 to do_mpfr_ckconv().  */
10486 	  result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
10487 	  mpfr_clears (m0, m1, NULL);
10488 	  if (result_rem)
10489 	    {
10490 	      /* MPFR calculates quo in the host's long so it may
10491 		 return more bits in quo than the target int can hold
10492 		 if sizeof(host long) > sizeof(target int).  This can
10493 		 happen even for native compilers in LP64 mode.  In
10494 		 these cases, modulo the quo value with the largest
10495 		 number that the target int can hold while leaving one
10496 		 bit for the sign.  */
10497 	      if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
10498 		integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
10499 
10500 	      /* Dereference the quo pointer argument.  */
10501 	      arg_quo = build_fold_indirect_ref (arg_quo);
10502 	      /* Proceed iff a valid pointer type was passed in.  */
10503 	      if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
10504 	        {
10505 		  /* Set the value. */
10506 		  tree result_quo
10507 		    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
10508 				   build_int_cst (TREE_TYPE (arg_quo),
10509 						  integer_quo));
10510 		  TREE_SIDE_EFFECTS (result_quo) = 1;
10511 		  /* Combine the quo assignment with the rem.  */
10512 		  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10513 						    result_quo, result_rem));
10514 		}
10515 	    }
10516 	}
10517     }
10518   return result;
10519 }
10520 
10521 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
10522    resulting value as a tree with type TYPE.  The mpfr precision is
10523    set to the precision of TYPE.  We assume that this mpfr function
10524    returns zero if the result could be calculated exactly within the
10525    requested precision.  In addition, the integer pointer represented
10526    by ARG_SG will be dereferenced and set to the appropriate signgam
10527    (-1,1) value.  */
10528 
10529 static tree
10530 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
10531 {
10532   tree result = NULL_TREE;
10533 
10534   STRIP_NOPS (arg);
10535 
10536   /* To proceed, MPFR must exactly represent the target floating point
10537      format, which only happens when the target base equals two.  Also
10538      verify ARG is a constant and that ARG_SG is an int pointer.  */
10539   if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
10540       && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
10541       && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
10542       && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
10543     {
10544       const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
10545 
10546       /* In addition to NaN and Inf, the argument cannot be zero or a
10547 	 negative integer.  */
10548       if (real_isfinite (ra)
10549 	  && ra->cl != rvc_zero
10550 	  && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
10551         {
10552 	  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
10553 	  const int prec = fmt->p;
10554 	  const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
10555 	  int inexact, sg;
10556 	  mpfr_t m;
10557 	  tree result_lg;
10558 
10559 	  mpfr_init2 (m, prec);
10560 	  mpfr_from_real (m, ra, MPFR_RNDN);
10561 	  mpfr_clear_flags ();
10562 	  inexact = mpfr_lgamma (m, &sg, m, rnd);
10563 	  result_lg = do_mpfr_ckconv (m, type, inexact);
10564 	  mpfr_clear (m);
10565 	  if (result_lg)
10566 	    {
10567 	      tree result_sg;
10568 
10569 	      /* Dereference the arg_sg pointer argument.  */
10570 	      arg_sg = build_fold_indirect_ref (arg_sg);
10571 	      /* Assign the signgam value into *arg_sg. */
10572 	      result_sg = fold_build2 (MODIFY_EXPR,
10573 				       TREE_TYPE (arg_sg), arg_sg,
10574 				       build_int_cst (TREE_TYPE (arg_sg), sg));
10575 	      TREE_SIDE_EFFECTS (result_sg) = 1;
10576 	      /* Combine the signgam assignment with the lgamma result.  */
10577 	      result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
10578 						result_sg, result_lg));
10579 	    }
10580 	}
10581     }
10582 
10583   return result;
10584 }
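
/* Illustrative example (not part of GCC): with a constant argument the
   folding above produces a COMPOUND_EXPR that first stores the sign of
   gamma and then yields the lgamma value.  For instance

     int sg;
     double r = __builtin_lgamma_r (0.5, &sg);

   folds, conceptually, into

     double r = (sg = 1, 0.5723649429247001);   // approximately log (sqrt (pi))

   since gamma (0.5) = sqrt (pi) is positive.  */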
10585 
10586 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
10587    mpc function FUNC on it and return the resulting value as a tree
10588    with type TYPE.  The mpfr precision is set to the precision of
10589    TYPE.  We assume that function FUNC returns zero if the result
10590    could be calculated exactly within the requested precision.  If
10591    DO_NONFINITE is true, then fold expressions containing Inf or NaN
10592    in the arguments and/or results.  */
10593 
10594 tree
10595 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
10596 	     int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
10597 {
10598   tree result = NULL_TREE;
10599 
10600   STRIP_NOPS (arg0);
10601   STRIP_NOPS (arg1);
10602 
10603   /* To proceed, MPFR must exactly represent the target floating point
10604      format, which only happens when the target base equals two.  */
10605   if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
10606       && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
10607       && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
10608       && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
10609       && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
10610     {
10611       const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
10612       const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
10613       const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
10614       const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
10615 
10616       if (do_nonfinite
10617 	  || (real_isfinite (re0) && real_isfinite (im0)
10618 	      && real_isfinite (re1) && real_isfinite (im1)))
10619         {
10620 	  const struct real_format *const fmt =
10621 	    REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
10622 	  const int prec = fmt->p;
10623 	  const mpfr_rnd_t rnd = fmt->round_towards_zero
10624 				 ? MPFR_RNDZ : MPFR_RNDN;
10625 	  const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
10626 	  int inexact;
10627 	  mpc_t m0, m1;
10628 
10629 	  mpc_init2 (m0, prec);
10630 	  mpc_init2 (m1, prec);
10631 	  mpfr_from_real (mpc_realref (m0), re0, rnd);
10632 	  mpfr_from_real (mpc_imagref (m0), im0, rnd);
10633 	  mpfr_from_real (mpc_realref (m1), re1, rnd);
10634 	  mpfr_from_real (mpc_imagref (m1), im1, rnd);
10635 	  mpfr_clear_flags ();
10636 	  inexact = func (m0, m0, m1, crnd);
10637 	  result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
10638 	  mpc_clear (m0);
10639 	  mpc_clear (m1);
10640 	}
10641     }
10642 
10643   return result;
10644 }
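
/* Illustrative sketch (not part of GCC): a caller can fold a constant
   complex power by handing mpc_pow, whose signature matches the FUNC
   parameter, to do_mpc_arg2.  The surrounding call site below is an
   assumption made only for this example.

     // Try to fold cpow (arg0, arg1) when both arguments are COMPLEX_CSTs.
     tree folded = do_mpc_arg2 (arg0, arg1, type, 0, mpc_pow);  // do_nonfinite == 0
     if (folded)
       return folded;
*/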
10645 
10646 /* A wrapper function for builtin folding that prevents warnings for
10647    "statement without effect" and the like, caused by removing the
10648    call node before the warning is generated.  */
10649 
10650 tree
10651 fold_call_stmt (gcall *stmt, bool ignore)
10652 {
10653   tree ret = NULL_TREE;
10654   tree fndecl = gimple_call_fndecl (stmt);
10655   location_t loc = gimple_location (stmt);
10656   if (fndecl && fndecl_built_in_p (fndecl)
10657       && !gimple_call_va_arg_pack_p (stmt))
10658     {
10659       int nargs = gimple_call_num_args (stmt);
10660       tree *args = (nargs > 0
10661 		    ? gimple_call_arg_ptr (stmt, 0)
10662 		    : &error_mark_node);
10663 
10664       if (avoid_folding_inline_builtin (fndecl))
10665 	return NULL_TREE;
10666       if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
10667         {
10668 	  return targetm.fold_builtin (fndecl, nargs, args, ignore);
10669         }
10670       else
10671 	{
10672 	  ret = fold_builtin_n (loc, NULL_TREE, fndecl, args, nargs, ignore);
10673 	  if (ret)
10674 	    {
10675 	      /* Propagate location information from original call to
10676 		 expansion of builtin.  Otherwise things like
10677 		 maybe_emit_chk_warning, that operate on the expansion
10678 		 of a builtin, will use the wrong location information.  */
10679 	      if (gimple_has_location (stmt))
10680                 {
10681 		  tree realret = ret;
10682 		  if (TREE_CODE (ret) == NOP_EXPR)
10683 		    realret = TREE_OPERAND (ret, 0);
10684 		  if (CAN_HAVE_LOCATION_P (realret)
10685 		      && !EXPR_HAS_LOCATION (realret))
10686 		    SET_EXPR_LOCATION (realret, loc);
10687                   return realret;
10688                 }
10689 	      return ret;
10690 	    }
10691 	}
10692     }
10693   return NULL_TREE;
10694 }
10695 
10696 /* Look up the function in builtin_decl that corresponds to DECL
10697    and set ASMSPEC as its user assembler name.  DECL must be a
10698    function decl that declares a builtin.  */
10699 
10700 void
10701 set_builtin_user_assembler_name (tree decl, const char *asmspec)
10702 {
10703   gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
10704 	      && asmspec != 0);
10705 
10706   tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
10707   set_user_assembler_name (builtin, asmspec);
10708 
10709   if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
10710       && INT_TYPE_SIZE < BITS_PER_WORD)
10711     {
10712       scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
10713       set_user_assembler_libfunc ("ffs", asmspec);
10714       set_optab_libfunc (ffs_optab, mode, "ffs");
10715     }
10716 }
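
/* Illustrative example (not part of GCC): a user declaration that reaches
   this function by giving a builtin an explicit assembler name.  The name
   "my_ffs" is hypothetical.

     extern int ffs (int) __asm__ ("my_ffs");

   On a target whose int is narrower than a word, the library call used
   when __builtin_ffs cannot be expanded inline is then also redirected
   to "my_ffs".  */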
10717 
10718 /* Return true if DECL is a builtin that expands to a constant or similarly
10719    simple code.  */
10720 bool
10721 is_simple_builtin (tree decl)
10722 {
10723   if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
10724     switch (DECL_FUNCTION_CODE (decl))
10725       {
10726 	/* Builtins that expand to constants.  */
10727       case BUILT_IN_CONSTANT_P:
10728       case BUILT_IN_EXPECT:
10729       case BUILT_IN_OBJECT_SIZE:
10730       case BUILT_IN_UNREACHABLE:
10731 	/* Simple register moves or loads from stack.  */
10732       case BUILT_IN_ASSUME_ALIGNED:
10733       case BUILT_IN_RETURN_ADDRESS:
10734       case BUILT_IN_EXTRACT_RETURN_ADDR:
10735       case BUILT_IN_FROB_RETURN_ADDR:
10736       case BUILT_IN_RETURN:
10737       case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
10738       case BUILT_IN_FRAME_ADDRESS:
10739       case BUILT_IN_VA_END:
10740       case BUILT_IN_STACK_SAVE:
10741       case BUILT_IN_STACK_RESTORE:
10742       case BUILT_IN_DWARF_CFA:
10743 	/* Exception state returns or moves registers around.  */
10744       case BUILT_IN_EH_FILTER:
10745       case BUILT_IN_EH_POINTER:
10746       case BUILT_IN_EH_COPY_VALUES:
10747 	return true;
10748 
10749       default:
10750 	return false;
10751       }
10752 
10753   return false;
10754 }
10755 
10756 /* Return true if DECL is a builtin that is not expensive, i.e., it is
10757    most probably expanded inline into reasonably simple code.  This is a
10758    superset of is_simple_builtin.  */
10759 bool
10760 is_inexpensive_builtin (tree decl)
10761 {
10762   if (!decl)
10763     return false;
10764   else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
10765     return true;
10766   else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10767     switch (DECL_FUNCTION_CODE (decl))
10768       {
10769       case BUILT_IN_ABS:
10770       CASE_BUILT_IN_ALLOCA:
10771       case BUILT_IN_BSWAP16:
10772       case BUILT_IN_BSWAP32:
10773       case BUILT_IN_BSWAP64:
10774       case BUILT_IN_BSWAP128:
10775       case BUILT_IN_CLZ:
10776       case BUILT_IN_CLZIMAX:
10777       case BUILT_IN_CLZL:
10778       case BUILT_IN_CLZLL:
10779       case BUILT_IN_CTZ:
10780       case BUILT_IN_CTZIMAX:
10781       case BUILT_IN_CTZL:
10782       case BUILT_IN_CTZLL:
10783       case BUILT_IN_FFS:
10784       case BUILT_IN_FFSIMAX:
10785       case BUILT_IN_FFSL:
10786       case BUILT_IN_FFSLL:
10787       case BUILT_IN_IMAXABS:
10788       case BUILT_IN_FINITE:
10789       case BUILT_IN_FINITEF:
10790       case BUILT_IN_FINITEL:
10791       case BUILT_IN_FINITED32:
10792       case BUILT_IN_FINITED64:
10793       case BUILT_IN_FINITED128:
10794       case BUILT_IN_FPCLASSIFY:
10795       case BUILT_IN_ISFINITE:
10796       case BUILT_IN_ISINF_SIGN:
10797       case BUILT_IN_ISINF:
10798       case BUILT_IN_ISINFF:
10799       case BUILT_IN_ISINFL:
10800       case BUILT_IN_ISINFD32:
10801       case BUILT_IN_ISINFD64:
10802       case BUILT_IN_ISINFD128:
10803       case BUILT_IN_ISNAN:
10804       case BUILT_IN_ISNANF:
10805       case BUILT_IN_ISNANL:
10806       case BUILT_IN_ISNAND32:
10807       case BUILT_IN_ISNAND64:
10808       case BUILT_IN_ISNAND128:
10809       case BUILT_IN_ISNORMAL:
10810       case BUILT_IN_ISGREATER:
10811       case BUILT_IN_ISGREATEREQUAL:
10812       case BUILT_IN_ISLESS:
10813       case BUILT_IN_ISLESSEQUAL:
10814       case BUILT_IN_ISLESSGREATER:
10815       case BUILT_IN_ISUNORDERED:
10816       case BUILT_IN_VA_ARG_PACK:
10817       case BUILT_IN_VA_ARG_PACK_LEN:
10818       case BUILT_IN_VA_COPY:
10819       case BUILT_IN_TRAP:
10820       case BUILT_IN_SAVEREGS:
10821       case BUILT_IN_POPCOUNTL:
10822       case BUILT_IN_POPCOUNTLL:
10823       case BUILT_IN_POPCOUNTIMAX:
10824       case BUILT_IN_POPCOUNT:
10825       case BUILT_IN_PARITYL:
10826       case BUILT_IN_PARITYLL:
10827       case BUILT_IN_PARITYIMAX:
10828       case BUILT_IN_PARITY:
10829       case BUILT_IN_LABS:
10830       case BUILT_IN_LLABS:
10831       case BUILT_IN_PREFETCH:
10832       case BUILT_IN_ACC_ON_DEVICE:
10833 	return true;
10834 
10835       default:
10836 	return is_simple_builtin (decl);
10837       }
10838 
10839   return false;
10840 }
10841 
10842 /* Return true if T is a constant and the value cast to a target char
10843    can be represented by a host char.
10844    Store the cast char constant in *P if so.  */
10845 
10846 bool
10847 target_char_cst_p (tree t, char *p)
10848 {
10849   if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
10850     return false;
10851 
10852   *p = (char)tree_to_uhwi (t);
10853   return true;
10854 }
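
/* Illustrative sketch (not part of GCC): how a string-builtin folder would
   typically use target_char_cst_p; the variable names are assumptions made
   only for this example.

     char ch;
     if (!target_char_cst_p (char_arg, &ch))
       return NULL_TREE;   // cannot safely use host string routines
     // ... otherwise look for CH in the host copy of the constant string ...
*/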
10855 
10856 /* Return true if the builtin DECL is implemented in a standard library.
10857    Otherwise return false, which does not guarantee that DECL is not
10858    (the list of handled builtins below may be incomplete).  */
10859 
10860 bool
10861 builtin_with_linkage_p (tree decl)
10862 {
10863   if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
10864     switch (DECL_FUNCTION_CODE (decl))
10865     {
10866       CASE_FLT_FN (BUILT_IN_ACOS):
10867       CASE_FLT_FN (BUILT_IN_ACOSH):
10868       CASE_FLT_FN (BUILT_IN_ASIN):
10869       CASE_FLT_FN (BUILT_IN_ASINH):
10870       CASE_FLT_FN (BUILT_IN_ATAN):
10871       CASE_FLT_FN (BUILT_IN_ATANH):
10872       CASE_FLT_FN (BUILT_IN_ATAN2):
10873       CASE_FLT_FN (BUILT_IN_CBRT):
10874       CASE_FLT_FN (BUILT_IN_CEIL):
10875       CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL):
10876       CASE_FLT_FN (BUILT_IN_COPYSIGN):
10877       CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
10878       CASE_FLT_FN (BUILT_IN_COS):
10879       CASE_FLT_FN (BUILT_IN_COSH):
10880       CASE_FLT_FN (BUILT_IN_ERF):
10881       CASE_FLT_FN (BUILT_IN_ERFC):
10882       CASE_FLT_FN (BUILT_IN_EXP):
10883       CASE_FLT_FN (BUILT_IN_EXP2):
10884       CASE_FLT_FN (BUILT_IN_EXPM1):
10885       CASE_FLT_FN (BUILT_IN_FABS):
10886       CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
10887       CASE_FLT_FN (BUILT_IN_FDIM):
10888       CASE_FLT_FN (BUILT_IN_FLOOR):
10889       CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR):
10890       CASE_FLT_FN (BUILT_IN_FMA):
10891       CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
10892       CASE_FLT_FN (BUILT_IN_FMAX):
10893       CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMAX):
10894       CASE_FLT_FN (BUILT_IN_FMIN):
10895       CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMIN):
10896       CASE_FLT_FN (BUILT_IN_FMOD):
10897       CASE_FLT_FN (BUILT_IN_FREXP):
10898       CASE_FLT_FN (BUILT_IN_HYPOT):
10899       CASE_FLT_FN (BUILT_IN_ILOGB):
10900       CASE_FLT_FN (BUILT_IN_LDEXP):
10901       CASE_FLT_FN (BUILT_IN_LGAMMA):
10902       CASE_FLT_FN (BUILT_IN_LLRINT):
10903       CASE_FLT_FN (BUILT_IN_LLROUND):
10904       CASE_FLT_FN (BUILT_IN_LOG):
10905       CASE_FLT_FN (BUILT_IN_LOG10):
10906       CASE_FLT_FN (BUILT_IN_LOG1P):
10907       CASE_FLT_FN (BUILT_IN_LOG2):
10908       CASE_FLT_FN (BUILT_IN_LOGB):
10909       CASE_FLT_FN (BUILT_IN_LRINT):
10910       CASE_FLT_FN (BUILT_IN_LROUND):
10911       CASE_FLT_FN (BUILT_IN_MODF):
10912       CASE_FLT_FN (BUILT_IN_NAN):
10913       CASE_FLT_FN (BUILT_IN_NEARBYINT):
10914       CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT):
10915       CASE_FLT_FN (BUILT_IN_NEXTAFTER):
10916       CASE_FLT_FN (BUILT_IN_NEXTTOWARD):
10917       CASE_FLT_FN (BUILT_IN_POW):
10918       CASE_FLT_FN (BUILT_IN_REMAINDER):
10919       CASE_FLT_FN (BUILT_IN_REMQUO):
10920       CASE_FLT_FN (BUILT_IN_RINT):
10921       CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT):
10922       CASE_FLT_FN (BUILT_IN_ROUND):
10923       CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND):
10924       CASE_FLT_FN (BUILT_IN_SCALBLN):
10925       CASE_FLT_FN (BUILT_IN_SCALBN):
10926       CASE_FLT_FN (BUILT_IN_SIN):
10927       CASE_FLT_FN (BUILT_IN_SINH):
10928       CASE_FLT_FN (BUILT_IN_SINCOS):
10929       CASE_FLT_FN (BUILT_IN_SQRT):
10930       CASE_FLT_FN_FLOATN_NX (BUILT_IN_SQRT):
10931       CASE_FLT_FN (BUILT_IN_TAN):
10932       CASE_FLT_FN (BUILT_IN_TANH):
10933       CASE_FLT_FN (BUILT_IN_TGAMMA):
10934       CASE_FLT_FN (BUILT_IN_TRUNC):
10935       CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC):
10936 	return true;
10937 
10938       case BUILT_IN_STPCPY:
10939       case BUILT_IN_STPNCPY:
10940 	/* stpcpy is both referenced in libiberty's pex-win32.c and provided
10941 	   by libiberty's stpcpy.c for MinGW targets so we need to return true
10942 	   in order to be able to build libiberty in LTO mode for them.  */
10943 	return true;
10944 
10945       default:
10946 	break;
10947     }
10948   return false;
10949 }
10950 
10951 /* Return true if OFFRNG is bounded to a subrange of offset values
10952    valid for the largest possible object.  */
10953 
10954 bool
10955 access_ref::offset_bounded () const
10956 {
10957   tree min = TYPE_MIN_VALUE (ptrdiff_type_node);
10958   tree max = TYPE_MAX_VALUE (ptrdiff_type_node);
10959   return wi::to_offset (min) <= offrng[0] && offrng[1] <= wi::to_offset (max);
10960 }
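
/* Illustrative example (not part of GCC): with a 64-bit ptrdiff_t the check
   above accepts an offset range such as [0, 1024] but rejects one whose
   upper bound is 2**63, since that value no longer fits in ptrdiff_t.  */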
10961 
10962 /* Return the fnspec describing the known side effects of builtin CALLEE,
10963    or an empty fnspec if they are unknown.  See tree-ssa-structalias.c's
10964    find_func_aliases for the list of builtins we might need to handle here.  */
10965 
10966 attr_fnspec
10967 builtin_fnspec (tree callee)
10968 {
10969   built_in_function code = DECL_FUNCTION_CODE (callee);
10970 
10971   switch (code)
10972     {
10973       /* All the following functions read memory pointed to by
10974 	 their second argument and write memory pointed to by first
10975 	 argument.
10976 	 strcat/strncat additionally reads memory pointed to by the first
10977 	 argument.  */
10978       case BUILT_IN_STRCAT:
10979       case BUILT_IN_STRCAT_CHK:
10980 	return "1cW 1 ";
10981       case BUILT_IN_STRNCAT:
10982       case BUILT_IN_STRNCAT_CHK:
10983 	return "1cW 13";
10984       case BUILT_IN_STRCPY:
10985       case BUILT_IN_STRCPY_CHK:
10986 	return "1cO 1 ";
10987       case BUILT_IN_STPCPY:
10988       case BUILT_IN_STPCPY_CHK:
10989 	return ".cO 1 ";
10990       case BUILT_IN_STRNCPY:
10991       case BUILT_IN_MEMCPY:
10992       case BUILT_IN_MEMMOVE:
10993       case BUILT_IN_TM_MEMCPY:
10994       case BUILT_IN_TM_MEMMOVE:
10995       case BUILT_IN_STRNCPY_CHK:
10996       case BUILT_IN_MEMCPY_CHK:
10997       case BUILT_IN_MEMMOVE_CHK:
10998 	return "1cO313";
10999       case BUILT_IN_MEMPCPY:
11000       case BUILT_IN_MEMPCPY_CHK:
11001 	return ".cO313";
11002       case BUILT_IN_STPNCPY:
11003       case BUILT_IN_STPNCPY_CHK:
11004 	return ".cO313";
11005       case BUILT_IN_BCOPY:
11006 	return ".c23O3";
11007       case BUILT_IN_BZERO:
11008 	return ".cO2";
11009       case BUILT_IN_MEMCMP:
11010       case BUILT_IN_MEMCMP_EQ:
11011       case BUILT_IN_BCMP:
11012       case BUILT_IN_STRNCMP:
11013       case BUILT_IN_STRNCMP_EQ:
11014       case BUILT_IN_STRNCASECMP:
11015 	return ".cR3R3";
11016 
11017       /* The following functions read memory pointed to by their
11018 	 first argument.  */
11019       CASE_BUILT_IN_TM_LOAD (1):
11020       CASE_BUILT_IN_TM_LOAD (2):
11021       CASE_BUILT_IN_TM_LOAD (4):
11022       CASE_BUILT_IN_TM_LOAD (8):
11023       CASE_BUILT_IN_TM_LOAD (FLOAT):
11024       CASE_BUILT_IN_TM_LOAD (DOUBLE):
11025       CASE_BUILT_IN_TM_LOAD (LDOUBLE):
11026       CASE_BUILT_IN_TM_LOAD (M64):
11027       CASE_BUILT_IN_TM_LOAD (M128):
11028       CASE_BUILT_IN_TM_LOAD (M256):
11029       case BUILT_IN_TM_LOG:
11030       case BUILT_IN_TM_LOG_1:
11031       case BUILT_IN_TM_LOG_2:
11032       case BUILT_IN_TM_LOG_4:
11033       case BUILT_IN_TM_LOG_8:
11034       case BUILT_IN_TM_LOG_FLOAT:
11035       case BUILT_IN_TM_LOG_DOUBLE:
11036       case BUILT_IN_TM_LOG_LDOUBLE:
11037       case BUILT_IN_TM_LOG_M64:
11038       case BUILT_IN_TM_LOG_M128:
11039       case BUILT_IN_TM_LOG_M256:
11040 	return ".cR ";
11041 
11042       case BUILT_IN_INDEX:
11043       case BUILT_IN_RINDEX:
11044       case BUILT_IN_STRCHR:
11045       case BUILT_IN_STRLEN:
11046       case BUILT_IN_STRRCHR:
11047 	return ".cR ";
11048       case BUILT_IN_STRNLEN:
11049 	return ".cR2";
11050 
11051       /* These read memory pointed to by the first argument.
11052 	 Allocating memory does not have any side-effects apart from
11053 	 being the definition point for the pointer.
11054 	 Unix98 specifies that errno is set on allocation failure.  */
11055       case BUILT_IN_STRDUP:
11056 	return "mCR ";
11057       case BUILT_IN_STRNDUP:
11058 	return "mCR2";
11059       /* Allocating memory does not have any side-effects apart from
11060 	 being the definition point for the pointer.  */
11061       case BUILT_IN_MALLOC:
11062       case BUILT_IN_ALIGNED_ALLOC:
11063       case BUILT_IN_CALLOC:
11064       case BUILT_IN_GOMP_ALLOC:
11065 	return "mC";
11066       CASE_BUILT_IN_ALLOCA:
11067 	return "mc";
11068       /* These read memory pointed to by the first argument with size
11069 	 in the third argument.  */
11070       case BUILT_IN_MEMCHR:
11071 	return ".cR3";
11072       /* These read memory pointed to by the first and second arguments.  */
11073       case BUILT_IN_STRSTR:
11074       case BUILT_IN_STRPBRK:
11075       case BUILT_IN_STRCASECMP:
11076       case BUILT_IN_STRCSPN:
11077       case BUILT_IN_STRSPN:
11078       case BUILT_IN_STRCMP:
11079       case BUILT_IN_STRCMP_EQ:
11080 	return ".cR R ";
11081       /* Freeing memory kills the pointed-to memory.  More importantly
11082 	 the call has to serve as a barrier for moving loads and stores
11083 	 across it.  */
11084       case BUILT_IN_STACK_RESTORE:
11085       case BUILT_IN_FREE:
11086       case BUILT_IN_GOMP_FREE:
11087 	return ".co ";
11088       case BUILT_IN_VA_END:
11089 	return ".cO ";
11090       /* Realloc serves both as allocation point and deallocation point.  */
11091       case BUILT_IN_REALLOC:
11092 	return ".Cw ";
11093       case BUILT_IN_GAMMA_R:
11094       case BUILT_IN_GAMMAF_R:
11095       case BUILT_IN_GAMMAL_R:
11096       case BUILT_IN_LGAMMA_R:
11097       case BUILT_IN_LGAMMAF_R:
11098       case BUILT_IN_LGAMMAL_R:
11099 	return ".C. Ot";
11100       case BUILT_IN_FREXP:
11101       case BUILT_IN_FREXPF:
11102       case BUILT_IN_FREXPL:
11103       case BUILT_IN_MODF:
11104       case BUILT_IN_MODFF:
11105       case BUILT_IN_MODFL:
11106 	return ".c. Ot";
11107       case BUILT_IN_REMQUO:
11108       case BUILT_IN_REMQUOF:
11109       case BUILT_IN_REMQUOL:
11110 	return ".c. . Ot";
11111       case BUILT_IN_SINCOS:
11112       case BUILT_IN_SINCOSF:
11113       case BUILT_IN_SINCOSL:
11114 	return ".c. OtOt";
11115       case BUILT_IN_MEMSET:
11116       case BUILT_IN_MEMSET_CHK:
11117       case BUILT_IN_TM_MEMSET:
11118 	return "1cO3";
11119       CASE_BUILT_IN_TM_STORE (1):
11120       CASE_BUILT_IN_TM_STORE (2):
11121       CASE_BUILT_IN_TM_STORE (4):
11122       CASE_BUILT_IN_TM_STORE (8):
11123       CASE_BUILT_IN_TM_STORE (FLOAT):
11124       CASE_BUILT_IN_TM_STORE (DOUBLE):
11125       CASE_BUILT_IN_TM_STORE (LDOUBLE):
11126       CASE_BUILT_IN_TM_STORE (M64):
11127       CASE_BUILT_IN_TM_STORE (M128):
11128       CASE_BUILT_IN_TM_STORE (M256):
11129 	return ".cO ";
11130       case BUILT_IN_STACK_SAVE:
11131       case BUILT_IN_RETURN:
11132       case BUILT_IN_EH_POINTER:
11133       case BUILT_IN_EH_FILTER:
11134       case BUILT_IN_UNWIND_RESUME:
11135       case BUILT_IN_CXA_END_CLEANUP:
11136       case BUILT_IN_EH_COPY_VALUES:
11137       case BUILT_IN_FRAME_ADDRESS:
11138       case BUILT_IN_APPLY_ARGS:
11139       case BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT:
11140       case BUILT_IN_ASAN_AFTER_DYNAMIC_INIT:
11141       case BUILT_IN_PREFETCH:
11142       case BUILT_IN_DWARF_CFA:
11143       case BUILT_IN_RETURN_ADDRESS:
11144 	return ".c";
11145       case BUILT_IN_ASSUME_ALIGNED:
11146 	return "1cX ";
11147       /* But posix_memalign stores a pointer into the memory pointed to
11148 	 by its first argument.  */
11149       case BUILT_IN_POSIX_MEMALIGN:
11150 	return ".cOt";
11151 
11152       default:
11153 	return "";
11154     }
11155 }
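
/* Illustrative decoding (not part of GCC, paraphrased from memory; see
   attr-fnspec.h for the authoritative description) of the memcpy fnspec
   "1cO313" returned above:

     '1'   the call returns its first argument,
     'c'   apart from the described argument effects it behaves like a
	   const function,
     'O3'  the memory pointed to by argument 1 is only written, with the
	   access size given by argument 3,
     '13'  the memory pointed to by argument 2 is copied to the memory
	   pointed to by argument 1, again with the size given by
	   argument 3.  */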
11156