1 /* Expand builtin functions.
2    Copyright (C) 1988-2016 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 /* Legacy warning!  Please add no further builtin simplifications here
21    (apart from pure constant folding) - builtin simplifications should go
22    to match.pd or gimple-fold.c instead.  */
23 
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "gimple.h"
32 #include "predict.h"
33 #include "tm_p.h"
34 #include "stringpool.h"
35 #include "tree-ssanames.h"
36 #include "expmed.h"
37 #include "optabs.h"
38 #include "emit-rtl.h"
39 #include "recog.h"
40 #include "diagnostic-core.h"
41 #include "alias.h"
42 #include "fold-const.h"
43 #include "fold-const-call.h"
44 #include "stor-layout.h"
45 #include "calls.h"
46 #include "varasm.h"
47 #include "tree-object-size.h"
48 #include "realmpfr.h"
49 #include "cfgrtl.h"
50 #include "except.h"
51 #include "dojump.h"
52 #include "explow.h"
53 #include "stmt.h"
54 #include "expr.h"
55 #include "libfuncs.h"
56 #include "output.h"
57 #include "typeclass.h"
58 #include "langhooks.h"
59 #include "value-prof.h"
60 #include "builtins.h"
61 #include "asan.h"
62 #include "cilk.h"
63 #include "tree-chkp.h"
64 #include "rtl-chkp.h"
65 #include "internal-fn.h"
66 #include "case-cfn-macros.h"
67 
68 
69 struct target_builtins default_target_builtins;
70 #if SWITCHABLE_TARGET
71 struct target_builtins *this_target_builtins = &default_target_builtins;
72 #endif
73 
74 /* Define the names of the builtin function types and codes.  */
75 const char *const built_in_class_names[BUILT_IN_LAST]
76   = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
77 
78 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
79 const char * built_in_names[(int) END_BUILTINS] =
80 {
81 #include "builtins.def"
82 };
83 
84 /* Set up an array of builtin_info_type, making sure each element's decl
85    is initialized to NULL_TREE.  */
86 builtin_info_type builtin_info[(int)END_BUILTINS];
87 
88 /* Non-zero if __builtin_constant_p should be folded right away.  */
89 bool force_folding_builtin_constant_p;
90 
91 static rtx c_readstr (const char *, machine_mode);
92 static int target_char_cast (tree, char *);
93 static rtx get_memory_rtx (tree, tree);
94 static int apply_args_size (void);
95 static int apply_result_size (void);
96 static rtx result_vector (int, rtx);
97 static void expand_builtin_prefetch (tree);
98 static rtx expand_builtin_apply_args (void);
99 static rtx expand_builtin_apply_args_1 (void);
100 static rtx expand_builtin_apply (rtx, rtx, rtx);
101 static void expand_builtin_return (rtx);
102 static enum type_class type_to_class (tree);
103 static rtx expand_builtin_classify_type (tree);
104 static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
105 static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
106 static rtx expand_builtin_interclass_mathfn (tree, rtx);
107 static rtx expand_builtin_sincos (tree);
108 static rtx expand_builtin_cexpi (tree, rtx);
109 static rtx expand_builtin_int_roundingfn (tree, rtx);
110 static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
111 static rtx expand_builtin_next_arg (void);
112 static rtx expand_builtin_va_start (tree);
113 static rtx expand_builtin_va_end (tree);
114 static rtx expand_builtin_va_copy (tree);
115 static rtx expand_builtin_strcmp (tree, rtx);
116 static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
117 static rtx builtin_memcpy_read_str (void *, HOST_WIDE_INT, machine_mode);
118 static rtx expand_builtin_memcpy (tree, rtx);
119 static rtx expand_builtin_memcpy_with_bounds (tree, rtx);
120 static rtx expand_builtin_memcpy_args (tree, tree, tree, rtx, tree);
121 static rtx expand_builtin_mempcpy (tree, rtx, machine_mode);
122 static rtx expand_builtin_mempcpy_with_bounds (tree, rtx, machine_mode);
123 static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx,
124 					machine_mode, int, tree);
125 static rtx expand_builtin_strcpy (tree, rtx);
126 static rtx expand_builtin_strcpy_args (tree, tree, rtx);
127 static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
128 static rtx expand_builtin_strncpy (tree, rtx);
129 static rtx builtin_memset_gen_str (void *, HOST_WIDE_INT, machine_mode);
130 static rtx expand_builtin_memset (tree, rtx, machine_mode);
131 static rtx expand_builtin_memset_with_bounds (tree, rtx, machine_mode);
132 static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
133 static rtx expand_builtin_bzero (tree);
134 static rtx expand_builtin_strlen (tree, rtx, machine_mode);
135 static rtx expand_builtin_alloca (tree, bool);
136 static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
137 static rtx expand_builtin_frame_address (tree, tree);
138 static tree stabilize_va_list_loc (location_t, tree, int);
139 static rtx expand_builtin_expect (tree, rtx);
140 static tree fold_builtin_constant_p (tree);
141 static tree fold_builtin_classify_type (tree);
142 static tree fold_builtin_strlen (location_t, tree, tree);
143 static tree fold_builtin_inf (location_t, tree, int);
144 static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
145 static bool validate_arg (const_tree, enum tree_code code);
146 static rtx expand_builtin_fabs (tree, rtx, rtx);
147 static rtx expand_builtin_signbit (tree, rtx);
148 static tree fold_builtin_strchr (location_t, tree, tree, tree);
149 static tree fold_builtin_memchr (location_t, tree, tree, tree, tree);
150 static tree fold_builtin_memcmp (location_t, tree, tree, tree);
151 static tree fold_builtin_strcmp (location_t, tree, tree);
152 static tree fold_builtin_strncmp (location_t, tree, tree, tree);
153 static tree fold_builtin_isascii (location_t, tree);
154 static tree fold_builtin_toascii (location_t, tree);
155 static tree fold_builtin_isdigit (location_t, tree);
156 static tree fold_builtin_fabs (location_t, tree, tree);
157 static tree fold_builtin_abs (location_t, tree, tree);
158 static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree, enum tree_code,
159 					enum tree_code);
160 static tree fold_builtin_0 (location_t, tree);
161 static tree fold_builtin_1 (location_t, tree, tree);
162 static tree fold_builtin_2 (location_t, tree, tree, tree);
163 static tree fold_builtin_3 (location_t, tree, tree, tree, tree);
164 static tree fold_builtin_varargs (location_t, tree, tree*, int);
165 
166 static tree fold_builtin_strpbrk (location_t, tree, tree, tree);
167 static tree fold_builtin_strstr (location_t, tree, tree, tree);
168 static tree fold_builtin_strrchr (location_t, tree, tree, tree);
169 static tree fold_builtin_strspn (location_t, tree, tree);
170 static tree fold_builtin_strcspn (location_t, tree, tree);
171 
172 static rtx expand_builtin_object_size (tree);
173 static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
174 				      enum built_in_function);
175 static void maybe_emit_chk_warning (tree, enum built_in_function);
176 static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
177 static void maybe_emit_free_warning (tree);
178 static tree fold_builtin_object_size (tree, tree);
179 
180 unsigned HOST_WIDE_INT target_newline;
181 unsigned HOST_WIDE_INT target_percent;
182 static unsigned HOST_WIDE_INT target_c;
183 static unsigned HOST_WIDE_INT target_s;
184 char target_percent_c[3];
185 char target_percent_s[3];
186 char target_percent_s_newline[4];
187 static tree do_mpfr_remquo (tree, tree, tree);
188 static tree do_mpfr_lgamma_r (tree, tree, tree);
189 static void expand_builtin_sync_synchronize (void);
190 
191 /* Return true if NAME starts with __builtin_, __sync_ or __atomic_.  */
192 
193 static bool
194 is_builtin_name (const char *name)
195 {
196   if (strncmp (name, "__builtin_", 10) == 0)
197     return true;
198   if (strncmp (name, "__sync_", 7) == 0)
199     return true;
200   if (strncmp (name, "__atomic_", 9) == 0)
201     return true;
202   if (flag_cilkplus
203       && (!strcmp (name, "__cilkrts_detach")
204 	  || !strcmp (name, "__cilkrts_pop_frame")))
205     return true;
206   return false;
207 }
208 
209 
210 /* Return true if DECL is a function symbol representing a built-in.  */
211 
212 bool
213 is_builtin_fn (tree decl)
214 {
215   return TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl);
216 }
217 
218 /* Return true if NODE should be considered for inline expansion regardless
219    of the optimization level.  This means whenever a function is invoked with
220    its "internal" name, which normally contains the prefix "__builtin".  */
221 
222 bool
223 called_as_built_in (tree node)
224 {
225   /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME, since
226      we want the name used to call the function, not the name it
227      will have.  */
228   const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
229   return is_builtin_name (name);
230 }
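
/* For example, a call written literally as __builtin_memcpy (d, s, n)
   satisfies this predicate and is therefore considered for inline expansion
   even at -O0, whereas the same call written as plain memcpy (d, s, n)
   is not.  */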
231 
232 /* Compute values M and N such that M divides (address of EXP - N) and such
233    that N < M.  If these numbers can be determined, store M in *ALIGNP and N in
234    *BITPOSP and return true.  Otherwise return false and store BITS_PER_UNIT to
235    *ALIGNP and any bit-offset to *BITPOSP.
236 
237    Note that the address (and thus the alignment) computed here is based
238    on the address to which a symbol resolves, whereas DECL_ALIGN is based
239    on the address at which an object is actually located.  These two
240    addresses are not always the same.  For example, on ARM targets,
241    the address &foo of a Thumb function foo() has the lowest bit set,
242    whereas foo() itself starts on an even address.
243 
244    If ADDR_P is true we are taking the address of the memory reference EXP
245    and thus cannot rely on the access taking place.  */
246 
247 static bool
248 get_object_alignment_2 (tree exp, unsigned int *alignp,
249 			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
250 {
251   HOST_WIDE_INT bitsize, bitpos;
252   tree offset;
253   machine_mode mode;
254   int unsignedp, reversep, volatilep;
255   unsigned int align = BITS_PER_UNIT;
256   bool known_alignment = false;
257 
258   /* Get the innermost object and the constant (bitpos) and possibly
259      variable (offset) offset of the access.  */
260   exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
261 			     &unsignedp, &reversep, &volatilep, true);
262 
263   /* Extract alignment information from the innermost object and
264      possibly adjust bitpos and offset.  */
265   if (TREE_CODE (exp) == FUNCTION_DECL)
266     {
267       /* Function addresses can encode extra information besides their
268 	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
269 	 allows the low bit to be used as a virtual bit, we know
270 	 that the address itself must be at least 2-byte aligned.  */
271       if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
272 	align = 2 * BITS_PER_UNIT;
273     }
274   else if (TREE_CODE (exp) == LABEL_DECL)
275     ;
276   else if (TREE_CODE (exp) == CONST_DECL)
277     {
278       /* The alignment of a CONST_DECL is determined by its initializer.  */
279       exp = DECL_INITIAL (exp);
280       align = TYPE_ALIGN (TREE_TYPE (exp));
281       if (CONSTANT_CLASS_P (exp))
282 	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
283 
284       known_alignment = true;
285     }
286   else if (DECL_P (exp))
287     {
288       align = DECL_ALIGN (exp);
289       known_alignment = true;
290     }
291   else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
292     {
293       align = TYPE_ALIGN (TREE_TYPE (exp));
294     }
295   else if (TREE_CODE (exp) == INDIRECT_REF
296 	   || TREE_CODE (exp) == MEM_REF
297 	   || TREE_CODE (exp) == TARGET_MEM_REF)
298     {
299       tree addr = TREE_OPERAND (exp, 0);
300       unsigned ptr_align;
301       unsigned HOST_WIDE_INT ptr_bitpos;
302       unsigned HOST_WIDE_INT ptr_bitmask = ~0;
303 
304       /* If the address is explicitly aligned, handle that.  */
305       if (TREE_CODE (addr) == BIT_AND_EXPR
306 	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
307 	{
308 	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
309 	  ptr_bitmask *= BITS_PER_UNIT;
310 	  align = ptr_bitmask & -ptr_bitmask;
311 	  addr = TREE_OPERAND (addr, 0);
312 	}
313 
314       known_alignment
315 	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
316       align = MAX (ptr_align, align);
317 
318       /* Re-apply explicit alignment to the bitpos.  */
319       ptr_bitpos &= ptr_bitmask;
320 
321       /* The alignment of the pointer operand in a TARGET_MEM_REF
322 	 has to take the variable offset parts into account.  */
323       if (TREE_CODE (exp) == TARGET_MEM_REF)
324 	{
325 	  if (TMR_INDEX (exp))
326 	    {
327 	      unsigned HOST_WIDE_INT step = 1;
328 	      if (TMR_STEP (exp))
329 		step = TREE_INT_CST_LOW (TMR_STEP (exp));
330 	      align = MIN (align, (step & -step) * BITS_PER_UNIT);
331 	    }
332 	  if (TMR_INDEX2 (exp))
333 	    align = BITS_PER_UNIT;
334 	  known_alignment = false;
335 	}
336 
337       /* When EXP is an actual memory reference then we can use
338 	 TYPE_ALIGN of a pointer indirection to derive alignment.
339 	 Do so only if get_pointer_alignment_1 did not reveal absolute
340 	 alignment knowledge and if using that alignment would
341 	 improve the situation.  */
342       if (!addr_p && !known_alignment
343 	  && TYPE_ALIGN (TREE_TYPE (exp)) > align)
344 	align = TYPE_ALIGN (TREE_TYPE (exp));
345       else
346 	{
347 	  /* Else adjust bitpos accordingly.  */
348 	  bitpos += ptr_bitpos;
349 	  if (TREE_CODE (exp) == MEM_REF
350 	      || TREE_CODE (exp) == TARGET_MEM_REF)
351 	    bitpos += mem_ref_offset (exp).to_short_addr () * BITS_PER_UNIT;
352 	}
353     }
354   else if (TREE_CODE (exp) == STRING_CST)
355     {
356       /* STRING_CSTs are the only constant objects we allow not to be
357          wrapped inside a CONST_DECL.  */
358       align = TYPE_ALIGN (TREE_TYPE (exp));
359       if (CONSTANT_CLASS_P (exp))
360 	align = (unsigned) CONSTANT_ALIGNMENT (exp, align);
361 
362       known_alignment = true;
363     }
364 
365   /* If there is a non-constant offset part extract the maximum
366      alignment that can prevail.  */
367   if (offset)
368     {
369       unsigned int trailing_zeros = tree_ctz (offset);
370       if (trailing_zeros < HOST_BITS_PER_INT)
371 	{
372 	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
373 	  if (inner)
374 	    align = MIN (align, inner);
375 	}
376     }
377 
378   *alignp = align;
379   *bitposp = bitpos & (*alignp - 1);
380   return known_alignment;
381 }
382 
383 /* For a memory reference expression EXP compute values M and N such that M
384    divides (&EXP - N) and such that N < M.  If these numbers can be determined,
385    store M in *ALIGNP and N in *BITPOSP and return true.  Otherwise return false
386    and store BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.  */
387 
388 bool
389 get_object_alignment_1 (tree exp, unsigned int *alignp,
390 			unsigned HOST_WIDE_INT *bitposp)
391 {
392   return get_object_alignment_2 (exp, alignp, bitposp, false);
393 }
394 
395 /* Return the alignment in bits of EXP, an object.  */
396 
397 unsigned int
398 get_object_alignment (tree exp)
399 {
400   unsigned HOST_WIDE_INT bitpos = 0;
401   unsigned int align;
402 
403   get_object_alignment_1 (exp, &align, &bitpos);
404 
405   /* align and bitpos now specify known low bits of the pointer.
406      ptr & (align - 1) == bitpos.  */
407 
408   if (bitpos != 0)
409     align = (bitpos & -bitpos);
410   return align;
411 }
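
/* As an illustration: if get_object_alignment_1 reports align == 128 and
   bitpos == 48 (the address is known to be 48 bits past a 128-bit boundary),
   then the only alignment guaranteed for the object itself is
   48 & -48 == 16 bits, i.e. 2 bytes, and that is what is returned above.  */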
412 
413 /* For a pointer valued expression EXP compute values M and N such that M
414    divides (EXP - N) and such that N < M.  If these numbers can be determined,
415    store M in *ALIGNP and N in *BITPOSP and return true.  Return false if
416    the results are just a conservative approximation.
417 
418    If EXP is not a pointer, false is returned too.  */
419 
420 bool
421 get_pointer_alignment_1 (tree exp, unsigned int *alignp,
422 			 unsigned HOST_WIDE_INT *bitposp)
423 {
424   STRIP_NOPS (exp);
425 
426   if (TREE_CODE (exp) == ADDR_EXPR)
427     return get_object_alignment_2 (TREE_OPERAND (exp, 0),
428 				   alignp, bitposp, true);
429   else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
430     {
431       unsigned int align;
432       unsigned HOST_WIDE_INT bitpos;
433       bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
434 					  &align, &bitpos);
435       if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
436 	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
437       else
438 	{
439 	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
440 	  if (trailing_zeros < HOST_BITS_PER_INT)
441 	    {
442 	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
443 	      if (inner)
444 		align = MIN (align, inner);
445 	    }
446 	}
447       *alignp = align;
448       *bitposp = bitpos & (align - 1);
449       return res;
450     }
451   else if (TREE_CODE (exp) == SSA_NAME
452 	   && POINTER_TYPE_P (TREE_TYPE (exp)))
453     {
454       unsigned int ptr_align, ptr_misalign;
455       struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);
456 
457       if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
458 	{
459 	  *bitposp = ptr_misalign * BITS_PER_UNIT;
460 	  *alignp = ptr_align * BITS_PER_UNIT;
461 	  /* Make sure to return a sensible alignment when the multiplication
462 	     by BITS_PER_UNIT overflowed.  */
463 	  if (*alignp == 0)
464 	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
465 	  /* We cannot really tell whether this result is an approximation.  */
466 	  return false;
467 	}
468       else
469 	{
470 	  *bitposp = 0;
471 	  *alignp = BITS_PER_UNIT;
472 	  return false;
473 	}
474     }
475   else if (TREE_CODE (exp) == INTEGER_CST)
476     {
477       *alignp = BIGGEST_ALIGNMENT;
478       *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
479 		  & (BIGGEST_ALIGNMENT - 1));
480       return true;
481     }
482 
483   *bitposp = 0;
484   *alignp = BITS_PER_UNIT;
485   return false;
486 }
487 
488 /* Return the alignment in bits of EXP, a pointer valued expression.
489    The alignment returned is, by default, the alignment of the thing that
490    EXP points to.  If it is not a POINTER_TYPE, 0 is returned.
491 
492    Otherwise, look at the expression to see if we can do better, i.e., if the
493    expression is actually pointing at an object whose alignment is tighter.  */
494 
495 unsigned int
496 get_pointer_alignment (tree exp)
497 {
498   unsigned HOST_WIDE_INT bitpos = 0;
499   unsigned int align;
500 
501   get_pointer_alignment_1 (exp, &align, &bitpos);
502 
503   /* align and bitpos now specify known low bits of the pointer.
504      ptr & (align - 1) == bitpos.  */
505 
506   if (bitpos != 0)
507     align = (bitpos & -bitpos);
508 
509   return align;
510 }
511 
512 /* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
513    way, because it could contain a zero byte in the middle.
514    TREE_STRING_LENGTH is the size of the character array, not the string.
515 
516    ONLY_VALUE should be nonzero if the result is not going to be emitted
517    into the instruction stream and zero if it is going to be expanded.
518    E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
519    is returned, otherwise NULL, since
520    len = c_strlen (src, 1); if (len) expand_expr (len, ...); would not
521    evaluate the side-effects.
522 
523    If ONLY_VALUE is two then we do not emit warnings about out-of-bound
524    accesses.  Note that this implies the result is not going to be emitted
525    into the instruction stream.
526 
527    The value returned is of type `ssizetype'.
528 
529    Unfortunately, string_constant can't access the values of const char
530    arrays with initializers, so neither can we do so here.  */
531 
532 tree
533 c_strlen (tree src, int only_value)
534 {
535   tree offset_node;
536   HOST_WIDE_INT offset;
537   int max;
538   const char *ptr;
539   location_t loc;
540 
541   STRIP_NOPS (src);
542   if (TREE_CODE (src) == COND_EXPR
543       && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
544     {
545       tree len1, len2;
546 
547       len1 = c_strlen (TREE_OPERAND (src, 1), only_value);
548       len2 = c_strlen (TREE_OPERAND (src, 2), only_value);
549       if (tree_int_cst_equal (len1, len2))
550 	return len1;
551     }
552 
553   if (TREE_CODE (src) == COMPOUND_EXPR
554       && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
555     return c_strlen (TREE_OPERAND (src, 1), only_value);
556 
557   loc = EXPR_LOC_OR_LOC (src, input_location);
558 
559   src = string_constant (src, &offset_node);
560   if (src == 0)
561     return NULL_TREE;
562 
563   max = TREE_STRING_LENGTH (src) - 1;
564   ptr = TREE_STRING_POINTER (src);
565 
566   if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
567     {
568       /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
569 	 compute the offset to the following null if we don't know where to
570 	 start searching for it.  */
571       int i;
572 
573       for (i = 0; i < max; i++)
574 	if (ptr[i] == 0)
575 	  return NULL_TREE;
576 
577       /* We don't know the starting offset, but we do know that the string
578 	 has no internal zero bytes.  We can assume that the offset falls
579 	 within the bounds of the string; otherwise, the programmer deserves
580 	 what he gets.  Subtract the offset from the length of the string,
581 	 and return that.  This would perhaps not be valid if we were dealing
582 	 with named arrays in addition to literal string constants.  */
583 
584       return size_diffop_loc (loc, size_int (max), offset_node);
585     }
586 
587   /* We have a known offset into the string.  Start searching there for
588      a null character if we can represent it as a single HOST_WIDE_INT.  */
589   if (offset_node == 0)
590     offset = 0;
591   else if (! tree_fits_shwi_p (offset_node))
592     offset = -1;
593   else
594     offset = tree_to_shwi (offset_node);
595 
596   /* If the offset is known to be out of bounds, warn, and call strlen at
597      runtime.  */
598   if (offset < 0 || offset > max)
599     {
600      /* Suppress multiple warnings for propagated constant strings.  */
601       if (only_value != 2
602 	  && !TREE_NO_WARNING (src))
603         {
604           warning_at (loc, 0, "offset outside bounds of constant string");
605           TREE_NO_WARNING (src) = 1;
606         }
607       return NULL_TREE;
608     }
609 
610   /* Use strlen to search for the first zero byte.  Since any strings
611      constructed with build_string will have nulls appended, we win even
612      if we get handed something like (char[4])"abcd".
613 
614      Since OFFSET is our starting index into the string, no further
615      calculation is needed.  */
616   return ssize_int (strlen (ptr + offset));
617 }
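
/* Two illustrative cases: for the string constant "foo\0bar" with a known
   constant offset of 4, the result is ssize_int (3), i.e. the length of
   "bar".  With a non-constant offset the embedded zero byte makes the
   length unknowable, so NULL_TREE is returned; only a string without
   embedded zero bytes permits the length-minus-offset computation above.  */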
618 
619 /* Return a constant integer corresponding to target reading
620    GET_MODE_BITSIZE (MODE) bits from string constant STR.  */
621 
622 static rtx
623 c_readstr (const char *str, machine_mode mode)
624 {
625   HOST_WIDE_INT ch;
626   unsigned int i, j;
627   HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];
628 
629   gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
630   unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
631     / HOST_BITS_PER_WIDE_INT;
632 
633   gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
634   for (i = 0; i < len; i++)
635     tmp[i] = 0;
636 
637   ch = 1;
638   for (i = 0; i < GET_MODE_SIZE (mode); i++)
639     {
640       j = i;
641       if (WORDS_BIG_ENDIAN)
642 	j = GET_MODE_SIZE (mode) - i - 1;
643       if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
644 	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
645 	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
646       j *= BITS_PER_UNIT;
647 
648       if (ch)
649 	ch = (unsigned char) str[i];
650       tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
651     }
652 
653   wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
654   return immed_wide_int_const (c, mode);
655 }
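
/* By way of example, on a target where neither BYTES_BIG_ENDIAN nor
   WORDS_BIG_ENDIAN is set and SImode is four 8-bit units,
   c_readstr ("abcd", SImode) yields the constant 0x64636261 ('a' in the
   least significant byte); with both endianness flags set it yields
   0x61626364.  Once a zero byte is encountered, all remaining bytes of
   the constant are zero.  */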
656 
657 /* Cast a target constant CST to target CHAR and if that value fits into
658    host char type, return zero and put that value into variable pointed to by
659    P.  */
660 
661 static int
662 target_char_cast (tree cst, char *p)
663 {
664   unsigned HOST_WIDE_INT val, hostval;
665 
666   if (TREE_CODE (cst) != INTEGER_CST
667       || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
668     return 1;
669 
670   /* Do not care if it fits or not right here.  */
671   val = TREE_INT_CST_LOW (cst);
672 
673   if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
674     val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
675 
676   hostval = val;
677   if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
678     hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
679 
680   if (val != hostval)
681     return 1;
682 
683   *p = hostval;
684   return 0;
685 }
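
/* For example, with an 8-bit target char an INTEGER_CST of 0x41 stores
   'A' through P and returns zero; a target char value that does not
   survive the round trip through the host char type makes the function
   return 1 instead.  */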
686 
687 /* Similar to save_expr, but assumes that arbitrary code is not executed
688    in between the multiple evaluations.  In particular, we assume that a
689    non-addressable local variable will not be modified.  */
690 
691 static tree
692 builtin_save_expr (tree exp)
693 {
694   if (TREE_CODE (exp) == SSA_NAME
695       || (TREE_ADDRESSABLE (exp) == 0
696 	  && (TREE_CODE (exp) == PARM_DECL
697 	      || (TREE_CODE (exp) == VAR_DECL && !TREE_STATIC (exp)))))
698     return exp;
699 
700   return save_expr (exp);
701 }
702 
703 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
704    times to get the address of either a higher stack frame, or a return
705    address located within it (depending on FNDECL_CODE).  */
706 
707 static rtx
708 expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
709 {
710   int i;
711   rtx tem = INITIAL_FRAME_ADDRESS_RTX;
712   if (tem == NULL_RTX)
713     {
714       /* For a zero count with __builtin_return_address, we don't care what
715 	 frame address we return, because target-specific definitions will
716 	 override us.  Therefore frame pointer elimination is OK, and using
717 	 the soft frame pointer is OK.
718 
719 	 For a nonzero count, or a zero count with __builtin_frame_address,
720 	 we require a stable offset from the current frame pointer to the
721 	 previous one, so we must use the hard frame pointer, and
722 	 we must disable frame pointer elimination.  */
723       if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
724 	tem = frame_pointer_rtx;
725       else
726 	{
727 	  tem = hard_frame_pointer_rtx;
728 
729 	  /* Tell reload not to eliminate the frame pointer.  */
730 	  crtl->accesses_prior_frames = 1;
731 	}
732     }
733 
734   if (count > 0)
735     SETUP_FRAME_ADDRESSES ();
736 
737   /* On the SPARC, the return address is not in the frame, it is in a
738      register.  There is no way to access it off of the current frame
739      pointer, but it can be accessed off the previous frame pointer by
740      reading the value from the register window save area.  */
741   if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
742     count--;
743 
744   /* Scan back COUNT frames to the specified frame.  */
745   for (i = 0; i < count; i++)
746     {
747       /* Assume the dynamic chain pointer is in the word that the
748 	 frame address points to, unless otherwise specified.  */
749       tem = DYNAMIC_CHAIN_ADDRESS (tem);
750       tem = memory_address (Pmode, tem);
751       tem = gen_frame_mem (Pmode, tem);
752       tem = copy_to_reg (tem);
753     }
754 
755   /* For __builtin_frame_address, return what we've got.  But, on
756      the SPARC for example, we may have to add a bias.  */
757   if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
758     return FRAME_ADDR_RTX (tem);
759 
760   /* For __builtin_return_address, get the return address from that frame.  */
761 #ifdef RETURN_ADDR_RTX
762   tem = RETURN_ADDR_RTX (count, tem);
763 #else
764   tem = memory_address (Pmode,
765 			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
766   tem = gen_frame_mem (Pmode, tem);
767 #endif
768   return tem;
769 }
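
/* To make the counting concrete: __builtin_frame_address (0) normally
   yields (a possibly target-adjusted form of) the hard frame pointer with
   no iterations of the loop above, while __builtin_frame_address (2)
   follows DYNAMIC_CHAIN_ADDRESS twice first.  For __builtin_return_address
   the final frame address is additionally dereferenced, either through
   RETURN_ADDR_RTX or by reading the word just past the frame address.  */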
770 
771 /* Alias set used for setjmp buffer.  */
772 static alias_set_type setjmp_alias_set = -1;
773 
774 /* Construct the leading half of a __builtin_setjmp call.  Control will
775    return to RECEIVER_LABEL.  This is also called directly by the SJLJ
776    exception handling code.  */
777 
778 void
779 expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
780 {
781   machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
782   rtx stack_save;
783   rtx mem;
784 
785   if (setjmp_alias_set == -1)
786     setjmp_alias_set = new_alias_set ();
787 
788   buf_addr = convert_memory_address (Pmode, buf_addr);
789 
790   buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
791 
792   /* We store the frame pointer and the address of receiver_label in
793      the buffer and use the rest of it for the stack save area, which
794      is machine-dependent.  */
795 
796   mem = gen_rtx_MEM (Pmode, buf_addr);
797   set_mem_alias_set (mem, setjmp_alias_set);
798   emit_move_insn (mem, targetm.builtin_setjmp_frame_value ());
799 
800   mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
801 					   GET_MODE_SIZE (Pmode)));
802   set_mem_alias_set (mem, setjmp_alias_set);
803 
804   emit_move_insn (validize_mem (mem),
805 		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
806 
807   stack_save = gen_rtx_MEM (sa_mode,
808 			    plus_constant (Pmode, buf_addr,
809 					   2 * GET_MODE_SIZE (Pmode)));
810   set_mem_alias_set (stack_save, setjmp_alias_set);
811   emit_stack_save (SAVE_NONLOCAL, &stack_save);
812 
813   /* If there is further processing to do, do it.  */
814   if (targetm.have_builtin_setjmp_setup ())
815     emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));
816 
817   /* We have a nonlocal label.   */
818   cfun->has_nonlocal_label = 1;
819 }
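
/* To summarize the buffer layout used above (and relied upon by
   expand_builtin_longjmp and expand_builtin_update_setjmp_buf below):
   word 0 holds the frame value from targetm.builtin_setjmp_frame_value,
   word 1 holds the address of RECEIVER_LABEL, and the area starting at
   word 2 is the machine-dependent stack save area in SAVE_NONLOCAL mode.  */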
820 
821 /* Construct the trailing part of a __builtin_setjmp call.  This is
822    also called directly by the SJLJ exception handling code.
823    If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */
824 
825 void
826 expand_builtin_setjmp_receiver (rtx receiver_label)
827 {
828   rtx chain;
829 
830   /* Mark the FP as used when we get here, so we have to make sure it's
831      marked as used by this function.  */
832   emit_use (hard_frame_pointer_rtx);
833 
834   /* Mark the static chain as clobbered here so life information
835      doesn't get messed up for it.  */
836   chain = targetm.calls.static_chain (current_function_decl, true);
837   if (chain && REG_P (chain))
838     emit_clobber (chain);
839 
840   /* Now put in the code to restore the frame pointer, and argument
841      pointer, if needed.  */
842   if (! targetm.have_nonlocal_goto ())
843     {
844       /* First adjust our frame pointer to its actual value.  It was
845 	 previously set to the start of the virtual area corresponding to
846 	 the stacked variables when we branched here and now needs to be
847 	 adjusted to the actual hardware fp value.
848 
849 	 Assignments to virtual registers are converted by
850 	 instantiate_virtual_regs into the corresponding assignment
851 	 to the underlying register (fp in this case) that makes
852 	 the original assignment true.
853 	 So the following insn will actually be decrementing fp by
854 	 STARTING_FRAME_OFFSET.  */
855       emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
856 
857       /* Restoring the frame pointer also modifies the hard frame pointer.
858 	 Mark it used (so that the previous assignment remains live once
859 	 the frame pointer is eliminated) and clobbered (to represent the
860 	 implicit update from the assignment).  */
861       emit_use (hard_frame_pointer_rtx);
862       emit_clobber (hard_frame_pointer_rtx);
863     }
864 
865   if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
866     {
867 #ifdef ELIMINABLE_REGS
868       /* If the argument pointer can be eliminated in favor of the
869 	 frame pointer, we don't need to restore it.  We assume here
870 	 that if such an elimination is present, it can always be used.
871 	 This is the case on all known machines; if we don't make this
872 	 assumption, we do unnecessary saving on many machines.  */
873       size_t i;
874       static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
875 
876       for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
877 	if (elim_regs[i].from == ARG_POINTER_REGNUM
878 	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
879 	  break;
880 
881       if (i == ARRAY_SIZE (elim_regs))
882 #endif
883 	{
884 	  /* Now restore our arg pointer from the address at which it
885 	     was saved in our stack frame.  */
886 	  emit_move_insn (crtl->args.internal_arg_pointer,
887 			  copy_to_reg (get_arg_pointer_save_area ()));
888 	}
889     }
890 
891   if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
892     emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
893   else if (targetm.have_nonlocal_goto_receiver ())
894     emit_insn (targetm.gen_nonlocal_goto_receiver ());
895   else
896     { /* Nothing */ }
897 
898   /* We must not allow the code we just generated to be reordered by
899      scheduling.  Specifically, the update of the frame pointer must
900      happen immediately, not later.  */
901   emit_insn (gen_blockage ());
902 }
903 
904 /* __builtin_longjmp is passed a pointer to an array of five words (not
905    all will be used on all machines).  It operates similarly to the C
906    library function of the same name, but is more efficient.  Much of
907    the code below is copied from the handling of non-local gotos.  */
908 
909 static void
910 expand_builtin_longjmp (rtx buf_addr, rtx value)
911 {
912   rtx fp, lab, stack;
913   rtx_insn *insn, *last;
914   machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
915 
916   /* DRAP is needed for stack realign if longjmp is expanded to current
917      function  */
918   if (SUPPORTS_STACK_ALIGNMENT)
919     crtl->need_drap = true;
920 
921   if (setjmp_alias_set == -1)
922     setjmp_alias_set = new_alias_set ();
923 
924   buf_addr = convert_memory_address (Pmode, buf_addr);
925 
926   buf_addr = force_reg (Pmode, buf_addr);
927 
928   /* We require that the user pass a second argument of 1, because
929      that is what builtin_setjmp will return.  */
930   gcc_assert (value == const1_rtx);
931 
932   last = get_last_insn ();
933   if (targetm.have_builtin_longjmp ())
934     emit_insn (targetm.gen_builtin_longjmp (buf_addr));
935   else
936     {
937       fp = gen_rtx_MEM (Pmode, buf_addr);
938       lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
939 					       GET_MODE_SIZE (Pmode)));
940 
941       stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
942 						   2 * GET_MODE_SIZE (Pmode)));
943       set_mem_alias_set (fp, setjmp_alias_set);
944       set_mem_alias_set (lab, setjmp_alias_set);
945       set_mem_alias_set (stack, setjmp_alias_set);
946 
947       /* Pick up FP, label, and SP from the block and jump.  This code is
948 	 from expand_goto in stmt.c; see there for detailed comments.  */
949       if (targetm.have_nonlocal_goto ())
950 	/* We have to pass a value to the nonlocal_goto pattern that will
951 	   get copied into the static_chain pointer, but it does not matter
952 	   what that value is, because builtin_setjmp does not use it.  */
953 	emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
954       else
955 	{
956 	  lab = copy_to_reg (lab);
957 
958 	  emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
959 	  emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
960 
961 	  emit_move_insn (hard_frame_pointer_rtx, fp);
962 	  emit_stack_restore (SAVE_NONLOCAL, stack);
963 
964 	  emit_use (hard_frame_pointer_rtx);
965 	  emit_use (stack_pointer_rtx);
966 	  emit_indirect_jump (lab);
967 	}
968     }
969 
970   /* Search backwards and mark the jump insn as a non-local goto.
971      Note that this precludes the use of __builtin_longjmp to a
972      __builtin_setjmp target in the same function.  However, we've
973      already cautioned the user that these functions are for
974      internal exception handling use only.  */
975   for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
976     {
977       gcc_assert (insn != last);
978 
979       if (JUMP_P (insn))
980 	{
981 	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
982 	  break;
983 	}
984       else if (CALL_P (insn))
985 	break;
986     }
987 }
988 
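/* Return true if there are more arguments to visit in the CALL_EXPR
   argument iterator *ITER.  */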
989 static inline bool
990 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
991 {
992   return (iter->i < iter->n);
993 }
994 
995 /* This function validates the types of a function call argument list
996    against a specified list of tree_codes.  If the last specifier is a 0,
997    it represents an ellipsis; otherwise the last specifier must be a
998    VOID_TYPE.  */
999 
1000 static bool
1001 validate_arglist (const_tree callexpr, ...)
1002 {
1003   enum tree_code code;
1004   bool res = 0;
1005   va_list ap;
1006   const_call_expr_arg_iterator iter;
1007   const_tree arg;
1008 
1009   va_start (ap, callexpr);
1010   init_const_call_expr_arg_iterator (callexpr, &iter);
1011 
1012   do
1013     {
1014       code = (enum tree_code) va_arg (ap, int);
1015       switch (code)
1016 	{
1017 	case 0:
1018 	  /* This signifies an ellipsis; any further arguments are all OK.  */
1019 	  res = true;
1020 	  goto end;
1021 	case VOID_TYPE:
1022 	  /* This signifies an endlink, if no arguments remain, return
1023 	     true, otherwise return false.  */
1024 	  res = !more_const_call_expr_args_p (&iter);
1025 	  goto end;
1026 	default:
1027 	  /* If no parameters remain or the parameter's code does not
1028 	     match the specified code, return false.  Otherwise continue
1029 	     checking any remaining arguments.  */
1030 	  arg = next_const_call_expr_arg (&iter);
1031 	  if (!validate_arg (arg, code))
1032 	    goto end;
1033 	  break;
1034 	}
1035     }
1036   while (1);
1037 
1038   /* We need gotos here since we can only have one VA_CLOSE in a
1039      function.  */
1040  end: ;
1041   va_end (ap);
1042 
1043   return res;
1044 }
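
/* Typical uses, for illustration:
     validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
   accepts exactly a pointer argument followed by an integer argument, while
     validate_arglist (exp, POINTER_TYPE, 0)
   accepts a pointer argument followed by any number of further arguments
   of any type.  */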
1045 
1046 /* Expand a call to __builtin_nonlocal_goto.  We're passed the target label
1047    and the address of the save area.  */
1048 
1049 static rtx
1050 expand_builtin_nonlocal_goto (tree exp)
1051 {
1052   tree t_label, t_save_area;
1053   rtx r_label, r_save_area, r_fp, r_sp;
1054   rtx_insn *insn;
1055 
1056   if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1057     return NULL_RTX;
1058 
1059   t_label = CALL_EXPR_ARG (exp, 0);
1060   t_save_area = CALL_EXPR_ARG (exp, 1);
1061 
1062   r_label = expand_normal (t_label);
1063   r_label = convert_memory_address (Pmode, r_label);
1064   r_save_area = expand_normal (t_save_area);
1065   r_save_area = convert_memory_address (Pmode, r_save_area);
1066   /* Copy the address of the save location to a register just in case it was
1067      based on the frame pointer.   */
1068   r_save_area = copy_to_reg (r_save_area);
1069   r_fp = gen_rtx_MEM (Pmode, r_save_area);
1070   r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1071 		      plus_constant (Pmode, r_save_area,
1072 				     GET_MODE_SIZE (Pmode)));
1073 
1074   crtl->has_nonlocal_goto = 1;
1075 
1076   /* ??? We no longer need to pass the static chain value, afaik.  */
1077   if (targetm.have_nonlocal_goto ())
1078     emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1079   else
1080     {
1081       r_label = copy_to_reg (r_label);
1082 
1083       emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1084       emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1085 
1086       /* Restore frame pointer for containing function.  */
1087       emit_move_insn (hard_frame_pointer_rtx, r_fp);
1088       emit_stack_restore (SAVE_NONLOCAL, r_sp);
1089 
1090       /* USE of hard_frame_pointer_rtx added for consistency;
1091 	 not clear if really needed.  */
1092       emit_use (hard_frame_pointer_rtx);
1093       emit_use (stack_pointer_rtx);
1094 
1095       /* If the architecture is using a GP register, we must
1096 	 conservatively assume that the target function makes use of it.
1097 	 The prologue of functions with nonlocal gotos must therefore
1098 	 initialize the GP register to the appropriate value, and we
1099 	 must then make sure that this value is live at the point
1100 	 of the jump.  (Note that this doesn't necessarily apply
1101 	 to targets with a nonlocal_goto pattern; they are free
1102 	 to implement it in their own way.  Note also that this is
1103 	 a no-op if the GP register is a global invariant.)  */
1104       if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
1105 	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
1106 	emit_use (pic_offset_table_rtx);
1107 
1108       emit_indirect_jump (r_label);
1109     }
1110 
1111   /* Search backwards to the jump insn and mark it as a
1112      non-local goto.  */
1113   for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1114     {
1115       if (JUMP_P (insn))
1116 	{
1117 	  add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1118 	  break;
1119 	}
1120       else if (CALL_P (insn))
1121 	break;
1122     }
1123 
1124   return const0_rtx;
1125 }
1126 
1127 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
1128    (not all will be used on all machines) that was passed to __builtin_setjmp.
1129    It updates the stack pointer in that block to the current value.  This is
1130    also called directly by the SJLJ exception handling code.  */
1131 
1132 void
1133 expand_builtin_update_setjmp_buf (rtx buf_addr)
1134 {
1135   machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1136   rtx stack_save
1137     = gen_rtx_MEM (sa_mode,
1138 		   memory_address
1139 		   (sa_mode,
1140 		    plus_constant (Pmode, buf_addr,
1141 				   2 * GET_MODE_SIZE (Pmode))));
1142 
1143   emit_stack_save (SAVE_NONLOCAL, &stack_save);
1144 }
1145 
1146 /* Expand a call to __builtin_prefetch.  For a target that does not support
1147    data prefetch, evaluate the memory address argument in case it has side
1148    effects.  */
1149 
1150 static void
1151 expand_builtin_prefetch (tree exp)
1152 {
1153   tree arg0, arg1, arg2;
1154   int nargs;
1155   rtx op0, op1, op2;
1156 
1157   if (!validate_arglist (exp, POINTER_TYPE, 0))
1158     return;
1159 
1160   arg0 = CALL_EXPR_ARG (exp, 0);
1161 
1162   /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
1163      zero (read) and argument 2 (locality) defaults to 3 (high degree of
1164      locality).  */
1165   nargs = call_expr_nargs (exp);
1166   if (nargs > 1)
1167     arg1 = CALL_EXPR_ARG (exp, 1);
1168   else
1169     arg1 = integer_zero_node;
1170   if (nargs > 2)
1171     arg2 = CALL_EXPR_ARG (exp, 2);
1172   else
1173     arg2 = integer_three_node;
1174 
1175   /* Argument 0 is an address.  */
1176   op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
1177 
1178   /* Argument 1 (read/write flag) must be a compile-time constant int.  */
1179   if (TREE_CODE (arg1) != INTEGER_CST)
1180     {
1181       error ("second argument to %<__builtin_prefetch%> must be a constant");
1182       arg1 = integer_zero_node;
1183     }
1184   op1 = expand_normal (arg1);
1185   /* Argument 1 must be either zero or one.  */
1186   if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
1187     {
1188       warning (0, "invalid second argument to %<__builtin_prefetch%>;"
1189 	       " using zero");
1190       op1 = const0_rtx;
1191     }
1192 
1193   /* Argument 2 (locality) must be a compile-time constant int.  */
1194   if (TREE_CODE (arg2) != INTEGER_CST)
1195     {
1196       error ("third argument to %<__builtin_prefetch%> must be a constant");
1197       arg2 = integer_zero_node;
1198     }
1199   op2 = expand_normal (arg2);
1200   /* Argument 2 must be 0, 1, 2, or 3.  */
1201   if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
1202     {
1203       warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
1204       op2 = const0_rtx;
1205     }
1206 
1207   if (targetm.have_prefetch ())
1208     {
1209       struct expand_operand ops[3];
1210 
1211       create_address_operand (&ops[0], op0);
1212       create_integer_operand (&ops[1], INTVAL (op1));
1213       create_integer_operand (&ops[2], INTVAL (op2));
1214       if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
1215 	return;
1216     }
1217 
1218   /* Don't do anything with direct references to volatile memory, but
1219      generate code to handle other side effects.  */
1220   if (!MEM_P (op0) && side_effects_p (op0))
1221     emit_insn (op0);
1222 }
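
/* For instance, __builtin_prefetch (p) is handled exactly like
   __builtin_prefetch (p, 0, 3): a read prefetch with maximal locality,
   since the missing arguments default to integer_zero_node and
   integer_three_node above.  */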
1223 
1224 /* Get a MEM rtx for expression EXP which is the address of an operand
1225    to be used in a string instruction (cmpstrsi, movmemsi, ..).  LEN is
1226    the maximum length of the block of memory that might be accessed or
1227    NULL if unknown.  */
1228 
1229 static rtx
1230 get_memory_rtx (tree exp, tree len)
1231 {
1232   tree orig_exp = exp;
1233   rtx addr, mem;
1234 
1235   /* When EXP is an unresolved SAVE_EXPR, MEM_ATTRS can still be derived
1236      from its operand; e.g. for expr->a.b only <variable>.a.b is recorded.  */
1237   if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
1238     exp = TREE_OPERAND (exp, 0);
1239 
1240   addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
1241   mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
1242 
1243   /* Get an expression we can use to find the attributes to assign to MEM.
1244      First remove any nops.  */
1245   while (CONVERT_EXPR_P (exp)
1246 	 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
1247     exp = TREE_OPERAND (exp, 0);
1248 
1249   /* Build a MEM_REF representing the whole accessed area as a byte blob
1250      (as builtin stringops may alias with anything).  */
1251   exp = fold_build2 (MEM_REF,
1252 		     build_array_type (char_type_node,
1253 				       build_range_type (sizetype,
1254 							 size_one_node, len)),
1255 		     exp, build_int_cst (ptr_type_node, 0));
1256 
1257   /* If the MEM_REF has no acceptable address, try to get the base object
1258      from the original address we got, and build an all-aliasing
1259      unknown-sized access to that one.  */
1260   if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
1261     set_mem_attributes (mem, exp, 0);
1262   else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
1263 	   && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
1264 						     0))))
1265     {
1266       exp = build_fold_addr_expr (exp);
1267       exp = fold_build2 (MEM_REF,
1268 			 build_array_type (char_type_node,
1269 					   build_range_type (sizetype,
1270 							     size_zero_node,
1271 							     NULL)),
1272 			 exp, build_int_cst (ptr_type_node, 0));
1273       set_mem_attributes (mem, exp, 0);
1274     }
1275   set_mem_alias_set (mem, 0);
1276   return mem;
1277 }
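
/* For instance, when a call such as memcpy (dst, src, n) is expanded, the
   string-op expanders later in this file use get_memory_rtx (dst, n) and
   get_memory_rtx (src, n); each result is a BLKmode MEM whose attributes
   describe an N-byte character array at the given address and whose alias
   set is 0, so it conflicts with any other memory access.  */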
1278 
1279 /* Built-in functions to perform an untyped call and return.  */
1280 
1281 #define apply_args_mode \
1282   (this_target_builtins->x_apply_args_mode)
1283 #define apply_result_mode \
1284   (this_target_builtins->x_apply_result_mode)
1285 
1286 /* Return the size required for the block returned by __builtin_apply_args,
1287    and initialize apply_args_mode.  */
1288 
1289 static int
1290 apply_args_size (void)
1291 {
1292   static int size = -1;
1293   int align;
1294   unsigned int regno;
1295   machine_mode mode;
1296 
1297   /* The values computed by this function never change.  */
1298   if (size < 0)
1299     {
1300       /* The first value is the incoming arg-pointer.  */
1301       size = GET_MODE_SIZE (Pmode);
1302 
1303       /* The second value is the structure value address unless this is
1304 	 passed as an "invisible" first argument.  */
1305       if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1306 	size += GET_MODE_SIZE (Pmode);
1307 
1308       for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1309 	if (FUNCTION_ARG_REGNO_P (regno))
1310 	  {
1311 	    mode = targetm.calls.get_raw_arg_mode (regno);
1312 
1313 	    gcc_assert (mode != VOIDmode);
1314 
1315 	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1316 	    if (size % align != 0)
1317 	      size = CEIL (size, align) * align;
1318 	    size += GET_MODE_SIZE (mode);
1319 	    apply_args_mode[regno] = mode;
1320 	  }
1321 	else
1322 	  {
1323 	    apply_args_mode[regno] = VOIDmode;
1324 	  }
1325     }
1326   return size;
1327 }
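
/* A purely hypothetical layout (the real one is entirely target-defined):
   with an 8-byte Pmode, a structure value address that is not passed as an
   invisible first argument, and two 8-byte argument registers, the block
   holds the incoming arg pointer at offset 0, the structure value address
   at offset 8, and the two registers at offsets 16 and 24, for a total
   size of 32 bytes.  */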
1328 
1329 /* Return the size required for the block returned by __builtin_apply,
1330    and initialize apply_result_mode.  */
1331 
1332 static int
1333 apply_result_size (void)
1334 {
1335   static int size = -1;
1336   int align, regno;
1337   machine_mode mode;
1338 
1339   /* The values computed by this function never change.  */
1340   if (size < 0)
1341     {
1342       size = 0;
1343 
1344       for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1345 	if (targetm.calls.function_value_regno_p (regno))
1346 	  {
1347 	    mode = targetm.calls.get_raw_result_mode (regno);
1348 
1349 	    gcc_assert (mode != VOIDmode);
1350 
1351 	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1352 	    if (size % align != 0)
1353 	      size = CEIL (size, align) * align;
1354 	    size += GET_MODE_SIZE (mode);
1355 	    apply_result_mode[regno] = mode;
1356 	  }
1357 	else
1358 	  apply_result_mode[regno] = VOIDmode;
1359 
1360       /* Allow targets that use untyped_call and untyped_return to override
1361 	 the size so that machine-specific information can be stored here.  */
1362 #ifdef APPLY_RESULT_SIZE
1363       size = APPLY_RESULT_SIZE;
1364 #endif
1365     }
1366   return size;
1367 }
1368 
1369 /* Create a vector describing the result block RESULT.  If SAVEP is true,
1370    the result block is used to save the values; otherwise it is used to
1371    restore the values.  */
1372 
1373 static rtx
1374 result_vector (int savep, rtx result)
1375 {
1376   int regno, size, align, nelts;
1377   machine_mode mode;
1378   rtx reg, mem;
1379   rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
1380 
1381   size = nelts = 0;
1382   for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1383     if ((mode = apply_result_mode[regno]) != VOIDmode)
1384       {
1385 	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1386 	if (size % align != 0)
1387 	  size = CEIL (size, align) * align;
1388 	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1389 	mem = adjust_address (result, mode, size);
1390 	savevec[nelts++] = (savep
1391 			    ? gen_rtx_SET (mem, reg)
1392 			    : gen_rtx_SET (reg, mem));
1393 	size += GET_MODE_SIZE (mode);
1394       }
1395   return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1396 }
1397 
1398 /* Save the state required to perform an untyped call with the same
1399    arguments as were passed to the current function.  */
1400 
1401 static rtx
1402 expand_builtin_apply_args_1 (void)
1403 {
1404   rtx registers, tem;
1405   int size, align, regno;
1406   machine_mode mode;
1407   rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
1408 
1409   /* Create a block where the arg-pointer, structure value address,
1410      and argument registers can be saved.  */
1411   registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1412 
1413   /* Walk past the arg-pointer and structure value address.  */
1414   size = GET_MODE_SIZE (Pmode);
1415   if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
1416     size += GET_MODE_SIZE (Pmode);
1417 
1418   /* Save each register used in calling a function to the block.  */
1419   for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1420     if ((mode = apply_args_mode[regno]) != VOIDmode)
1421       {
1422 	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1423 	if (size % align != 0)
1424 	  size = CEIL (size, align) * align;
1425 
1426 	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1427 
1428 	emit_move_insn (adjust_address (registers, mode, size), tem);
1429 	size += GET_MODE_SIZE (mode);
1430       }
1431 
1432   /* Save the arg pointer to the block.  */
1433   tem = copy_to_reg (crtl->args.internal_arg_pointer);
1434   /* We need the arg pointer as the caller actually passed the arguments
1435      to us, not as we might have pretended they were passed.  Make sure it's
1436      a valid operand, as emit_move_insn isn't expected to handle a PLUS.  */
1437   if (STACK_GROWS_DOWNWARD)
1438     tem
1439       = force_operand (plus_constant (Pmode, tem,
1440 				      crtl->args.pretend_args_size),
1441 		       NULL_RTX);
1442   emit_move_insn (adjust_address (registers, Pmode, 0), tem);
1443 
1444   size = GET_MODE_SIZE (Pmode);
1445 
1446   /* Save the structure value address unless this is passed as an
1447      "invisible" first argument.  */
1448   if (struct_incoming_value)
1449     {
1450       emit_move_insn (adjust_address (registers, Pmode, size),
1451 		      copy_to_reg (struct_incoming_value));
1452       size += GET_MODE_SIZE (Pmode);
1453     }
1454 
1455   /* Return the address of the block.  */
1456   return copy_addr_to_reg (XEXP (registers, 0));
1457 }
1458 
1459 /* __builtin_apply_args returns a block of memory allocated on
1460    the stack into which is stored the arg pointer, structure
1461    value address, static chain, and all the registers that might
1462    possibly be used in performing a function call.  The code is
1463    moved to the start of the function so the incoming values are
1464    saved.  */
1465 
1466 static rtx
1467 expand_builtin_apply_args (void)
1468 {
1469   /* Don't do __builtin_apply_args more than once in a function.
1470      Save the result of the first call and reuse it.  */
1471   if (apply_args_value != 0)
1472     return apply_args_value;
1473   {
1474     /* When this function is called, it means that registers must be
1475        saved on entry to this function.  So we migrate the
1476        call to the first insn of this function.  */
1477     rtx temp;
1478 
1479     start_sequence ();
1480     temp = expand_builtin_apply_args_1 ();
1481     rtx_insn *seq = get_insns ();
1482     end_sequence ();
1483 
1484     apply_args_value = temp;
1485 
1486     /* Put the insns after the NOTE that starts the function.
1487        If this is inside a start_sequence, make the outer-level insn
1488        chain current, so the code is placed at the start of the
1489        function.  If internal_arg_pointer is a non-virtual pseudo,
1490        it needs to be placed after the function that initializes
1491        that pseudo.  */
1492     push_topmost_sequence ();
1493     if (REG_P (crtl->args.internal_arg_pointer)
1494 	&& REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
1495       emit_insn_before (seq, parm_birth_insn);
1496     else
1497       emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
1498     pop_topmost_sequence ();
1499     return temp;
1500   }
1501 }
1502 
1503 /* Perform an untyped call and save the state required to perform an
1504    untyped return of whatever value was returned by the given function.  */
1505 
1506 static rtx
1507 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
1508 {
1509   int size, align, regno;
1510   machine_mode mode;
1511   rtx incoming_args, result, reg, dest, src;
1512   rtx_call_insn *call_insn;
1513   rtx old_stack_level = 0;
1514   rtx call_fusage = 0;
1515   rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
1516 
1517   arguments = convert_memory_address (Pmode, arguments);
1518 
1519   /* Create a block where the return registers can be saved.  */
1520   result = assign_stack_local (BLKmode, apply_result_size (), -1);
1521 
1522   /* Fetch the arg pointer from the ARGUMENTS block.  */
1523   incoming_args = gen_reg_rtx (Pmode);
1524   emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1525   if (!STACK_GROWS_DOWNWARD)
1526     incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1527 					 incoming_args, 0, OPTAB_LIB_WIDEN);
1528 
1529   /* Push a new argument block and copy the arguments.  Do not allow
1530      the (potential) memcpy call below to interfere with our stack
1531      manipulations.  */
1532   do_pending_stack_adjust ();
1533   NO_DEFER_POP;
1534 
1535   /* Save the stack with nonlocal if available.  */
1536   if (targetm.have_save_stack_nonlocal ())
1537     emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
1538   else
1539     emit_stack_save (SAVE_BLOCK, &old_stack_level);
1540 
1541   /* Allocate a block of memory onto the stack and copy the memory
1542      arguments to the outgoing arguments address.  We can pass TRUE
1543      as the 4th argument because we just saved the stack pointer
1544      and will restore it right after the call.  */
1545   allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, true);
1546 
1547   /* Set DRAP flag to true, even though allocate_dynamic_stack_space
1548      may have already set current_function_calls_alloca to true.
1549      current_function_calls_alloca won't be set if argsize is zero,
1550      so we have to guarantee need_drap is true here.  */
1551   if (SUPPORTS_STACK_ALIGNMENT)
1552     crtl->need_drap = true;
1553 
1554   dest = virtual_outgoing_args_rtx;
1555   if (!STACK_GROWS_DOWNWARD)
1556     {
1557       if (CONST_INT_P (argsize))
1558 	dest = plus_constant (Pmode, dest, -INTVAL (argsize));
1559       else
1560 	dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
1561     }
1562   dest = gen_rtx_MEM (BLKmode, dest);
1563   set_mem_align (dest, PARM_BOUNDARY);
1564   src = gen_rtx_MEM (BLKmode, incoming_args);
1565   set_mem_align (src, PARM_BOUNDARY);
1566   emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1567 
1568   /* Refer to the argument block.  */
1569   apply_args_size ();
1570   arguments = gen_rtx_MEM (BLKmode, arguments);
1571   set_mem_align (arguments, PARM_BOUNDARY);
1572 
1573   /* Walk past the arg-pointer and structure value address.  */
1574   size = GET_MODE_SIZE (Pmode);
1575   if (struct_value)
1576     size += GET_MODE_SIZE (Pmode);
1577 
1578   /* Restore each of the registers previously saved.  Make USE insns
1579      for each of these registers for use in making the call.  */
1580   for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1581     if ((mode = apply_args_mode[regno]) != VOIDmode)
1582       {
1583 	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1584 	if (size % align != 0)
1585 	  size = CEIL (size, align) * align;
1586 	reg = gen_rtx_REG (mode, regno);
1587 	emit_move_insn (reg, adjust_address (arguments, mode, size));
1588 	use_reg (&call_fusage, reg);
1589 	size += GET_MODE_SIZE (mode);
1590       }
1591 
1592   /* Restore the structure value address unless this is passed as an
1593      "invisible" first argument.  */
1594   size = GET_MODE_SIZE (Pmode);
1595   if (struct_value)
1596     {
1597       rtx value = gen_reg_rtx (Pmode);
1598       emit_move_insn (value, adjust_address (arguments, Pmode, size));
1599       emit_move_insn (struct_value, value);
1600       if (REG_P (struct_value))
1601 	use_reg (&call_fusage, struct_value);
1602       size += GET_MODE_SIZE (Pmode);
1603     }
1604 
1605   /* All arguments and registers used for the call are set up by now!  */
1606   function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
1607 
1608   /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
1609      and we don't want to load it into a register as an optimization,
1610      because prepare_call_address already did it if it should be done.  */
1611   if (GET_CODE (function) != SYMBOL_REF)
1612     function = memory_address (FUNCTION_MODE, function);
1613 
1614   /* Generate the actual call instruction and save the return value.  */
1615   if (targetm.have_untyped_call ())
1616     {
1617       rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
1618       emit_call_insn (targetm.gen_untyped_call (mem, result,
1619 						result_vector (1, result)));
1620     }
1621   else if (targetm.have_call_value ())
1622     {
1623       rtx valreg = 0;
1624 
1625       /* Locate the unique return register.  It is not possible to
1626 	 express a call that sets more than one return register using
1627 	 call_value; use untyped_call for that.  In fact, untyped_call
1628 	 only needs to save the return registers in the given block.  */
1629       for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1630 	if ((mode = apply_result_mode[regno]) != VOIDmode)
1631 	  {
1632 	    gcc_assert (!valreg); /* have_untyped_call required.  */
1633 
1634 	    valreg = gen_rtx_REG (mode, regno);
1635 	  }
1636 
1637       emit_insn (targetm.gen_call_value (valreg,
1638 					 gen_rtx_MEM (FUNCTION_MODE, function),
1639 					 const0_rtx, NULL_RTX, const0_rtx));
1640 
1641       emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1642     }
1643   else
1644     gcc_unreachable ();
1645 
1646   /* Find the CALL insn we just emitted, and attach the register usage
1647      information.  */
1648   call_insn = last_call_insn ();
1649   add_function_usage_to (call_insn, call_fusage);
1650 
1651   /* Restore the stack.  */
1652   if (targetm.have_save_stack_nonlocal ())
1653     emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
1654   else
1655     emit_stack_restore (SAVE_BLOCK, old_stack_level);
1656   fixup_args_size_notes (call_insn, get_last_insn (), 0);
1657 
1658   OK_DEFER_POP;
1659 
1660   /* Return the address of the result block.  */
1661   result = copy_addr_to_reg (XEXP (result, 0));
1662   return convert_memory_address (ptr_mode, result);
1663 }
1664 
1665 /* Perform an untyped return.  */
1666 
1667 static void
1668 expand_builtin_return (rtx result)
1669 {
1670   int size, align, regno;
1671   machine_mode mode;
1672   rtx reg;
1673   rtx_insn *call_fusage = 0;
1674 
1675   result = convert_memory_address (Pmode, result);
1676 
1677   apply_result_size ();
1678   result = gen_rtx_MEM (BLKmode, result);
1679 
1680   if (targetm.have_untyped_return ())
1681     {
1682       rtx vector = result_vector (0, result);
1683       emit_jump_insn (targetm.gen_untyped_return (result, vector));
1684       emit_barrier ();
1685       return;
1686     }
1687 
1688   /* Restore the return value and note that each value is used.  */
1689   size = 0;
1690   for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1691     if ((mode = apply_result_mode[regno]) != VOIDmode)
1692       {
1693 	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1694 	if (size % align != 0)
1695 	  size = CEIL (size, align) * align;
1696 	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1697 	emit_move_insn (reg, adjust_address (result, mode, size));
1698 
1699 	push_to_sequence (call_fusage);
1700 	emit_use (reg);
1701 	call_fusage = get_insns ();
1702 	end_sequence ();
1703 	size += GET_MODE_SIZE (mode);
1704       }
1705 
1706   /* Put the USE insns before the return.  */
1707   emit_insn (call_fusage);
1708 
1709   /* Return whatever values were restored by jumping directly to the end
1710      of the function.  */
1711   expand_naked_return ();
1712 }
1713 
1714 /* Used by expand_builtin_classify_type and fold_builtin_classify_type.  */
1715 
1716 static enum type_class
1717 type_to_class (tree type)
1718 {
1719   switch (TREE_CODE (type))
1720     {
1721     case VOID_TYPE:	   return void_type_class;
1722     case INTEGER_TYPE:	   return integer_type_class;
1723     case ENUMERAL_TYPE:	   return enumeral_type_class;
1724     case BOOLEAN_TYPE:	   return boolean_type_class;
1725     case POINTER_TYPE:	   return pointer_type_class;
1726     case REFERENCE_TYPE:   return reference_type_class;
1727     case OFFSET_TYPE:	   return offset_type_class;
1728     case REAL_TYPE:	   return real_type_class;
1729     case COMPLEX_TYPE:	   return complex_type_class;
1730     case FUNCTION_TYPE:	   return function_type_class;
1731     case METHOD_TYPE:	   return method_type_class;
1732     case RECORD_TYPE:	   return record_type_class;
1733     case UNION_TYPE:
1734     case QUAL_UNION_TYPE:  return union_type_class;
1735     case ARRAY_TYPE:	   return (TYPE_STRING_FLAG (type)
1736 				   ? string_type_class : array_type_class);
1737     case LANG_TYPE:	   return lang_type_class;
1738     default:		   return no_type_class;
1739     }
1740 }
1741 
1742 /* Expand a call EXP to __builtin_classify_type.  */
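/* For instance (a sketch of the mapping implemented by type_to_class):
   __builtin_classify_type (1.0) expands to the constant real_type_class,
   and a call with no arguments yields no_type_class.  */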
1743 
1744 static rtx
1745 expand_builtin_classify_type (tree exp)
1746 {
1747   if (call_expr_nargs (exp))
1748     return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
1749   return GEN_INT (no_type_class);
1750 }
1751 
1752 /* This helper macro, meant to be used in mathfn_built_in below,
1753    determines which among a set of three builtin math functions is
1754    appropriate for a given type mode.  The `F' and `L' cases are
1755    automatically generated from the `double' case.  */
1756 #define CASE_MATHFN(MATHFN) \
1757   CASE_CFN_##MATHFN: \
1758   fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
1759   fcodel = BUILT_IN_##MATHFN##L ; break;
1760 /* Similar to above, but appends _R after any F/L suffix.  */
1761 #define CASE_MATHFN_REENT(MATHFN) \
1762   case CFN_BUILT_IN_##MATHFN##_R: \
1763   case CFN_BUILT_IN_##MATHFN##F_R: \
1764   case CFN_BUILT_IN_##MATHFN##L_R: \
1765   fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
1766   fcodel = BUILT_IN_##MATHFN##L_R ; break;
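/* As a concrete illustration (modulo whitespace), CASE_MATHFN (SQRT)
   expands to roughly:

     CASE_CFN_SQRT:
       fcode = BUILT_IN_SQRT; fcodef = BUILT_IN_SQRTF;
       fcodel = BUILT_IN_SQRTL; break;
*/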
1767 
1768 /* Return a function equivalent to FN but operating on floating-point
1769    values of type TYPE, or END_BUILTINS if no such function exists.
1770    This is purely an operation on function codes; it does not guarantee
1771    that the target actually has an implementation of the function.  */
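/* For example, the switch below is expected to give (a sketch, not an
   exhaustive list):

     mathfn_built_in_2 (double_type_node, CFN_SQRT)      == BUILT_IN_SQRT
     mathfn_built_in_2 (float_type_node, CFN_SQRT)       == BUILT_IN_SQRTF
     mathfn_built_in_2 (long_double_type_node, CFN_SQRT) == BUILT_IN_SQRTL
     mathfn_built_in_2 (integer_type_node, CFN_SQRT)     == END_BUILTINS  */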
1772 
1773 static built_in_function
1774 mathfn_built_in_2 (tree type, combined_fn fn)
1775 {
1776   built_in_function fcode, fcodef, fcodel;
1777 
1778   switch (fn)
1779     {
1780     CASE_MATHFN (ACOS)
1781     CASE_MATHFN (ACOSH)
1782     CASE_MATHFN (ASIN)
1783     CASE_MATHFN (ASINH)
1784     CASE_MATHFN (ATAN)
1785     CASE_MATHFN (ATAN2)
1786     CASE_MATHFN (ATANH)
1787     CASE_MATHFN (CBRT)
1788     CASE_MATHFN (CEIL)
1789     CASE_MATHFN (CEXPI)
1790     CASE_MATHFN (COPYSIGN)
1791     CASE_MATHFN (COS)
1792     CASE_MATHFN (COSH)
1793     CASE_MATHFN (DREM)
1794     CASE_MATHFN (ERF)
1795     CASE_MATHFN (ERFC)
1796     CASE_MATHFN (EXP)
1797     CASE_MATHFN (EXP10)
1798     CASE_MATHFN (EXP2)
1799     CASE_MATHFN (EXPM1)
1800     CASE_MATHFN (FABS)
1801     CASE_MATHFN (FDIM)
1802     CASE_MATHFN (FLOOR)
1803     CASE_MATHFN (FMA)
1804     CASE_MATHFN (FMAX)
1805     CASE_MATHFN (FMIN)
1806     CASE_MATHFN (FMOD)
1807     CASE_MATHFN (FREXP)
1808     CASE_MATHFN (GAMMA)
1809     CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */
1810     CASE_MATHFN (HUGE_VAL)
1811     CASE_MATHFN (HYPOT)
1812     CASE_MATHFN (ILOGB)
1813     CASE_MATHFN (ICEIL)
1814     CASE_MATHFN (IFLOOR)
1815     CASE_MATHFN (INF)
1816     CASE_MATHFN (IRINT)
1817     CASE_MATHFN (IROUND)
1818     CASE_MATHFN (ISINF)
1819     CASE_MATHFN (J0)
1820     CASE_MATHFN (J1)
1821     CASE_MATHFN (JN)
1822     CASE_MATHFN (LCEIL)
1823     CASE_MATHFN (LDEXP)
1824     CASE_MATHFN (LFLOOR)
1825     CASE_MATHFN (LGAMMA)
1826     CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */
1827     CASE_MATHFN (LLCEIL)
1828     CASE_MATHFN (LLFLOOR)
1829     CASE_MATHFN (LLRINT)
1830     CASE_MATHFN (LLROUND)
1831     CASE_MATHFN (LOG)
1832     CASE_MATHFN (LOG10)
1833     CASE_MATHFN (LOG1P)
1834     CASE_MATHFN (LOG2)
1835     CASE_MATHFN (LOGB)
1836     CASE_MATHFN (LRINT)
1837     CASE_MATHFN (LROUND)
1838     CASE_MATHFN (MODF)
1839     CASE_MATHFN (NAN)
1840     CASE_MATHFN (NANS)
1841     CASE_MATHFN (NEARBYINT)
1842     CASE_MATHFN (NEXTAFTER)
1843     CASE_MATHFN (NEXTTOWARD)
1844     CASE_MATHFN (POW)
1845     CASE_MATHFN (POWI)
1846     CASE_MATHFN (POW10)
1847     CASE_MATHFN (REMAINDER)
1848     CASE_MATHFN (REMQUO)
1849     CASE_MATHFN (RINT)
1850     CASE_MATHFN (ROUND)
1851     CASE_MATHFN (SCALB)
1852     CASE_MATHFN (SCALBLN)
1853     CASE_MATHFN (SCALBN)
1854     CASE_MATHFN (SIGNBIT)
1855     CASE_MATHFN (SIGNIFICAND)
1856     CASE_MATHFN (SIN)
1857     CASE_MATHFN (SINCOS)
1858     CASE_MATHFN (SINH)
1859     CASE_MATHFN (SQRT)
1860     CASE_MATHFN (TAN)
1861     CASE_MATHFN (TANH)
1862     CASE_MATHFN (TGAMMA)
1863     CASE_MATHFN (TRUNC)
1864     CASE_MATHFN (Y0)
1865     CASE_MATHFN (Y1)
1866     CASE_MATHFN (YN)
1867 
1868     default:
1869       return END_BUILTINS;
1870     }
1871 
1872   if (TYPE_MAIN_VARIANT (type) == double_type_node)
1873     return fcode;
1874   else if (TYPE_MAIN_VARIANT (type) == float_type_node)
1875     return fcodef;
1876   else if (TYPE_MAIN_VARIANT (type) == long_double_type_node)
1877     return fcodel;
1878   else
1879     return END_BUILTINS;
1880 }
1881 
1882 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
1883    if available.  If IMPLICIT_P is true use the implicit builtin declaration,
1884    otherwise use the explicit declaration.  If we can't do the conversion,
1885    return null.  */
1886 
1887 static tree
1888 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
1889 {
1890   built_in_function fcode2 = mathfn_built_in_2 (type, fn);
1891   if (fcode2 == END_BUILTINS)
1892     return NULL_TREE;
1893 
1894   if (implicit_p && !builtin_decl_implicit_p (fcode2))
1895     return NULL_TREE;
1896 
1897   return builtin_decl_explicit (fcode2);
1898 }
1899 
1900 /* Like mathfn_built_in_1, but always use the implicit builtin declaration.  */
1901 
1902 tree
1903 mathfn_built_in (tree type, combined_fn fn)
1904 {
1905   return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
1906 }
1907 
1908 /* Like mathfn_built_in_1, but take a built_in_function and
1909    always use the implicit builtin declaration.  */
1910 
1911 tree
1912 mathfn_built_in (tree type, enum built_in_function fn)
1913 {
1914   return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
1915 }
1916 
1917 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
1918    return its code, otherwise return IFN_LAST.  Note that this function
1919    only tests whether the function is defined in internal-fn.def, not whether
1920    it is actually available on the target.  */
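/* For example (a sketch; the exact set is whatever internal-fn.def defines):
   __builtin_sqrt maps to IFN_SQRT, __builtin_pow10 maps to IFN_EXP10 via the
   explicit case below, and a builtin with no internal counterpart, say
   __builtin_printf, yields IFN_LAST.  */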
1921 
1922 internal_fn
1923 associated_internal_fn (tree fndecl)
1924 {
1925   gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
1926   tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
1927   switch (DECL_FUNCTION_CODE (fndecl))
1928     {
1929 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
1930     CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1931 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
1932     CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
1933 #include "internal-fn.def"
1934 
1935     CASE_FLT_FN (BUILT_IN_POW10):
1936       return IFN_EXP10;
1937 
1938     CASE_FLT_FN (BUILT_IN_DREM):
1939       return IFN_REMAINDER;
1940 
1941     CASE_FLT_FN (BUILT_IN_SCALBN):
1942     CASE_FLT_FN (BUILT_IN_SCALBLN):
1943       if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
1944 	return IFN_LDEXP;
1945       return IFN_LAST;
1946 
1947     default:
1948       return IFN_LAST;
1949     }
1950 }
1951 
1952 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
1953    on the current target by a call to an internal function, return the
1954    code of that internal function, otherwise return IFN_LAST.  The caller
1955    is responsible for ensuring that any side-effects of the built-in
1956    call are dealt with correctly.  E.g. if CALL sets errno, the caller
1957    must decide that the errno result isn't needed or make it available
1958    in some other way.  */
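/* As a sketch: a call to __builtin_fma could be reported as IFN_FMA here
   when direct_internal_fn_supported_p says the target handles it for the
   call's types, and as IFN_LAST otherwise.  */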
1959 
1960 internal_fn
1961 replacement_internal_fn (gcall *call)
1962 {
1963   if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
1964     {
1965       internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
1966       if (ifn != IFN_LAST)
1967 	{
1968 	  tree_pair types = direct_internal_fn_types (ifn, call);
1969 	  optimization_type opt_type = bb_optimization_type (gimple_bb (call));
1970 	  if (direct_internal_fn_supported_p (ifn, types, opt_type))
1971 	    return ifn;
1972 	}
1973     }
1974   return IFN_LAST;
1975 }
1976 
1977 /* Expand a call to the builtin trinary math functions (fma).
1978    Return NULL_RTX if a normal call should be emitted rather than expanding the
1979    function in-line.  EXP is the expression that is a call to the builtin
1980    function; if convenient, the result should be placed in TARGET.
1981    SUBTARGET may be used as the target for computing one of EXP's
1982    operands.  */
1983 
1984 static rtx
1985 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
1986 {
1987   optab builtin_optab;
1988   rtx op0, op1, op2, result;
1989   rtx_insn *insns;
1990   tree fndecl = get_callee_fndecl (exp);
1991   tree arg0, arg1, arg2;
1992   machine_mode mode;
1993 
1994   if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
1995     return NULL_RTX;
1996 
1997   arg0 = CALL_EXPR_ARG (exp, 0);
1998   arg1 = CALL_EXPR_ARG (exp, 1);
1999   arg2 = CALL_EXPR_ARG (exp, 2);
2000 
2001   switch (DECL_FUNCTION_CODE (fndecl))
2002     {
2003     CASE_FLT_FN (BUILT_IN_FMA):
2004       builtin_optab = fma_optab; break;
2005     default:
2006       gcc_unreachable ();
2007     }
2008 
2009   /* Make a suitable register to place result in.  */
2010   mode = TYPE_MODE (TREE_TYPE (exp));
2011 
2012   /* Before working hard, check whether the instruction is available.  */
2013   if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2014     return NULL_RTX;
2015 
2016   result = gen_reg_rtx (mode);
2017 
2018   /* Always stabilize the argument list.  */
2019   CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
2020   CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
2021   CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
2022 
2023   op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
2024   op1 = expand_normal (arg1);
2025   op2 = expand_normal (arg2);
2026 
2027   start_sequence ();
2028 
2029   /* Compute into RESULT.
2030      Set RESULT to wherever the result comes back.  */
2031   result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
2032 			      result, 0);
2033 
2034   /* If we were unable to expand via the builtin, stop the sequence
2035      (without outputting the insns) and call to the library function
2036      with the stabilized argument list.  */
2037   if (result == 0)
2038     {
2039       end_sequence ();
2040       return expand_call (exp, target, target == const0_rtx);
2041     }
2042 
2043   /* Output the entire sequence.  */
2044   insns = get_insns ();
2045   end_sequence ();
2046   emit_insn (insns);
2047 
2048   return result;
2049 }
2050 
2051 /* Expand a call to the builtin sin and cos math functions.
2052    Return NULL_RTX if a normal call should be emitted rather than expanding the
2053    function in-line.  EXP is the expression that is a call to the builtin
2054    function; if convenient, the result should be placed in TARGET.
2055    SUBTARGET may be used as the target for computing one of EXP's
2056    operands.  */
2057 
2058 static rtx
2059 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
2060 {
2061   optab builtin_optab;
2062   rtx op0;
2063   rtx_insn *insns;
2064   tree fndecl = get_callee_fndecl (exp);
2065   machine_mode mode;
2066   tree arg;
2067 
2068   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2069     return NULL_RTX;
2070 
2071   arg = CALL_EXPR_ARG (exp, 0);
2072 
2073   switch (DECL_FUNCTION_CODE (fndecl))
2074     {
2075     CASE_FLT_FN (BUILT_IN_SIN):
2076     CASE_FLT_FN (BUILT_IN_COS):
2077       builtin_optab = sincos_optab; break;
2078     default:
2079       gcc_unreachable ();
2080     }
2081 
2082   /* Make a suitable register to place result in.  */
2083   mode = TYPE_MODE (TREE_TYPE (exp));
2084 
2085   /* Check if the sincos insn is available; otherwise fall back
2086      to the sin or cos insn.  */
2087   if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2088     switch (DECL_FUNCTION_CODE (fndecl))
2089       {
2090       CASE_FLT_FN (BUILT_IN_SIN):
2091 	builtin_optab = sin_optab; break;
2092       CASE_FLT_FN (BUILT_IN_COS):
2093 	builtin_optab = cos_optab; break;
2094       default:
2095 	gcc_unreachable ();
2096       }
2097 
2098   /* Before working hard, check whether the instruction is available.  */
2099   if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
2100     {
2101       rtx result = gen_reg_rtx (mode);
2102 
2103       /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2104 	 need to expand the argument again.  This way, we will not perform
2105 	 side-effects more than once.  */
2106       CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2107 
2108       op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
2109 
2110       start_sequence ();
2111 
2112       /* Compute into RESULT.
2113 	 Set RESULT to wherever the result comes back.  */
2114       if (builtin_optab == sincos_optab)
2115 	{
2116 	  int ok;
2117 
2118 	  switch (DECL_FUNCTION_CODE (fndecl))
2119 	    {
2120 	    CASE_FLT_FN (BUILT_IN_SIN):
2121 	      ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
2122 	      break;
2123 	    CASE_FLT_FN (BUILT_IN_COS):
2124 	      ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
2125 	      break;
2126 	    default:
2127 	      gcc_unreachable ();
2128 	    }
2129 	  gcc_assert (ok);
2130 	}
2131       else
2132 	result = expand_unop (mode, builtin_optab, op0, result, 0);
2133 
2134       if (result != 0)
2135 	{
2136 	  /* Output the entire sequence.  */
2137 	  insns = get_insns ();
2138 	  end_sequence ();
2139 	  emit_insn (insns);
2140 	  return result;
2141 	}
2142 
2143       /* If we were unable to expand via the builtin, stop the sequence
2144 	 (without outputting the insns) and call to the library function
2145 	 with the stabilized argument list.  */
2146       end_sequence ();
2147     }
2148 
2149   return expand_call (exp, target, target == const0_rtx);
2150 }
2151 
2152 /* Given an interclass math builtin decl FNDECL and its argument ARG
2153    return an RTL instruction code that implements the functionality.
2154    If that isn't possible or available return CODE_FOR_nothing.  */
2155 
2156 static enum insn_code
2157 interclass_mathfn_icode (tree arg, tree fndecl)
2158 {
2159   bool errno_set = false;
2160   optab builtin_optab = unknown_optab;
2161   machine_mode mode;
2162 
2163   switch (DECL_FUNCTION_CODE (fndecl))
2164     {
2165     CASE_FLT_FN (BUILT_IN_ILOGB):
2166       errno_set = true; builtin_optab = ilogb_optab; break;
2167     CASE_FLT_FN (BUILT_IN_ISINF):
2168       builtin_optab = isinf_optab; break;
2169     case BUILT_IN_ISNORMAL:
2170     case BUILT_IN_ISFINITE:
2171     CASE_FLT_FN (BUILT_IN_FINITE):
2172     case BUILT_IN_FINITED32:
2173     case BUILT_IN_FINITED64:
2174     case BUILT_IN_FINITED128:
2175     case BUILT_IN_ISINFD32:
2176     case BUILT_IN_ISINFD64:
2177     case BUILT_IN_ISINFD128:
2178       /* These builtins have no optabs (yet).  */
2179       break;
2180     default:
2181       gcc_unreachable ();
2182     }
2183 
2184   /* There's no easy way to detect the case we need to set EDOM.  */
2185   if (flag_errno_math && errno_set)
2186     return CODE_FOR_nothing;
2187 
2188   /* Optab mode depends on the mode of the input argument.  */
2189   mode = TYPE_MODE (TREE_TYPE (arg));
2190 
2191   if (builtin_optab)
2192     return optab_handler (builtin_optab, mode);
2193   return CODE_FOR_nothing;
2194 }
2195 
2196 /* Expand a call to one of the builtin math functions that operate on
2197    a floating-point argument and output an integer result (ilogb, isinf,
2198    isnan, etc).
2199    Return 0 if a normal call should be emitted rather than expanding the
2200    function in-line.  EXP is the expression that is a call to the builtin
2201    function; if convenient, the result should be placed in TARGET.  */
2202 
2203 static rtx
2204 expand_builtin_interclass_mathfn (tree exp, rtx target)
2205 {
2206   enum insn_code icode = CODE_FOR_nothing;
2207   rtx op0;
2208   tree fndecl = get_callee_fndecl (exp);
2209   machine_mode mode;
2210   tree arg;
2211 
2212   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2213     return NULL_RTX;
2214 
2215   arg = CALL_EXPR_ARG (exp, 0);
2216   icode = interclass_mathfn_icode (arg, fndecl);
2217   mode = TYPE_MODE (TREE_TYPE (arg));
2218 
2219   if (icode != CODE_FOR_nothing)
2220     {
2221       struct expand_operand ops[1];
2222       rtx_insn *last = get_last_insn ();
2223       tree orig_arg = arg;
2224 
2225       /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2226 	 need to expand the argument again.  This way, we will not perform
2227 	 side-effects more than once.  */
2228       CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2229 
2230       op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2231 
2232       if (mode != GET_MODE (op0))
2233 	op0 = convert_to_mode (mode, op0, 0);
2234 
2235       create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
2236       if (maybe_legitimize_operands (icode, 0, 1, ops)
2237 	  && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
2238 	return ops[0].value;
2239 
2240       delete_insns_since (last);
2241       CALL_EXPR_ARG (exp, 0) = orig_arg;
2242     }
2243 
2244   return NULL_RTX;
2245 }
2246 
2247 /* Expand a call to the builtin sincos math function.
2248    Return NULL_RTX if a normal call should be emitted rather than expanding the
2249    function in-line.  EXP is the expression that is a call to the builtin
2250    function.  */
2251 
2252 static rtx
2253 expand_builtin_sincos (tree exp)
2254 {
2255   rtx op0, op1, op2, target1, target2;
2256   machine_mode mode;
2257   tree arg, sinp, cosp;
2258   int result;
2259   location_t loc = EXPR_LOCATION (exp);
2260   tree alias_type, alias_off;
2261 
2262   if (!validate_arglist (exp, REAL_TYPE,
2263  			 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2264     return NULL_RTX;
2265 
2266   arg = CALL_EXPR_ARG (exp, 0);
2267   sinp = CALL_EXPR_ARG (exp, 1);
2268   cosp = CALL_EXPR_ARG (exp, 2);
2269 
2270   /* Make a suitable register to place result in.  */
2271   mode = TYPE_MODE (TREE_TYPE (arg));
2272 
2273   /* Check if sincos insn is available, otherwise emit the call.  */
2274   if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
2275     return NULL_RTX;
2276 
2277   target1 = gen_reg_rtx (mode);
2278   target2 = gen_reg_rtx (mode);
2279 
2280   op0 = expand_normal (arg);
2281   alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
2282   alias_off = build_int_cst (alias_type, 0);
2283   op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2284 					sinp, alias_off));
2285   op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
2286 					cosp, alias_off));
2287 
2288   /* Compute into target1 and target2.
2289      Set TARGET to wherever the result comes back.  */
2290   result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
2291   gcc_assert (result);
2292 
2293   /* Move target1 and target2 to the memory locations indicated
2294      by op1 and op2.  */
2295   emit_move_insn (op1, target1);
2296   emit_move_insn (op2, target2);
2297 
2298   return const0_rtx;
2299 }
2300 
2301 /* Expand a call to the internal cexpi builtin to the sincos math function.
2302    EXP is the expression that is a call to the builtin function; if convenient,
2303    the result should be placed in TARGET.  */
2304 
2305 static rtx
2306 expand_builtin_cexpi (tree exp, rtx target)
2307 {
2308   tree fndecl = get_callee_fndecl (exp);
2309   tree arg, type;
2310   machine_mode mode;
2311   rtx op0, op1, op2;
2312   location_t loc = EXPR_LOCATION (exp);
2313 
2314   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2315     return NULL_RTX;
2316 
2317   arg = CALL_EXPR_ARG (exp, 0);
2318   type = TREE_TYPE (arg);
2319   mode = TYPE_MODE (TREE_TYPE (arg));
2320 
2321   /* Try expanding via a sincos optab, fall back to emitting a libcall
2322      to sincos or cexp.  We are sure we have sincos or cexp because cexpi
2323      is only generated from sincos, cexp or if we have either of them.  */
2324   if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
2325     {
2326       op1 = gen_reg_rtx (mode);
2327       op2 = gen_reg_rtx (mode);
2328 
2329       op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
2330 
2331       /* Compute into op1 and op2.  */
2332       expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
2333     }
2334   else if (targetm.libc_has_function (function_sincos))
2335     {
2336       tree call, fn = NULL_TREE;
2337       tree top1, top2;
2338       rtx op1a, op2a;
2339 
2340       if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2341 	fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
2342       else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2343 	fn = builtin_decl_explicit (BUILT_IN_SINCOS);
2344       else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2345 	fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
2346       else
2347 	gcc_unreachable ();
2348 
2349       op1 = assign_temp (TREE_TYPE (arg), 1, 1);
2350       op2 = assign_temp (TREE_TYPE (arg), 1, 1);
2351       op1a = copy_addr_to_reg (XEXP (op1, 0));
2352       op2a = copy_addr_to_reg (XEXP (op2, 0));
2353       top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
2354       top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
2355 
2356       /* Make sure not to fold the sincos call again.  */
2357       call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2358       expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
2359 				      call, 3, arg, top1, top2));
2360     }
2361   else
2362     {
2363       tree call, fn = NULL_TREE, narg;
2364       tree ctype = build_complex_type (type);
2365 
2366       if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2367 	fn = builtin_decl_explicit (BUILT_IN_CEXPF);
2368       else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2369 	fn = builtin_decl_explicit (BUILT_IN_CEXP);
2370       else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2371 	fn = builtin_decl_explicit (BUILT_IN_CEXPL);
2372       else
2373 	gcc_unreachable ();
2374 
2375       /* If we don't have a decl for cexp create one.  This is the
2376 	 friendliest fallback if the user calls __builtin_cexpi
2377 	 without full target C99 function support.  */
2378       if (fn == NULL_TREE)
2379 	{
2380 	  tree fntype;
2381 	  const char *name = NULL;
2382 
2383 	  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
2384 	    name = "cexpf";
2385 	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
2386 	    name = "cexp";
2387 	  else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
2388 	    name = "cexpl";
2389 
2390 	  fntype = build_function_type_list (ctype, ctype, NULL_TREE);
2391 	  fn = build_fn_decl (name, fntype);
2392 	}
2393 
2394       narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
2395 			  build_real (type, dconst0), arg);
2396 
2397       /* Make sure not to fold the cexp call again.  */
2398       call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2399       return expand_expr (build_call_nary (ctype, call, 1, narg),
2400 			  target, VOIDmode, EXPAND_NORMAL);
2401     }
2402 
2403   /* Now build the proper return type.  */
2404   return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
2405 			      make_tree (TREE_TYPE (arg), op2),
2406 			      make_tree (TREE_TYPE (arg), op1)),
2407 		      target, VOIDmode, EXPAND_NORMAL);
2408 }
2409 
2410 /* Conveniently construct a function call expression.  FNDECL names the
2411    function to be called, N is the number of arguments, and the "..."
2412    parameters are the argument expressions.  Unlike build_call_expr,
2413    this doesn't fold the call, hence it will always return a CALL_EXPR.  */
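/* E.g. the fallback paths later in this file use it as

     exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl,
				  1, arg);

   to build an unfolded call to a library fallback.  */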
2414 
2415 static tree
2416 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
2417 {
2418   va_list ap;
2419   tree fntype = TREE_TYPE (fndecl);
2420   tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
2421 
2422   va_start (ap, n);
2423   fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
2424   va_end (ap);
2425   SET_EXPR_LOCATION (fn, loc);
2426   return fn;
2427 }
2428 
2429 /* Expand a call to one of the builtin rounding functions gcc defines
2430    as an extension (lfloor and lceil).  As these are gcc extensions we
2431    do not need to worry about setting errno to EDOM.
2432    If expanding via optab fails, lower expression to (int)(floor(x)).
2433    EXP is the expression that is a call to the builtin function;
2434    if convenient, the result should be placed in TARGET.  */
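/* E.g. if the target has no lfloor optab, __builtin_lfloor (x) ends up
   expanded roughly as (long) floor (x) through the fallback path below
   (a sketch of the intent, not the exact RTL).  */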
2435 
2436 static rtx
2437 expand_builtin_int_roundingfn (tree exp, rtx target)
2438 {
2439   convert_optab builtin_optab;
2440   rtx op0, tmp;
2441   rtx_insn *insns;
2442   tree fndecl = get_callee_fndecl (exp);
2443   enum built_in_function fallback_fn;
2444   tree fallback_fndecl;
2445   machine_mode mode;
2446   tree arg;
2447 
2448   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2449     gcc_unreachable ();
2450 
2451   arg = CALL_EXPR_ARG (exp, 0);
2452 
2453   switch (DECL_FUNCTION_CODE (fndecl))
2454     {
2455     CASE_FLT_FN (BUILT_IN_ICEIL):
2456     CASE_FLT_FN (BUILT_IN_LCEIL):
2457     CASE_FLT_FN (BUILT_IN_LLCEIL):
2458       builtin_optab = lceil_optab;
2459       fallback_fn = BUILT_IN_CEIL;
2460       break;
2461 
2462     CASE_FLT_FN (BUILT_IN_IFLOOR):
2463     CASE_FLT_FN (BUILT_IN_LFLOOR):
2464     CASE_FLT_FN (BUILT_IN_LLFLOOR):
2465       builtin_optab = lfloor_optab;
2466       fallback_fn = BUILT_IN_FLOOR;
2467       break;
2468 
2469     default:
2470       gcc_unreachable ();
2471     }
2472 
2473   /* Make a suitable register to place result in.  */
2474   mode = TYPE_MODE (TREE_TYPE (exp));
2475 
2476   target = gen_reg_rtx (mode);
2477 
2478   /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2479      need to expand the argument again.  This way, we will not perform
2480      side-effects more than once.  */
2481   CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2482 
2483   op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2484 
2485   start_sequence ();
2486 
2487   /* Compute into TARGET.  */
2488   if (expand_sfix_optab (target, op0, builtin_optab))
2489     {
2490       /* Output the entire sequence.  */
2491       insns = get_insns ();
2492       end_sequence ();
2493       emit_insn (insns);
2494       return target;
2495     }
2496 
2497   /* If we were unable to expand via the builtin, stop the sequence
2498      (without outputting the insns).  */
2499   end_sequence ();
2500 
2501   /* Fall back to floating point rounding optab.  */
2502   fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
2503 
2504   /* For non-C99 targets we may end up without a fallback fndecl here
2505      if the user called __builtin_lfloor directly.  In this case emit
2506      a call to the floor/ceil variants nevertheless.  This should result
2507      in the best user experience for targets without full C99 support.  */
2508   if (fallback_fndecl == NULL_TREE)
2509     {
2510       tree fntype;
2511       const char *name = NULL;
2512 
2513       switch (DECL_FUNCTION_CODE (fndecl))
2514 	{
2515 	case BUILT_IN_ICEIL:
2516 	case BUILT_IN_LCEIL:
2517 	case BUILT_IN_LLCEIL:
2518 	  name = "ceil";
2519 	  break;
2520 	case BUILT_IN_ICEILF:
2521 	case BUILT_IN_LCEILF:
2522 	case BUILT_IN_LLCEILF:
2523 	  name = "ceilf";
2524 	  break;
2525 	case BUILT_IN_ICEILL:
2526 	case BUILT_IN_LCEILL:
2527 	case BUILT_IN_LLCEILL:
2528 	  name = "ceill";
2529 	  break;
2530 	case BUILT_IN_IFLOOR:
2531 	case BUILT_IN_LFLOOR:
2532 	case BUILT_IN_LLFLOOR:
2533 	  name = "floor";
2534 	  break;
2535 	case BUILT_IN_IFLOORF:
2536 	case BUILT_IN_LFLOORF:
2537 	case BUILT_IN_LLFLOORF:
2538 	  name = "floorf";
2539 	  break;
2540 	case BUILT_IN_IFLOORL:
2541 	case BUILT_IN_LFLOORL:
2542 	case BUILT_IN_LLFLOORL:
2543 	  name = "floorl";
2544 	  break;
2545 	default:
2546 	  gcc_unreachable ();
2547 	}
2548 
2549       fntype = build_function_type_list (TREE_TYPE (arg),
2550 					 TREE_TYPE (arg), NULL_TREE);
2551       fallback_fndecl = build_fn_decl (name, fntype);
2552     }
2553 
2554   exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
2555 
2556   tmp = expand_normal (exp);
2557   tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
2558 
2559   /* Truncate the result of floating point optab to integer
2560      via expand_fix ().  */
2561   target = gen_reg_rtx (mode);
2562   expand_fix (target, tmp, 0);
2563 
2564   return target;
2565 }
2566 
2567 /* Expand a call to one of the builtin math functions doing integer
2568    conversion (lrint).
2569    Return 0 if a normal call should be emitted rather than expanding the
2570    function in-line.  EXP is the expression that is a call to the builtin
2571    function; if convenient, the result should be placed in TARGET.  */
2572 
2573 static rtx
2574 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
2575 {
2576   convert_optab builtin_optab;
2577   rtx op0;
2578   rtx_insn *insns;
2579   tree fndecl = get_callee_fndecl (exp);
2580   tree arg;
2581   machine_mode mode;
2582   enum built_in_function fallback_fn = BUILT_IN_NONE;
2583 
2584   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
2585      gcc_unreachable ();
2586 
2587   arg = CALL_EXPR_ARG (exp, 0);
2588 
2589   switch (DECL_FUNCTION_CODE (fndecl))
2590     {
2591     CASE_FLT_FN (BUILT_IN_IRINT):
2592       fallback_fn = BUILT_IN_LRINT;
2593       /* FALLTHRU */
2594     CASE_FLT_FN (BUILT_IN_LRINT):
2595     CASE_FLT_FN (BUILT_IN_LLRINT):
2596       builtin_optab = lrint_optab;
2597       break;
2598 
2599     CASE_FLT_FN (BUILT_IN_IROUND):
2600       fallback_fn = BUILT_IN_LROUND;
2601       /* FALLTHRU */
2602     CASE_FLT_FN (BUILT_IN_LROUND):
2603     CASE_FLT_FN (BUILT_IN_LLROUND):
2604       builtin_optab = lround_optab;
2605       break;
2606 
2607     default:
2608       gcc_unreachable ();
2609     }
2610 
2611   /* There's no easy way to detect the case we need to set EDOM.  */
2612   if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
2613     return NULL_RTX;
2614 
2615   /* Make a suitable register to place result in.  */
2616   mode = TYPE_MODE (TREE_TYPE (exp));
2617 
2618   /* There's no easy way to detect the case we need to set EDOM.  */
2619   if (!flag_errno_math)
2620     {
2621       rtx result = gen_reg_rtx (mode);
2622 
2623       /* Wrap the computation of the argument in a SAVE_EXPR, as we may
2624 	 need to expand the argument again.  This way, we will not perform
2625 	 side-effects more than once.  */
2626       CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
2627 
2628       op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
2629 
2630       start_sequence ();
2631 
2632       if (expand_sfix_optab (result, op0, builtin_optab))
2633 	{
2634 	  /* Output the entire sequence.  */
2635 	  insns = get_insns ();
2636 	  end_sequence ();
2637 	  emit_insn (insns);
2638 	  return result;
2639 	}
2640 
2641       /* If we were unable to expand via the builtin, stop the sequence
2642 	 (without outputting the insns) and call to the library function
2643 	 with the stabilized argument list.  */
2644       end_sequence ();
2645     }
2646 
2647   if (fallback_fn != BUILT_IN_NONE)
2648     {
2649       /* Fall back to rounding to long int.  Use implicit_p 0 - for non-C99
2650 	 targets, (int) round (x) should never be transformed into
2651 	 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
2652 	 a call to lround in the hope that the target provides at least some
2653 	 C99 functions.  This should result in the best user experience for
2654 	 targets without full C99 support.  */
2655       tree fallback_fndecl = mathfn_built_in_1
2656 	(TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
2657 
2658       exp = build_call_nofold_loc (EXPR_LOCATION (exp),
2659 				   fallback_fndecl, 1, arg);
2660 
2661       target = expand_call (exp, NULL_RTX, target == const0_rtx);
2662       target = maybe_emit_group_store (target, TREE_TYPE (exp));
2663       return convert_to_mode (mode, target, 0);
2664     }
2665 
2666   return expand_call (exp, target, target == const0_rtx);
2667 }
2668 
2669 /* Expand a call to the powi built-in mathematical function.  Return NULL_RTX if
2670    a normal call should be emitted rather than expanding the function
2671    in-line.  EXP is the expression that is a call to the builtin
2672    function; if convenient, the result should be placed in TARGET.  */
2673 
2674 static rtx
2675 expand_builtin_powi (tree exp, rtx target)
2676 {
2677   tree arg0, arg1;
2678   rtx op0, op1;
2679   machine_mode mode;
2680   machine_mode mode2;
2681 
2682   if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
2683     return NULL_RTX;
2684 
2685   arg0 = CALL_EXPR_ARG (exp, 0);
2686   arg1 = CALL_EXPR_ARG (exp, 1);
2687   mode = TYPE_MODE (TREE_TYPE (exp));
2688 
2689   /* Emit a libcall to libgcc.  */
2690 
2691   /* Mode of the 2nd argument must match that of an int.  */
2692   mode2 = mode_for_size (INT_TYPE_SIZE, MODE_INT, 0);
2693 
2694   if (target == NULL_RTX)
2695     target = gen_reg_rtx (mode);
2696 
2697   op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
2698   if (GET_MODE (op0) != mode)
2699     op0 = convert_to_mode (mode, op0, 0);
2700   op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
2701   if (GET_MODE (op1) != mode2)
2702     op1 = convert_to_mode (mode2, op1, 0);
2703 
2704   target = emit_library_call_value (optab_libfunc (powi_optab, mode),
2705 				    target, LCT_CONST, mode, 2,
2706 				    op0, mode, op1, mode2);
2707 
2708   return target;
2709 }
2710 
2711 /* Expand expression EXP which is a call to the strlen builtin.  Return
2712    NULL_RTX if we failed; the caller should emit a normal call, otherwise
2713    try to get the result in TARGET, if convenient.  */
2714 
2715 static rtx
2716 expand_builtin_strlen (tree exp, rtx target,
2717 		       machine_mode target_mode)
2718 {
2719   if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
2720     return NULL_RTX;
2721   else
2722     {
2723       struct expand_operand ops[4];
2724       rtx pat;
2725       tree len;
2726       tree src = CALL_EXPR_ARG (exp, 0);
2727       rtx src_reg;
2728       rtx_insn *before_strlen;
2729       machine_mode insn_mode = target_mode;
2730       enum insn_code icode = CODE_FOR_nothing;
2731       unsigned int align;
2732 
2733       /* If the length can be computed at compile-time, return it.  */
2734       len = c_strlen (src, 0);
2735       if (len)
2736 	return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2737 
2738       /* If the length can be computed at compile-time and is constant
2739 	 integer, but there are side-effects in src, evaluate
2740 	 src for side-effects, then return len.
2741 	 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
2742 	 can be optimized into: i++; x = 3;  */
2743       len = c_strlen (src, 1);
2744       if (len && TREE_CODE (len) == INTEGER_CST)
2745 	{
2746 	  expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2747 	  return expand_expr (len, target, target_mode, EXPAND_NORMAL);
2748 	}
2749 
2750       align = get_pointer_alignment (src) / BITS_PER_UNIT;
2751 
2752       /* If SRC is not a pointer type, don't do this operation inline.  */
2753       if (align == 0)
2754 	return NULL_RTX;
2755 
2756       /* Bail out if we can't compute strlen in the right mode.  */
2757       while (insn_mode != VOIDmode)
2758 	{
2759 	  icode = optab_handler (strlen_optab, insn_mode);
2760 	  if (icode != CODE_FOR_nothing)
2761 	    break;
2762 
2763 	  insn_mode = GET_MODE_WIDER_MODE (insn_mode);
2764 	}
2765       if (insn_mode == VOIDmode)
2766 	return NULL_RTX;
2767 
2768       /* Make a place to hold the source address.  We will not expand
2769 	 the actual source until we are sure that the expansion will
2770 	 not fail -- there are trees that cannot be expanded twice.  */
2771       src_reg = gen_reg_rtx (Pmode);
2772 
2773       /* Mark the beginning of the strlen sequence so we can emit the
2774 	 source operand later.  */
2775       before_strlen = get_last_insn ();
2776 
2777       create_output_operand (&ops[0], target, insn_mode);
2778       create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
2779       create_integer_operand (&ops[2], 0);
2780       create_integer_operand (&ops[3], align);
2781       if (!maybe_expand_insn (icode, 4, ops))
2782 	return NULL_RTX;
2783 
2784       /* Now that we are assured of success, expand the source.  */
2785       start_sequence ();
2786       pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
2787       if (pat != src_reg)
2788 	{
2789 #ifdef POINTERS_EXTEND_UNSIGNED
2790 	  if (GET_MODE (pat) != Pmode)
2791 	    pat = convert_to_mode (Pmode, pat,
2792 				   POINTERS_EXTEND_UNSIGNED);
2793 #endif
2794 	  emit_move_insn (src_reg, pat);
2795 	}
2796       pat = get_insns ();
2797       end_sequence ();
2798 
2799       if (before_strlen)
2800 	emit_insn_after (pat, before_strlen);
2801       else
2802 	emit_insn_before (pat, get_insns ());
2803 
2804       /* Return the value in the proper mode for this function.  */
2805       if (GET_MODE (ops[0].value) == target_mode)
2806 	target = ops[0].value;
2807       else if (target != 0)
2808 	convert_move (target, ops[0].value, 0);
2809       else
2810 	target = convert_to_mode (target_mode, ops[0].value, 0);
2811 
2812       return target;
2813     }
2814 }
2815 
2816 /* Callback routine for store_by_pieces.  Read GET_MODE_BITSIZE (MODE)
2817    bytes from constant string DATA + OFFSET and return it as target
2818    constant.  */
2819 
2820 static rtx
2821 builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
2822 			 machine_mode mode)
2823 {
2824   const char *str = (const char *) data;
2825 
2826   gcc_assert (offset >= 0
2827 	      && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
2828 		  <= strlen (str) + 1));
2829 
2830   return c_readstr (str + offset, mode);
2831 }
2832 
2833 /* LEN specifies the length of the block for the memcpy/memset operation.
2834    Figure out its range and put it into MIN_SIZE/MAX_SIZE.
2835    In some cases we can make a very likely guess at the maximum size, which
2836    we then store in PROBABLE_MAX_SIZE.  */
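/* A sketch of the intended behavior, assuming range information is
   available for N:

     void f (char *a, char *b, unsigned int n)
     {
       if (n >= 4 && n <= 32)
	 memcpy (a, b, n);
     }

   Here a VR_RANGE of [4, 32] on N gives *MIN_SIZE = 4 and
   *MAX_SIZE = *PROBABLE_MAX_SIZE = 32.  */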
2837 
2838 static void
2839 determine_block_size (tree len, rtx len_rtx,
2840 		      unsigned HOST_WIDE_INT *min_size,
2841 		      unsigned HOST_WIDE_INT *max_size,
2842 		      unsigned HOST_WIDE_INT *probable_max_size)
2843 {
2844   if (CONST_INT_P (len_rtx))
2845     {
2846       *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
2847       return;
2848     }
2849   else
2850     {
2851       wide_int min, max;
2852       enum value_range_type range_type = VR_UNDEFINED;
2853 
2854       /* Determine bounds from the type.  */
2855       if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
2856 	*min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
2857       else
2858 	*min_size = 0;
2859       if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
2860 	*probable_max_size = *max_size
2861 	  = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
2862       else
2863 	*probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
2864 
2865       if (TREE_CODE (len) == SSA_NAME)
2866 	range_type = get_range_info (len, &min, &max);
2867       if (range_type == VR_RANGE)
2868 	{
2869 	  if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
2870 	    *min_size = min.to_uhwi ();
2871 	  if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
2872 	    *probable_max_size = *max_size = max.to_uhwi ();
2873 	}
2874       else if (range_type == VR_ANTI_RANGE)
2875 	{
2876 	  /* An anti range 0...N lets us determine the minimal size as N+1.  */
2877 	  if (min == 0)
2878 	    {
2879 	      if (wi::fits_uhwi_p (max) && max.to_uhwi () + 1 != 0)
2880 		*min_size = max.to_uhwi () + 1;
2881 	    }
2882 	  /* Code like
2883 
2884 	     int n;
2885 	     if (n < 100)
2886 	       memcpy (a, b, n)
2887 
2888 	     produces an anti range allowing negative values of N.  We can
2889 	     still use the information and guess that N is not negative.
2890 	     */
2891 	  else if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
2892 	    *probable_max_size = min.to_uhwi () - 1;
2893 	}
2894     }
2895   gcc_checking_assert (*max_size <=
2896 		       (unsigned HOST_WIDE_INT)
2897 			  GET_MODE_MASK (GET_MODE (len_rtx)));
2898 }
2899 
2900 /* Helper function to do the actual work for expand_builtin_memcpy.  */
2901 
2902 static rtx
2903 expand_builtin_memcpy_args (tree dest, tree src, tree len, rtx target, tree exp)
2904 {
2905   const char *src_str;
2906   unsigned int src_align = get_pointer_alignment (src);
2907   unsigned int dest_align = get_pointer_alignment (dest);
2908   rtx dest_mem, src_mem, dest_addr, len_rtx;
2909   HOST_WIDE_INT expected_size = -1;
2910   unsigned int expected_align = 0;
2911   unsigned HOST_WIDE_INT min_size;
2912   unsigned HOST_WIDE_INT max_size;
2913   unsigned HOST_WIDE_INT probable_max_size;
2914 
2915   /* If DEST is not a pointer type, call the normal function.  */
2916   if (dest_align == 0)
2917     return NULL_RTX;
2918 
2919   /* If SRC is not a pointer type, don't do this
2920      operation in-line.  */
2921   if (src_align == 0)
2922     return NULL_RTX;
2923 
2924   if (currently_expanding_gimple_stmt)
2925     stringop_block_profile (currently_expanding_gimple_stmt,
2926 			    &expected_align, &expected_size);
2927 
2928   if (expected_align < dest_align)
2929     expected_align = dest_align;
2930   dest_mem = get_memory_rtx (dest, len);
2931   set_mem_align (dest_mem, dest_align);
2932   len_rtx = expand_normal (len);
2933   determine_block_size (len, len_rtx, &min_size, &max_size,
2934 			&probable_max_size);
2935   src_str = c_getstr (src);
2936 
2937   /* If SRC is a string constant and block move would be done
2938      by pieces, we can avoid loading the string from memory
2939      and only store the computed constants.  */
2940   if (src_str
2941       && CONST_INT_P (len_rtx)
2942       && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
2943       && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
2944 			      CONST_CAST (char *, src_str),
2945 			      dest_align, false))
2946     {
2947       dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
2948 				  builtin_memcpy_read_str,
2949 				  CONST_CAST (char *, src_str),
2950 				  dest_align, false, 0);
2951       dest_mem = force_operand (XEXP (dest_mem, 0), target);
2952       dest_mem = convert_memory_address (ptr_mode, dest_mem);
2953       return dest_mem;
2954     }
2955 
2956   src_mem = get_memory_rtx (src, len);
2957   set_mem_align (src_mem, src_align);
2958 
2959   /* Copy word part most expediently.  */
2960   dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx,
2961 				     CALL_EXPR_TAILCALL (exp)
2962 				     ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
2963 				     expected_align, expected_size,
2964 				     min_size, max_size, probable_max_size);
2965 
2966   if (dest_addr == 0)
2967     {
2968       dest_addr = force_operand (XEXP (dest_mem, 0), target);
2969       dest_addr = convert_memory_address (ptr_mode, dest_addr);
2970     }
2971 
2972   return dest_addr;
2973 }
2974 
2975 /* Expand a call EXP to the memcpy builtin.
2976    Return NULL_RTX if we failed, the caller should emit a normal call,
2977    otherwise try to get the result in TARGET, if convenient (and in
2978    mode MODE if that's convenient).  */
2979 
2980 static rtx
2981 expand_builtin_memcpy (tree exp, rtx target)
2982 {
2983   if (!validate_arglist (exp,
2984  			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2985     return NULL_RTX;
2986   else
2987     {
2988       tree dest = CALL_EXPR_ARG (exp, 0);
2989       tree src = CALL_EXPR_ARG (exp, 1);
2990       tree len = CALL_EXPR_ARG (exp, 2);
2991       return expand_builtin_memcpy_args (dest, src, len, target, exp);
2992     }
2993 }
2994 
2995 /* Expand an instrumented call EXP to the memcpy builtin.
2996    Return NULL_RTX if we failed, the caller should emit a normal call,
2997    otherwise try to get the result in TARGET, if convenient (and in
2998    mode MODE if that's convenient).  */
2999 
3000 static rtx
3001 expand_builtin_memcpy_with_bounds (tree exp, rtx target)
3002 {
3003   if (!validate_arglist (exp,
3004 			 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3005 			 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3006 			 INTEGER_TYPE, VOID_TYPE))
3007     return NULL_RTX;
3008   else
3009     {
3010       tree dest = CALL_EXPR_ARG (exp, 0);
3011       tree src = CALL_EXPR_ARG (exp, 2);
3012       tree len = CALL_EXPR_ARG (exp, 4);
3013       rtx res = expand_builtin_memcpy_args (dest, src, len, target, exp);
3014 
3015       /* Return src bounds with the result.  */
3016       if (res)
3017 	{
3018 	  rtx bnd = force_reg (targetm.chkp_bound_mode (),
3019 			       expand_normal (CALL_EXPR_ARG (exp, 1)));
3020 	  res = chkp_join_splitted_slot (res, bnd);
3021 	}
3022       return res;
3023     }
3024 }
3025 
3026 /* Expand a call EXP to the mempcpy builtin.
3027    Return NULL_RTX if we failed; the caller should emit a normal call,
3028    otherwise try to get the result in TARGET, if convenient (and in
3029    mode MODE if that's convenient).  If ENDP is 0 return the
3030    destination pointer, if ENDP is 1 return the end pointer ala
3031    mempcpy, and if ENDP is 2 return the end pointer minus one ala
3032    stpcpy.  */
3033 
3034 static rtx
3035 expand_builtin_mempcpy (tree exp, rtx target, machine_mode mode)
3036 {
3037   if (!validate_arglist (exp,
3038  			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3039     return NULL_RTX;
3040   else
3041     {
3042       tree dest = CALL_EXPR_ARG (exp, 0);
3043       tree src = CALL_EXPR_ARG (exp, 1);
3044       tree len = CALL_EXPR_ARG (exp, 2);
3045       return expand_builtin_mempcpy_args (dest, src, len,
3046 					  target, mode, /*endp=*/ 1,
3047 					  exp);
3048     }
3049 }
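
/* Illustrative note: mempcpy differs from memcpy only in its return
   value.  Roughly,

     p = mempcpy (dst, src, n);

   behaves like memcpy (dst, src, n) followed by p = (char *) dst + n,
   which is why the expansion above passes ENDP == 1 down to
   expand_builtin_mempcpy_args.  */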
3050 
3051 /* Expand an instrumented call EXP to the mempcpy builtin.
3052    Return NULL_RTX if we failed; the caller should emit a normal call,
3053    otherwise try to get the result in TARGET, if convenient (and in
3054    mode MODE if that's convenient).  */
3055 
3056 static rtx
3057 expand_builtin_mempcpy_with_bounds (tree exp, rtx target, machine_mode mode)
3058 {
3059   if (!validate_arglist (exp,
3060 			 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3061 			 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3062 			 INTEGER_TYPE, VOID_TYPE))
3063     return NULL_RTX;
3064   else
3065     {
3066       tree dest = CALL_EXPR_ARG (exp, 0);
3067       tree src = CALL_EXPR_ARG (exp, 2);
3068       tree len = CALL_EXPR_ARG (exp, 4);
3069       rtx res = expand_builtin_mempcpy_args (dest, src, len, target,
3070 					     mode, 1, exp);
3071 
3072       /* Return src bounds with the result.  */
3073       if (res)
3074 	{
3075 	  rtx bnd = force_reg (targetm.chkp_bound_mode (),
3076 			       expand_normal (CALL_EXPR_ARG (exp, 1)));
3077 	  res = chkp_join_splitted_slot (res, bnd);
3078 	}
3079       return res;
3080     }
3081 }
3082 
3083 /* Helper function to do the actual work for expand_builtin_mempcpy.  The
3084    arguments to the builtin_mempcpy call DEST, SRC, and LEN are broken out
3085    so that this can also be called without constructing an actual CALL_EXPR.
3086    The other arguments and return value are the same as for
3087    expand_builtin_mempcpy.  */
3088 
3089 static rtx
3090 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
3091 			     rtx target, machine_mode mode, int endp,
3092 			     tree orig_exp)
3093 {
3094   tree fndecl = get_callee_fndecl (orig_exp);
3095 
3096   /* If return value is ignored, transform mempcpy into memcpy.  */
3097   if (target == const0_rtx
3098       && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP
3099       && builtin_decl_implicit_p (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP))
3100     {
3101       tree fn = builtin_decl_implicit (BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP);
3102       tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3103 					   dest, src, len);
3104       return expand_expr (result, target, mode, EXPAND_NORMAL);
3105     }
3106   else if (target == const0_rtx
3107 	   && builtin_decl_implicit_p (BUILT_IN_MEMCPY))
3108     {
3109       tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3110       tree result = build_call_nofold_loc (UNKNOWN_LOCATION, fn, 3,
3111 					   dest, src, len);
3112       return expand_expr (result, target, mode, EXPAND_NORMAL);
3113     }
3114   else
3115     {
3116       const char *src_str;
3117       unsigned int src_align = get_pointer_alignment (src);
3118       unsigned int dest_align = get_pointer_alignment (dest);
3119       rtx dest_mem, src_mem, len_rtx;
3120 
3121       /* If either SRC or DEST is not a pointer type, don't do this
3122 	 operation in-line.  */
3123       if (dest_align == 0 || src_align == 0)
3124 	return NULL_RTX;
3125 
3126       /* If LEN is not constant, call the normal function.  */
3127       if (! tree_fits_uhwi_p (len))
3128 	return NULL_RTX;
3129 
3130       len_rtx = expand_normal (len);
3131       src_str = c_getstr (src);
3132 
3133       /* If SRC is a string constant and block move would be done
3134 	 by pieces, we can avoid loading the string from memory
3135 	 and only store the computed constants.  */
3136       if (src_str
3137 	  && CONST_INT_P (len_rtx)
3138 	  && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
3139 	  && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
3140 				  CONST_CAST (char *, src_str),
3141 				  dest_align, false))
3142 	{
3143 	  dest_mem = get_memory_rtx (dest, len);
3144 	  set_mem_align (dest_mem, dest_align);
3145 	  dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
3146 				      builtin_memcpy_read_str,
3147 				      CONST_CAST (char *, src_str),
3148 				      dest_align, false, endp);
3149 	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3150 	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
3151 	  return dest_mem;
3152 	}
3153 
3154       if (CONST_INT_P (len_rtx)
3155 	  && can_move_by_pieces (INTVAL (len_rtx),
3156 				 MIN (dest_align, src_align)))
3157 	{
3158 	  dest_mem = get_memory_rtx (dest, len);
3159 	  set_mem_align (dest_mem, dest_align);
3160 	  src_mem = get_memory_rtx (src, len);
3161 	  set_mem_align (src_mem, src_align);
3162 	  dest_mem = move_by_pieces (dest_mem, src_mem, INTVAL (len_rtx),
3163 				     MIN (dest_align, src_align), endp);
3164 	  dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3165 	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
3166 	  return dest_mem;
3167 	}
3168 
3169       return NULL_RTX;
3170     }
3171 }
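
/* Worked example of the ENDP convention used above (illustrative):
   when copying N bytes from SRC to DEST,
     ENDP == 0 returns DEST          (memcpy-style),
     ENDP == 1 returns DEST + N      (mempcpy-style),
     ENDP == 2 returns DEST + N - 1  (stpcpy-style, the address of the
     copied NUL terminator).  */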
3172 
3173 /* Expand into a movstr instruction, if one is available.  Return NULL_RTX if
3174    we failed; the caller should emit a normal call, otherwise try to
3175    get the result in TARGET, if convenient.  If ENDP is 0 return the
3176    destination pointer, if ENDP is 1 return the end pointer ala
3177    mempcpy, and if ENDP is 2 return the end pointer minus one ala
3178    stpcpy.  */
3179 
3180 static rtx
3181 expand_movstr (tree dest, tree src, rtx target, int endp)
3182 {
3183   struct expand_operand ops[3];
3184   rtx dest_mem;
3185   rtx src_mem;
3186 
3187   if (!targetm.have_movstr ())
3188     return NULL_RTX;
3189 
3190   dest_mem = get_memory_rtx (dest, NULL);
3191   src_mem = get_memory_rtx (src, NULL);
3192   if (!endp)
3193     {
3194       target = force_reg (Pmode, XEXP (dest_mem, 0));
3195       dest_mem = replace_equiv_address (dest_mem, target);
3196     }
3197 
3198   create_output_operand (&ops[0], endp ? target : NULL_RTX, Pmode);
3199   create_fixed_operand (&ops[1], dest_mem);
3200   create_fixed_operand (&ops[2], src_mem);
3201   if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
3202     return NULL_RTX;
3203 
3204   if (endp && target != const0_rtx)
3205     {
3206       target = ops[0].value;
3207       /* movstr is supposed to set end to the address of the NUL
3208 	 terminator.  If the caller requested a mempcpy-like return value,
3209 	 adjust it.  */
3210       if (endp == 1)
3211 	{
3212 	  rtx tem = plus_constant (GET_MODE (target),
3213 				   gen_lowpart (GET_MODE (target), target), 1);
3214 	  emit_move_insn (target, force_operand (tem, NULL_RTX));
3215 	}
3216     }
3217   return target;
3218 }
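
/* Illustrative example of the return-value adjustment above: after
   copying the four bytes "abc\0", the movstr pattern leaves the output
   pointing at the NUL (DEST + 3), which is exactly what a stpcpy-style
   caller (ENDP == 2) wants.  For a mempcpy-style caller (ENDP == 1) the
   code adds 1 to yield DEST + 4.  */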
3219 
3220 /* Expand expression EXP, which is a call to the strcpy builtin.  Return
3221    NULL_RTX if we failed; the caller should emit a normal call, otherwise
3222    try to get the result in TARGET, if convenient (and in mode MODE if that's
3223    convenient).  */
3224 
3225 static rtx
3226 expand_builtin_strcpy (tree exp, rtx target)
3227 {
3228   if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3229    {
3230      tree dest = CALL_EXPR_ARG (exp, 0);
3231      tree src = CALL_EXPR_ARG (exp, 1);
3232      return expand_builtin_strcpy_args (dest, src, target);
3233    }
3234    return NULL_RTX;
3235 }
3236 
3237 /* Helper function to do the actual work for expand_builtin_strcpy.  The
3238    arguments to the builtin_strcpy call DEST and SRC are broken out
3239    so that this can also be called without constructing an actual CALL_EXPR.
3240    The other arguments and return value are the same as for
3241    expand_builtin_strcpy.  */
3242 
3243 static rtx
3244 expand_builtin_strcpy_args (tree dest, tree src, rtx target)
3245 {
3246   return expand_movstr (dest, src, target, /*endp=*/0);
3247 }
3248 
3249 /* Expand a call EXP to the stpcpy builtin.
3250    Return NULL_RTX if we failed; the caller should emit a normal call,
3251    otherwise try to get the result in TARGET, if convenient (and in
3252    mode MODE if that's convenient).  */
3253 
3254 static rtx
3255 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
3256 {
3257   tree dst, src;
3258   location_t loc = EXPR_LOCATION (exp);
3259 
3260   if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3261     return NULL_RTX;
3262 
3263   dst = CALL_EXPR_ARG (exp, 0);
3264   src = CALL_EXPR_ARG (exp, 1);
3265 
3266   /* If return value is ignored, transform stpcpy into strcpy.  */
3267   if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
3268     {
3269       tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3270       tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
3271       return expand_expr (result, target, mode, EXPAND_NORMAL);
3272     }
3273   else
3274     {
3275       tree len, lenp1;
3276       rtx ret;
3277 
3278       /* Ensure we get an actual string whose length can be evaluated at
3279 	 compile-time, not an expression containing a string.  This is
3280 	 because the latter will potentially produce pessimized code
3281 	 when used to produce the return value.  */
3282       if (! c_getstr (src) || ! (len = c_strlen (src, 0)))
3283 	return expand_movstr (dst, src, target, /*endp=*/2);
3284 
3285       lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
3286       ret = expand_builtin_mempcpy_args (dst, src, lenp1,
3287 					 target, mode, /*endp=*/2,
3288 					 exp);
3289 
3290       if (ret)
3291 	return ret;
3292 
3293       if (TREE_CODE (len) == INTEGER_CST)
3294 	{
3295 	  rtx len_rtx = expand_normal (len);
3296 
3297 	  if (CONST_INT_P (len_rtx))
3298 	    {
3299 	      ret = expand_builtin_strcpy_args (dst, src, target);
3300 
3301 	      if (ret)
3302 		{
3303 		  if (! target)
3304 		    {
3305 		      if (mode != VOIDmode)
3306 			target = gen_reg_rtx (mode);
3307 		      else
3308 			target = gen_reg_rtx (GET_MODE (ret));
3309 		    }
3310 		  if (GET_MODE (target) != GET_MODE (ret))
3311 		    ret = gen_lowpart (GET_MODE (target), ret);
3312 
3313 		  ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
3314 		  ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
3315 		  gcc_assert (ret);
3316 
3317 		  return target;
3318 		}
3319 	    }
3320 	}
3321 
3322       return expand_movstr (dst, src, target, /*endp=*/2);
3323     }
3324 }
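
/* Illustrative sketch of the stpcpy expansion above: when the result is
   unused,

     stpcpy (d, s);

   is expanded as if the user had written strcpy (d, s).  When S is a
   string literal such as "abc", the call goes through
   expand_builtin_mempcpy_args with LEN + 1 == 4 and ENDP == 2, so the
   value returned is d + 3, the address of the copied NUL.  */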
3325 
3326 /* Callback routine for store_by_pieces.  Read GET_MODE_SIZE (MODE)
3327    bytes from constant string DATA + OFFSET and return it as target
3328    constant.  */
3329 
3330 rtx
3331 builtin_strncpy_read_str (void *data, HOST_WIDE_INT offset,
3332 			  machine_mode mode)
3333 {
3334   const char *str = (const char *) data;
3335 
3336   if ((unsigned HOST_WIDE_INT) offset > strlen (str))
3337     return const0_rtx;
3338 
3339   return c_readstr (str + offset, mode);
3340 }
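
/* Illustrative note: unlike builtin_memcpy_read_str, the callback above
   returns const0_rtx once OFFSET moves past the end of the string, e.g.
   for DATA == "ab" any OFFSET > 2 yields a zero constant.  This is what
   provides the trailing NUL padding that strncpy requires.  */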
3341 
3342 /* Expand expression EXP, which is a call to the strncpy builtin.  Return
3343    NULL_RTX if we failed; the caller should emit a normal call.  */
3344 
3345 static rtx
3346 expand_builtin_strncpy (tree exp, rtx target)
3347 {
3348   location_t loc = EXPR_LOCATION (exp);
3349 
3350   if (validate_arglist (exp,
3351  			POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3352     {
3353       tree dest = CALL_EXPR_ARG (exp, 0);
3354       tree src = CALL_EXPR_ARG (exp, 1);
3355       tree len = CALL_EXPR_ARG (exp, 2);
3356       tree slen = c_strlen (src, 1);
3357 
3358       /* We must be passed a constant len and src parameter.  */
3359       if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
3360 	return NULL_RTX;
3361 
3362       slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
3363 
3364       /* We're required to pad with trailing zeros if the requested
3365 	 len is greater than strlen(s2)+1.  In that case try to
3366 	 use store_by_pieces; if it fails, punt.  */
3367       if (tree_int_cst_lt (slen, len))
3368 	{
3369 	  unsigned int dest_align = get_pointer_alignment (dest);
3370 	  const char *p = c_getstr (src);
3371 	  rtx dest_mem;
3372 
3373 	  if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
3374 	      || !can_store_by_pieces (tree_to_uhwi (len),
3375 				       builtin_strncpy_read_str,
3376 				       CONST_CAST (char *, p),
3377 				       dest_align, false))
3378 	    return NULL_RTX;
3379 
3380 	  dest_mem = get_memory_rtx (dest, len);
3381 	  store_by_pieces (dest_mem, tree_to_uhwi (len),
3382 			   builtin_strncpy_read_str,
3383 			   CONST_CAST (char *, p), dest_align, false, 0);
3384 	  dest_mem = force_operand (XEXP (dest_mem, 0), target);
3385 	  dest_mem = convert_memory_address (ptr_mode, dest_mem);
3386 	  return dest_mem;
3387 	}
3388     }
3389   return NULL_RTX;
3390 }
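
/* Illustrative example for the padding case handled above: for

     strncpy (buf, "ab", 5);

   the source length plus one (3) is less than LEN (5), so
   store_by_pieces writes the bytes 'a', 'b' followed by three NULs,
   satisfying strncpy's requirement to pad with trailing zeros.  */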
3391 
3392 /* Callback routine for store_by_pieces.  Read GET_MODE_SIZE (MODE)
3393    bytes from constant string DATA + OFFSET and return it as target
3394    constant.  */
3395 
3396 rtx
3397 builtin_memset_read_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3398 			 machine_mode mode)
3399 {
3400   const char *c = (const char *) data;
3401   char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
3402 
3403   memset (p, *c, GET_MODE_SIZE (mode));
3404 
3405   return c_readstr (p, mode);
3406 }
3407 
3408 /* Callback routine for store_by_pieces.  Return the RTL of a register
3409    containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
3410    char value given in the RTL register data.  For example, if mode is
3411    4 bytes wide, return the RTL for 0x01010101*data.  */
3412 
3413 static rtx
3414 builtin_memset_gen_str (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
3415 			machine_mode mode)
3416 {
3417   rtx target, coeff;
3418   size_t size;
3419   char *p;
3420 
3421   size = GET_MODE_SIZE (mode);
3422   if (size == 1)
3423     return (rtx) data;
3424 
3425   p = XALLOCAVEC (char, size);
3426   memset (p, 1, size);
3427   coeff = c_readstr (p, mode);
3428 
3429   target = convert_to_mode (mode, (rtx) data, 1);
3430   target = expand_mult (mode, target, coeff, NULL_RTX, 1);
3431   return force_reg (mode, target);
3432 }
3433 
3434 /* Expand expression EXP, which is a call to the memset builtin.  Return
3435    NULL_RTX if we failed; the caller should emit a normal call, otherwise
3436    try to get the result in TARGET, if convenient (and in mode MODE if that's
3437    convenient).  */
3438 
3439 static rtx
3440 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
3441 {
3442   if (!validate_arglist (exp,
3443  			 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3444     return NULL_RTX;
3445   else
3446     {
3447       tree dest = CALL_EXPR_ARG (exp, 0);
3448       tree val = CALL_EXPR_ARG (exp, 1);
3449       tree len = CALL_EXPR_ARG (exp, 2);
3450       return expand_builtin_memset_args (dest, val, len, target, mode, exp);
3451     }
3452 }
3453 
3454 /* Expand expression EXP, which is an instrumented call to the memset builtin.
3455    Return NULL_RTX if we failed; the caller should emit a normal call, otherwise
3456    try to get the result in TARGET, if convenient (and in mode MODE if that's
3457    convenient).  */
3458 
3459 static rtx
3460 expand_builtin_memset_with_bounds (tree exp, rtx target, machine_mode mode)
3461 {
3462   if (!validate_arglist (exp,
3463 			 POINTER_TYPE, POINTER_BOUNDS_TYPE,
3464 			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
3465     return NULL_RTX;
3466   else
3467     {
3468       tree dest = CALL_EXPR_ARG (exp, 0);
3469       tree val = CALL_EXPR_ARG (exp, 2);
3470       tree len = CALL_EXPR_ARG (exp, 3);
3471       rtx res = expand_builtin_memset_args (dest, val, len, target, mode, exp);
3472 
3473       /* Return src bounds with the result.  */
3474       if (res)
3475 	{
3476 	  rtx bnd = force_reg (targetm.chkp_bound_mode (),
3477 			       expand_normal (CALL_EXPR_ARG (exp, 1)));
3478 	  res = chkp_join_splitted_slot (res, bnd);
3479 	}
3480       return res;
3481     }
3482 }
3483 
3484 /* Helper function to do the actual work for expand_builtin_memset.  The
3485    arguments to the builtin_memset call DEST, VAL, and LEN are broken out
3486    so that this can also be called without constructing an actual CALL_EXPR.
3487    The other arguments and return value are the same as for
3488    expand_builtin_memset.  */
3489 
3490 static rtx
3491 expand_builtin_memset_args (tree dest, tree val, tree len,
3492 			    rtx target, machine_mode mode, tree orig_exp)
3493 {
3494   tree fndecl, fn;
3495   enum built_in_function fcode;
3496   machine_mode val_mode;
3497   char c;
3498   unsigned int dest_align;
3499   rtx dest_mem, dest_addr, len_rtx;
3500   HOST_WIDE_INT expected_size = -1;
3501   unsigned int expected_align = 0;
3502   unsigned HOST_WIDE_INT min_size;
3503   unsigned HOST_WIDE_INT max_size;
3504   unsigned HOST_WIDE_INT probable_max_size;
3505 
3506   dest_align = get_pointer_alignment (dest);
3507 
3508   /* If DEST is not a pointer type, don't do this operation in-line.  */
3509   if (dest_align == 0)
3510     return NULL_RTX;
3511 
3512   if (currently_expanding_gimple_stmt)
3513     stringop_block_profile (currently_expanding_gimple_stmt,
3514 			    &expected_align, &expected_size);
3515 
3516   if (expected_align < dest_align)
3517     expected_align = dest_align;
3518 
3519   /* If the LEN parameter is zero, return DEST.  */
3520   if (integer_zerop (len))
3521     {
3522       /* Evaluate and ignore VAL in case it has side-effects.  */
3523       expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
3524       return expand_expr (dest, target, mode, EXPAND_NORMAL);
3525     }
3526 
3527   /* Stabilize the arguments in case we fail.  */
3528   dest = builtin_save_expr (dest);
3529   val = builtin_save_expr (val);
3530   len = builtin_save_expr (len);
3531 
3532   len_rtx = expand_normal (len);
3533   determine_block_size (len, len_rtx, &min_size, &max_size,
3534 			&probable_max_size);
3535   dest_mem = get_memory_rtx (dest, len);
3536   val_mode = TYPE_MODE (unsigned_char_type_node);
3537 
3538   if (TREE_CODE (val) != INTEGER_CST)
3539     {
3540       rtx val_rtx;
3541 
3542       val_rtx = expand_normal (val);
3543       val_rtx = convert_to_mode (val_mode, val_rtx, 0);
3544 
3545       /* Assume that we can memset by pieces if we can store
3546 	 the coefficients by pieces (in the required modes).
3547 	 We can't pass builtin_memset_gen_str as that emits RTL.  */
3548       c = 1;
3549       if (tree_fits_uhwi_p (len)
3550 	  && can_store_by_pieces (tree_to_uhwi (len),
3551 				  builtin_memset_read_str, &c, dest_align,
3552 				  true))
3553 	{
3554 	  val_rtx = force_reg (val_mode, val_rtx);
3555 	  store_by_pieces (dest_mem, tree_to_uhwi (len),
3556 			   builtin_memset_gen_str, val_rtx, dest_align,
3557 			   true, 0);
3558 	}
3559       else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
3560 					dest_align, expected_align,
3561 					expected_size, min_size, max_size,
3562 					probable_max_size))
3563 	goto do_libcall;
3564 
3565       dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3566       dest_mem = convert_memory_address (ptr_mode, dest_mem);
3567       return dest_mem;
3568     }
3569 
3570   if (target_char_cast (val, &c))
3571     goto do_libcall;
3572 
3573   if (c)
3574     {
3575       if (tree_fits_uhwi_p (len)
3576 	  && can_store_by_pieces (tree_to_uhwi (len),
3577 				  builtin_memset_read_str, &c, dest_align,
3578 				  true))
3579 	store_by_pieces (dest_mem, tree_to_uhwi (len),
3580 			 builtin_memset_read_str, &c, dest_align, true, 0);
3581       else if (!set_storage_via_setmem (dest_mem, len_rtx,
3582 					gen_int_mode (c, val_mode),
3583 					dest_align, expected_align,
3584 					expected_size, min_size, max_size,
3585 					probable_max_size))
3586 	goto do_libcall;
3587 
3588       dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3589       dest_mem = convert_memory_address (ptr_mode, dest_mem);
3590       return dest_mem;
3591     }
3592 
3593   set_mem_align (dest_mem, dest_align);
3594   dest_addr = clear_storage_hints (dest_mem, len_rtx,
3595 				   CALL_EXPR_TAILCALL (orig_exp)
3596 				   ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
3597 				   expected_align, expected_size,
3598 				   min_size, max_size,
3599 				   probable_max_size);
3600 
3601   if (dest_addr == 0)
3602     {
3603       dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
3604       dest_addr = convert_memory_address (ptr_mode, dest_addr);
3605     }
3606 
3607   return dest_addr;
3608 
3609  do_libcall:
3610   fndecl = get_callee_fndecl (orig_exp);
3611   fcode = DECL_FUNCTION_CODE (fndecl);
3612   if (fcode == BUILT_IN_MEMSET
3613       || fcode == BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP)
3614     fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
3615 				dest, val, len);
3616   else if (fcode == BUILT_IN_BZERO)
3617     fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
3618 				dest, len);
3619   else
3620     gcc_unreachable ();
3621   gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3622   CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
3623   return expand_call (fn, target, target == const0_rtx);
3624 }
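
/* Illustrative note on the non-constant VAL path above: feasibility is
   checked with can_store_by_pieces using builtin_memset_read_str and a
   dummy byte of 1 (that callback emits no RTL), and the actual stores
   are then emitted with builtin_memset_gen_str, which replicates the
   runtime byte across each word, e.g. a word-mode store of
   0x01010101 * val for a 4-byte mode.  */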
3625 
3626 /* Expand expression EXP, which is a call to the bzero builtin.  Return
3627    NULL_RTX if we failed; the caller should emit a normal call.  */
3628 
3629 static rtx
3630 expand_builtin_bzero (tree exp)
3631 {
3632   tree dest, size;
3633   location_t loc = EXPR_LOCATION (exp);
3634 
3635   if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3636     return NULL_RTX;
3637 
3638   dest = CALL_EXPR_ARG (exp, 0);
3639   size = CALL_EXPR_ARG (exp, 1);
3640 
3641   /* New argument list transforming bzero(ptr x, int y) to
3642      memset(ptr x, int 0, size_t y).   This is done this way
3643      so that if it isn't expanded inline, we fall back to
3644      calling bzero instead of memset.  */
3645 
3646   return expand_builtin_memset_args (dest, integer_zero_node,
3647 				     fold_convert_loc (loc,
3648 						       size_type_node, size),
3649 				     const0_rtx, VOIDmode, exp);
3650 }
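
/* Illustrative sketch: the expansion above treats

     bzero (p, n);

   as memset (p, 0, (size_t) n) with the result discarded (const0_rtx
   target), while the original bzero call expression is kept so that a
   failed inline expansion still falls back to calling bzero.  */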
3651 
3652 /* Try to expand cmpstr operation ICODE with the given operands.
3653    Return the result rtx on success, otherwise return null.  */
3654 
3655 static rtx
3656 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
3657 	       HOST_WIDE_INT align)
3658 {
3659   machine_mode insn_mode = insn_data[icode].operand[0].mode;
3660 
3661   if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
3662     target = NULL_RTX;
3663 
3664   struct expand_operand ops[4];
3665   create_output_operand (&ops[0], target, insn_mode);
3666   create_fixed_operand (&ops[1], arg1_rtx);
3667   create_fixed_operand (&ops[2], arg2_rtx);
3668   create_integer_operand (&ops[3], align);
3669   if (maybe_expand_insn (icode, 4, ops))
3670     return ops[0].value;
3671   return NULL_RTX;
3672 }
3673 
3674 /* Try to expand cmpstrn or cmpmem operation ICODE with the given operands.
3675    ARG3_TYPE is the type of ARG3_RTX.  Return the result rtx on success,
3676    otherwise return null.  */
3677 
3678 static rtx
3679 expand_cmpstrn_or_cmpmem (insn_code icode, rtx target, rtx arg1_rtx,
3680 			  rtx arg2_rtx, tree arg3_type, rtx arg3_rtx,
3681 			  HOST_WIDE_INT align)
3682 {
3683   machine_mode insn_mode = insn_data[icode].operand[0].mode;
3684 
3685   if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
3686     target = NULL_RTX;
3687 
3688   struct expand_operand ops[5];
3689   create_output_operand (&ops[0], target, insn_mode);
3690   create_fixed_operand (&ops[1], arg1_rtx);
3691   create_fixed_operand (&ops[2], arg2_rtx);
3692   create_convert_operand_from (&ops[3], arg3_rtx, TYPE_MODE (arg3_type),
3693 			       TYPE_UNSIGNED (arg3_type));
3694   create_integer_operand (&ops[4], align);
3695   if (maybe_expand_insn (icode, 5, ops))
3696     return ops[0].value;
3697   return NULL_RTX;
3698 }
3699 
3700 /* Expand expression EXP, which is a call to the memcmp built-in function.
3701    Return NULL_RTX if we failed and the caller should emit a normal call,
3702    otherwise try to get the result in TARGET, if convenient.  */
3703 
3704 static rtx
3705 expand_builtin_memcmp (tree exp, rtx target)
3706 {
3707   if (!validate_arglist (exp,
3708  			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3709     return NULL_RTX;
3710 
3711   /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
3712      implementing memcmp because it will stop if it encounters two
3713      zero bytes.  */
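  /* For instance (an illustrative case, not from the testsuite):
     memcmp ("a\0x", "a\0y", 3) must examine all three bytes and return
     nonzero, whereas a cmpstrnsi-style comparison would stop at the
     matching NUL bytes and wrongly report equality.  */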
3714   insn_code icode = direct_optab_handler (cmpmem_optab, SImode);
3715   if (icode == CODE_FOR_nothing)
3716     return NULL_RTX;
3717 
3718   tree arg1 = CALL_EXPR_ARG (exp, 0);
3719   tree arg2 = CALL_EXPR_ARG (exp, 1);
3720   tree len = CALL_EXPR_ARG (exp, 2);
3721 
3722   unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3723   unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3724 
3725   /* If either argument's alignment is unknown, just call the function.  */
3726   if (arg1_align == 0 || arg2_align == 0)
3727     return NULL_RTX;
3728 
3729   machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3730   location_t loc = EXPR_LOCATION (exp);
3731   rtx arg1_rtx = get_memory_rtx (arg1, len);
3732   rtx arg2_rtx = get_memory_rtx (arg2, len);
3733   rtx arg3_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
3734 
3735   /* Set MEM_SIZE as appropriate.  */
3736   if (CONST_INT_P (arg3_rtx))
3737     {
3738       set_mem_size (arg1_rtx, INTVAL (arg3_rtx));
3739       set_mem_size (arg2_rtx, INTVAL (arg3_rtx));
3740     }
3741 
3742   rtx result = expand_cmpstrn_or_cmpmem (icode, target, arg1_rtx, arg2_rtx,
3743 					 TREE_TYPE (len), arg3_rtx,
3744 					 MIN (arg1_align, arg2_align));
3745   if (result)
3746     {
3747       /* Return the value in the proper mode for this function.  */
3748       if (GET_MODE (result) == mode)
3749 	return result;
3750 
3751       if (target != 0)
3752 	{
3753 	  convert_move (target, result, 0);
3754 	  return target;
3755 	}
3756 
3757       return convert_to_mode (mode, result, 0);
3758     }
3759 
3760   result = target;
3761   if (! (result != 0
3762 	 && REG_P (result) && GET_MODE (result) == mode
3763 	 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
3764     result = gen_reg_rtx (mode);
3765 
3766   emit_library_call_value (memcmp_libfunc, result, LCT_PURE,
3767 			   TYPE_MODE (integer_type_node), 3,
3768 			   XEXP (arg1_rtx, 0), Pmode,
3769 			   XEXP (arg2_rtx, 0), Pmode,
3770 			   convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
3771 					    TYPE_UNSIGNED (sizetype)),
3772 			   TYPE_MODE (sizetype));
3773   return result;
3774 }
3775 
3776 /* Expand expression EXP, which is a call to the strcmp builtin.  Return
3777    NULL_RTX if we failed; the caller should emit a normal call, otherwise
3778    try to get the result in TARGET, if convenient.  */
3779 
3780 static rtx
3781 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
3782 {
3783   if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3784     return NULL_RTX;
3785 
3786   insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
3787   insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
3788   if (cmpstr_icode != CODE_FOR_nothing || cmpstrn_icode != CODE_FOR_nothing)
3789     {
3790       rtx arg1_rtx, arg2_rtx;
3791       tree fndecl, fn;
3792       tree arg1 = CALL_EXPR_ARG (exp, 0);
3793       tree arg2 = CALL_EXPR_ARG (exp, 1);
3794       rtx result = NULL_RTX;
3795 
3796       unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3797       unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3798 
3799       /* If either argument's alignment is unknown, just call the function.  */
3800       if (arg1_align == 0 || arg2_align == 0)
3801 	return NULL_RTX;
3802 
3803       /* Stabilize the arguments in case gen_cmpstr(n)si fail.  */
3804       arg1 = builtin_save_expr (arg1);
3805       arg2 = builtin_save_expr (arg2);
3806 
3807       arg1_rtx = get_memory_rtx (arg1, NULL);
3808       arg2_rtx = get_memory_rtx (arg2, NULL);
3809 
3810       /* Try to call cmpstrsi.  */
3811       if (cmpstr_icode != CODE_FOR_nothing)
3812 	result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
3813 				MIN (arg1_align, arg2_align));
3814 
3815       /* Try to determine at least one length and call cmpstrnsi.  */
3816       if (!result && cmpstrn_icode != CODE_FOR_nothing)
3817 	{
3818 	  tree len;
3819 	  rtx arg3_rtx;
3820 
3821 	  tree len1 = c_strlen (arg1, 1);
3822 	  tree len2 = c_strlen (arg2, 1);
3823 
3824 	  if (len1)
3825 	    len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
3826 	  if (len2)
3827 	    len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
3828 
3829 	  /* If we don't have a constant length for the first, use the length
3830 	     of the second, if we know it.  We don't require a constant for
3831 	     this case; some cost analysis could be done if both are available
3832 	     but neither is constant.  For now, assume they're equally cheap,
3833 	     unless one has side effects.  If both strings have constant lengths,
3834 	     use the smaller.  */
3835 
3836 	  if (!len1)
3837 	    len = len2;
3838 	  else if (!len2)
3839 	    len = len1;
3840 	  else if (TREE_SIDE_EFFECTS (len1))
3841 	    len = len2;
3842 	  else if (TREE_SIDE_EFFECTS (len2))
3843 	    len = len1;
3844 	  else if (TREE_CODE (len1) != INTEGER_CST)
3845 	    len = len2;
3846 	  else if (TREE_CODE (len2) != INTEGER_CST)
3847 	    len = len1;
3848 	  else if (tree_int_cst_lt (len1, len2))
3849 	    len = len1;
3850 	  else
3851 	    len = len2;
3852 
3853 	  /* If both arguments have side effects, we cannot optimize.  */
3854 	  if (len && !TREE_SIDE_EFFECTS (len))
3855 	    {
3856 	      arg3_rtx = expand_normal (len);
3857 	      result = expand_cmpstrn_or_cmpmem
3858 		(cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
3859 		 arg3_rtx, MIN (arg1_align, arg2_align));
3860 	    }
3861 	}
3862 
3863       if (result)
3864 	{
3865 	  /* Return the value in the proper mode for this function.  */
3866 	  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
3867 	  if (GET_MODE (result) == mode)
3868 	    return result;
3869 	  if (target == 0)
3870 	    return convert_to_mode (mode, result, 0);
3871 	  convert_move (target, result, 0);
3872 	  return target;
3873 	}
3874 
3875       /* Expand the library call ourselves using a stabilized argument
3876 	 list to avoid re-evaluating the function's arguments twice.  */
3877       fndecl = get_callee_fndecl (exp);
3878       fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
3879       gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3880       CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3881       return expand_call (fn, target, target == const0_rtx);
3882     }
3883   return NULL_RTX;
3884 }
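
/* Illustrative example of the length selection above: for
   strcmp (s, "abc") only the second length is known, so LEN becomes
   strlen ("abc") + 1 == 4 and the comparison is emitted via cmpstrnsi
   with that bound; since cmpstrnsi also stops at a NUL, comparing at
   most 4 bytes is enough to decide the result.  */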
3885 
3886 /* Expand expression EXP, which is a call to the strncmp builtin.  Return
3887    NULL_RTX if we failed; the caller should emit a normal call, otherwise
3888    try to get the result in TARGET, if convenient.  */
3889 
3890 static rtx
3891 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
3892 			ATTRIBUTE_UNUSED machine_mode mode)
3893 {
3894   location_t loc ATTRIBUTE_UNUSED = EXPR_LOCATION (exp);
3895 
3896   if (!validate_arglist (exp,
3897  			 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3898     return NULL_RTX;
3899 
3900   /* If c_strlen can determine an expression for one of the string
3901      lengths, and it doesn't have side effects, then emit cmpstrnsi
3902      using length MIN(strlen(string)+1, arg3).  */
3903   insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
3904   if (cmpstrn_icode != CODE_FOR_nothing)
3905   {
3906     tree len, len1, len2;
3907     rtx arg1_rtx, arg2_rtx, arg3_rtx;
3908     rtx result;
3909     tree fndecl, fn;
3910     tree arg1 = CALL_EXPR_ARG (exp, 0);
3911     tree arg2 = CALL_EXPR_ARG (exp, 1);
3912     tree arg3 = CALL_EXPR_ARG (exp, 2);
3913 
3914     unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
3915     unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
3916 
3917     len1 = c_strlen (arg1, 1);
3918     len2 = c_strlen (arg2, 1);
3919 
3920     if (len1)
3921       len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
3922     if (len2)
3923       len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
3924 
3925     /* If we don't have a constant length for the first, use the length
3926        of the second, if we know it.  We don't require a constant for
3927        this case; some cost analysis could be done if both are available
3928        but neither is constant.  For now, assume they're equally cheap,
3929        unless one has side effects.  If both strings have constant lengths,
3930        use the smaller.  */
3931 
3932     if (!len1)
3933       len = len2;
3934     else if (!len2)
3935       len = len1;
3936     else if (TREE_SIDE_EFFECTS (len1))
3937       len = len2;
3938     else if (TREE_SIDE_EFFECTS (len2))
3939       len = len1;
3940     else if (TREE_CODE (len1) != INTEGER_CST)
3941       len = len2;
3942     else if (TREE_CODE (len2) != INTEGER_CST)
3943       len = len1;
3944     else if (tree_int_cst_lt (len1, len2))
3945       len = len1;
3946     else
3947       len = len2;
3948 
3949     /* If both arguments have side effects, we cannot optimize.  */
3950     if (!len || TREE_SIDE_EFFECTS (len))
3951       return NULL_RTX;
3952 
3953     /* The actual new length parameter is MIN(len,arg3).  */
3954     len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len,
3955 		       fold_convert_loc (loc, TREE_TYPE (len), arg3));
3956 
3957     /* If either argument's alignment is unknown, just call the function.  */
3958     if (arg1_align == 0 || arg2_align == 0)
3959       return NULL_RTX;
3960 
3961     /* Stabilize the arguments in case gen_cmpstrnsi fails.  */
3962     arg1 = builtin_save_expr (arg1);
3963     arg2 = builtin_save_expr (arg2);
3964     len = builtin_save_expr (len);
3965 
3966     arg1_rtx = get_memory_rtx (arg1, len);
3967     arg2_rtx = get_memory_rtx (arg2, len);
3968     arg3_rtx = expand_normal (len);
3969     result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
3970 				       arg2_rtx, TREE_TYPE (len), arg3_rtx,
3971 				       MIN (arg1_align, arg2_align));
3972     if (result)
3973       {
3974 	/* Return the value in the proper mode for this function.  */
3975 	mode = TYPE_MODE (TREE_TYPE (exp));
3976 	if (GET_MODE (result) == mode)
3977 	  return result;
3978 	if (target == 0)
3979 	  return convert_to_mode (mode, result, 0);
3980 	convert_move (target, result, 0);
3981 	return target;
3982       }
3983 
3984     /* Expand the library call ourselves using a stabilized argument
3985        list to avoid re-evaluating the function's arguments twice.  */
3986     fndecl = get_callee_fndecl (exp);
3987     fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 3,
3988 				arg1, arg2, len);
3989     gcc_assert (TREE_CODE (fn) == CALL_EXPR);
3990     CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
3991     return expand_call (fn, target, target == const0_rtx);
3992   }
3993   return NULL_RTX;
3994 }
3995 
3996 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
3997    if that's convenient.  */
3998 
3999 rtx
4000 expand_builtin_saveregs (void)
4001 {
4002   rtx val;
4003   rtx_insn *seq;
4004 
4005   /* Don't do __builtin_saveregs more than once in a function.
4006      Save the result of the first call and reuse it.  */
4007   if (saveregs_value != 0)
4008     return saveregs_value;
4009 
4010   /* When this function is called, it means that registers must be
4011      saved on entry to this function.  So we migrate the call to the
4012      first insn of this function.  */
4013 
4014   start_sequence ();
4015 
4016   /* Do whatever the machine needs done in this case.  */
4017   val = targetm.calls.expand_builtin_saveregs ();
4018 
4019   seq = get_insns ();
4020   end_sequence ();
4021 
4022   saveregs_value = val;
4023 
4024   /* Put the insns after the NOTE that starts the function.  If this
4025      is inside a start_sequence, make the outer-level insn chain current, so
4026      the code is placed at the start of the function.  */
4027   push_topmost_sequence ();
4028   emit_insn_after (seq, entry_of_function ());
4029   pop_topmost_sequence ();
4030 
4031   return val;
4032 }
4033 
4034 /* Expand a call to __builtin_next_arg.  */
4035 
4036 static rtx
4037 expand_builtin_next_arg (void)
4038 {
4039   /* Checking arguments is already done in fold_builtin_next_arg
4040      that must be called before this function.  */
4041   return expand_binop (ptr_mode, add_optab,
4042 		       crtl->args.internal_arg_pointer,
4043 		       crtl->args.arg_offset_rtx,
4044 		       NULL_RTX, 0, OPTAB_LIB_WIDEN);
4045 }
4046 
4047 /* Make it easier for the backends by protecting the valist argument
4048    from multiple evaluations.  */
4049 
4050 static tree
4051 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
4052 {
4053   tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
4054 
4055   /* The current way of determining the type of valist is completely
4056      bogus.  We should have the information on the va builtin instead.  */
4057   if (!vatype)
4058     vatype = targetm.fn_abi_va_list (cfun->decl);
4059 
4060   if (TREE_CODE (vatype) == ARRAY_TYPE)
4061     {
4062       if (TREE_SIDE_EFFECTS (valist))
4063 	valist = save_expr (valist);
4064 
4065       /* For this case, the backends will be expecting a pointer to
4066 	 vatype, but it's possible we've actually been given an array
4067 	 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
4068 	 So fix it.  */
4069       if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
4070 	{
4071 	  tree p1 = build_pointer_type (TREE_TYPE (vatype));
4072 	  valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
4073 	}
4074     }
4075   else
4076     {
4077       tree pt = build_pointer_type (vatype);
4078 
4079       if (! needs_lvalue)
4080 	{
4081 	  if (! TREE_SIDE_EFFECTS (valist))
4082 	    return valist;
4083 
4084 	  valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
4085 	  TREE_SIDE_EFFECTS (valist) = 1;
4086 	}
4087 
4088       if (TREE_SIDE_EFFECTS (valist))
4089 	valist = save_expr (valist);
4090       valist = fold_build2_loc (loc, MEM_REF,
4091 				vatype, valist, build_int_cst (pt, 0));
4092     }
4093 
4094   return valist;
4095 }
4096 
4097 /* The "standard" definition of va_list is void*.  */
4098 
4099 tree
4100 std_build_builtin_va_list (void)
4101 {
4102   return ptr_type_node;
4103 }
4104 
4105 /* The "standard" abi va_list is va_list_type_node.  */
4106 
4107 tree
4108 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
4109 {
4110   return va_list_type_node;
4111 }
4112 
4113 /* The "standard" type of va_list is va_list_type_node.  */
4114 
4115 tree
4116 std_canonical_va_list_type (tree type)
4117 {
4118   tree wtype, htype;
4119 
4120   if (INDIRECT_REF_P (type))
4121     type = TREE_TYPE (type);
4122   else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (type)))
4123     type = TREE_TYPE (type);
4124   wtype = va_list_type_node;
4125   htype = type;
4126   /* Treat structure va_list types.  */
4127   if (TREE_CODE (wtype) == RECORD_TYPE && POINTER_TYPE_P (htype))
4128     htype = TREE_TYPE (htype);
4129   else if (TREE_CODE (wtype) == ARRAY_TYPE)
4130     {
4131       /* If va_list is an array type, the argument may have decayed
4132 	 to a pointer type, e.g. by being passed to another function.
4133 	 In that case, unwrap both types so that we can compare the
4134 	 underlying records.  */
4135       if (TREE_CODE (htype) == ARRAY_TYPE
4136 	  || POINTER_TYPE_P (htype))
4137 	{
4138 	  wtype = TREE_TYPE (wtype);
4139 	  htype = TREE_TYPE (htype);
4140 	}
4141     }
4142   if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
4143     return va_list_type_node;
4144 
4145   return NULL_TREE;
4146 }
4147 
4148 /* The "standard" implementation of va_start: just assign `nextarg' to
4149    the variable.  */
4150 
4151 void
4152 std_expand_builtin_va_start (tree valist, rtx nextarg)
4153 {
4154   rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
4155   convert_move (va_r, nextarg, 0);
4156 
4157   /* We do not have any valid bounds for the pointer, so
4158      just store zero bounds for it.  */
4159   if (chkp_function_instrumented_p (current_function_decl))
4160     chkp_expand_bounds_reset_for_mem (valist,
4161 				      make_tree (TREE_TYPE (valist),
4162 						 nextarg));
4163 }
4164 
4165 /* Expand EXP, a call to __builtin_va_start.  */
4166 
4167 static rtx
4168 expand_builtin_va_start (tree exp)
4169 {
4170   rtx nextarg;
4171   tree valist;
4172   location_t loc = EXPR_LOCATION (exp);
4173 
4174   if (call_expr_nargs (exp) < 2)
4175     {
4176       error_at (loc, "too few arguments to function %<va_start%>");
4177       return const0_rtx;
4178     }
4179 
4180   if (fold_builtin_next_arg (exp, true))
4181     return const0_rtx;
4182 
4183   nextarg = expand_builtin_next_arg ();
4184   valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
4185 
4186   if (targetm.expand_builtin_va_start)
4187     targetm.expand_builtin_va_start (valist, nextarg);
4188   else
4189     std_expand_builtin_va_start (valist, nextarg);
4190 
4191   return const0_rtx;
4192 }
4193 
4194 /* Expand EXP, a call to __builtin_va_end.  */
4195 
4196 static rtx
4197 expand_builtin_va_end (tree exp)
4198 {
4199   tree valist = CALL_EXPR_ARG (exp, 0);
4200 
4201   /* Evaluate for side effects, if needed.  I hate macros that don't
4202      do that.  */
4203   if (TREE_SIDE_EFFECTS (valist))
4204     expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
4205 
4206   return const0_rtx;
4207 }
4208 
4209 /* Expand EXP, a call to __builtin_va_copy.  We do this as a
4210    builtin rather than just as an assignment in stdarg.h because of the
4211    nastiness of array-type va_list types.  */
4212 
4213 static rtx
4214 expand_builtin_va_copy (tree exp)
4215 {
4216   tree dst, src, t;
4217   location_t loc = EXPR_LOCATION (exp);
4218 
4219   dst = CALL_EXPR_ARG (exp, 0);
4220   src = CALL_EXPR_ARG (exp, 1);
4221 
4222   dst = stabilize_va_list_loc (loc, dst, 1);
4223   src = stabilize_va_list_loc (loc, src, 0);
4224 
4225   gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
4226 
4227   if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
4228     {
4229       t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
4230       TREE_SIDE_EFFECTS (t) = 1;
4231       expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
4232     }
4233   else
4234     {
4235       rtx dstb, srcb, size;
4236 
4237       /* Evaluate to pointers.  */
4238       dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
4239       srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
4240       size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
4241       		  NULL_RTX, VOIDmode, EXPAND_NORMAL);
4242 
4243       dstb = convert_memory_address (Pmode, dstb);
4244       srcb = convert_memory_address (Pmode, srcb);
4245 
4246       /* "Dereference" to BLKmode memories.  */
4247       dstb = gen_rtx_MEM (BLKmode, dstb);
4248       set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
4249       set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4250       srcb = gen_rtx_MEM (BLKmode, srcb);
4251       set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
4252       set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
4253 
4254       /* Copy.  */
4255       emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
4256     }
4257 
4258   return const0_rtx;
4259 }
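
/* Illustrative note: the block-copy branch above matters on targets
   whose ABI va_list is an array type (for example, a one-element array
   of a structure on x86-64; stated here as an illustration, not
   something this function checks).  There the va_list argument decays
   to a pointer, so va_copy must copy the underlying structure rather
   than merely assign a pointer.  */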
4260 
4261 /* Expand a call to one of the builtin functions __builtin_frame_address or
4262    __builtin_return_address.  */
4263 
4264 static rtx
4265 expand_builtin_frame_address (tree fndecl, tree exp)
4266 {
4267   /* The argument must be a nonnegative integer constant.
4268      It counts the number of frames to scan up the stack.
4269      The value is either the frame pointer value or the return
4270      address saved in that frame.  */
4271   if (call_expr_nargs (exp) == 0)
4272     /* Warning about missing arg was already issued.  */
4273     return const0_rtx;
4274   else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
4275     {
4276       error ("invalid argument to %qD", fndecl);
4277       return const0_rtx;
4278     }
4279   else
4280     {
4281       /* Number of frames to scan up the stack.  */
4282       unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
4283 
4284       rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
4285 
4286       /* Some ports cannot access arbitrary stack frames.  */
4287       if (tem == NULL)
4288 	{
4289 	  warning (0, "unsupported argument to %qD", fndecl);
4290 	  return const0_rtx;
4291 	}
4292 
4293       if (count)
4294 	{
4295 	  /* Warn since no effort is made to ensure that any frame
4296 	     beyond the current one exists or can be safely reached.  */
4297 	  warning (OPT_Wframe_address, "calling %qD with "
4298 		   "a nonzero argument is unsafe", fndecl);
4299 	}
4300 
4301       /* For __builtin_frame_address, return what we've got.  */
4302       if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
4303 	return tem;
4304 
4305       if (!REG_P (tem)
4306 	  && ! CONSTANT_P (tem))
4307 	tem = copy_addr_to_reg (tem);
4308       return tem;
4309     }
4310 }
4311 
4312 /* Expand EXP, a call to the alloca builtin.  Return NULL_RTX if we
4313    failed and the caller should emit a normal call.  CANNOT_ACCUMULATE
4314    is the same as for allocate_dynamic_stack_space.  */
4315 
4316 static rtx
4317 expand_builtin_alloca (tree exp, bool cannot_accumulate)
4318 {
4319   rtx op0;
4320   rtx result;
4321   bool valid_arglist;
4322   unsigned int align;
4323   bool alloca_with_align = (DECL_FUNCTION_CODE (get_callee_fndecl (exp))
4324 			    == BUILT_IN_ALLOCA_WITH_ALIGN);
4325 
4326   valid_arglist
4327     = (alloca_with_align
4328        ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
4329        : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
4330 
4331   if (!valid_arglist)
4332     return NULL_RTX;
4333 
4334   /* Compute the argument.  */
4335   op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
4336 
4337   /* Compute the alignment.  */
4338   align = (alloca_with_align
4339 	   ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1))
4340 	   : BIGGEST_ALIGNMENT);
4341 
4342   /* Allocate the desired space.  */
4343   result = allocate_dynamic_stack_space (op0, 0, align, cannot_accumulate);
4344   result = convert_memory_address (ptr_mode, result);
4345 
4346   return result;
4347 }
4348 
4349 /* Expand a call to bswap builtin in EXP.
4350    Return NULL_RTX if a normal call should be emitted rather than expanding the
4351    function in-line.  If convenient, the result should be placed in TARGET.
4352    SUBTARGET may be used as the target for computing one of EXP's operands.  */
4353 
4354 static rtx
4355 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
4356 		      rtx subtarget)
4357 {
4358   tree arg;
4359   rtx op0;
4360 
4361   if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4362     return NULL_RTX;
4363 
4364   arg = CALL_EXPR_ARG (exp, 0);
4365   op0 = expand_expr (arg,
4366 		     subtarget && GET_MODE (subtarget) == target_mode
4367 		     ? subtarget : NULL_RTX,
4368 		     target_mode, EXPAND_NORMAL);
4369   if (GET_MODE (op0) != target_mode)
4370     op0 = convert_to_mode (target_mode, op0, 1);
4371 
4372   target = expand_unop (target_mode, bswap_optab, op0, target, 1);
4373 
4374   gcc_assert (target);
4375 
4376   return convert_to_mode (target_mode, target, 1);
4377 }
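
/* Illustrative example: expanding __builtin_bswap32 (0x12345678) through
   bswap_optab yields 0x78563412, i.e. the four bytes in reverse
   order.  */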
4378 
4379 /* Expand a call to a unary builtin in EXP.
4380    Return NULL_RTX if a normal call should be emitted rather than expanding the
4381    function in-line.  If convenient, the result should be placed in TARGET.
4382    SUBTARGET may be used as the target for computing one of EXP's operands.  */
4383 
4384 static rtx
4385 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
4386 		     rtx subtarget, optab op_optab)
4387 {
4388   rtx op0;
4389 
4390   if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
4391     return NULL_RTX;
4392 
4393   /* Compute the argument.  */
4394   op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
4395 		     (subtarget
4396 		      && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
4397 			  == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
4398 		     VOIDmode, EXPAND_NORMAL);
4399   /* Compute op, into TARGET if possible.
4400      Set TARGET to wherever the result comes back.  */
4401   target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
4402 			op_optab, op0, target, op_optab != clrsb_optab);
4403   gcc_assert (target);
4404 
4405   return convert_to_mode (target_mode, target, 0);
4406 }
4407 
4408 /* Expand a call to __builtin_expect.  We just return our argument
4409    as the builtin_expect semantics should already have been handled by
4410    the tree branch prediction pass.  */
4411 
4412 static rtx
4413 expand_builtin_expect (tree exp, rtx target)
4414 {
4415   tree arg;
4416 
4417   if (call_expr_nargs (exp) < 2)
4418     return const0_rtx;
4419   arg = CALL_EXPR_ARG (exp, 0);
4420 
4421   target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4422   /* When guessing was done, the hints should be already stripped away.  */
4423   gcc_assert (!flag_guess_branch_prob
4424 	      || optimize == 0 || seen_error ());
4425   return target;
4426 }
4427 
4428 /* Expand a call to __builtin_assume_aligned.  We just return our first
4429    argument as the builtin_assume_aligned semantics should already have
4430    been handled by CCP.  */
4431 
4432 static rtx
4433 expand_builtin_assume_aligned (tree exp, rtx target)
4434 {
4435   if (call_expr_nargs (exp) < 2)
4436     return const0_rtx;
4437   target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
4438 			EXPAND_NORMAL);
4439   gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
4440 	      && (call_expr_nargs (exp) < 3
4441 		  || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
4442   return target;
4443 }
4444 
4445 void
4446 expand_builtin_trap (void)
4447 {
4448   if (targetm.have_trap ())
4449     {
4450       rtx_insn *insn = emit_insn (targetm.gen_trap ());
4451       /* For trap insns when not accumulating outgoing args force
4452 	 REG_ARGS_SIZE note to prevent crossjumping of calls with
4453 	 different args sizes.  */
4454       if (!ACCUMULATE_OUTGOING_ARGS)
4455 	add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4456     }
4457   else
4458     emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
4459   emit_barrier ();
4460 }
4461 
4462 /* Expand a call to __builtin_unreachable.  We do nothing except emit
4463    a barrier saying that control flow will not pass here.
4464 
4465    It is the responsibility of the program being compiled to ensure
4466    that control flow never reaches __builtin_unreachable.  */
4467 static void
4468 expand_builtin_unreachable (void)
4469 {
4470   emit_barrier ();
4471 }
4472 
4473 /* Expand EXP, a call to fabs, fabsf or fabsl.
4474    Return NULL_RTX if a normal call should be emitted rather than expanding
4475    the function inline.  If convenient, the result should be placed
4476    in TARGET.  SUBTARGET may be used as the target for computing
4477    the operand.  */
4478 
4479 static rtx
4480 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
4481 {
4482   machine_mode mode;
4483   tree arg;
4484   rtx op0;
4485 
4486   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4487     return NULL_RTX;
4488 
4489   arg = CALL_EXPR_ARG (exp, 0);
4490   CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
4491   mode = TYPE_MODE (TREE_TYPE (arg));
4492   op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4493   return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
4494 }
4495 
4496 /* Expand EXP, a call to copysign, copysignf, or copysignl.
4497    Return NULL if a normal call should be emitted rather than expanding the
4498    function inline.  If convenient, the result should be placed in TARGET.
4499    SUBTARGET may be used as the target for computing the operand.  */
4500 
4501 static rtx
4502 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
4503 {
4504   rtx op0, op1;
4505   tree arg;
4506 
4507   if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
4508     return NULL_RTX;
4509 
4510   arg = CALL_EXPR_ARG (exp, 0);
4511   op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
4512 
4513   arg = CALL_EXPR_ARG (exp, 1);
4514   op1 = expand_normal (arg);
4515 
4516   return expand_copysign (op0, op1, target);
4517 }
4518 
4519 /* Expand a call to __builtin___clear_cache.  */
4520 
4521 static rtx
4522 expand_builtin___clear_cache (tree exp)
4523 {
4524   if (!targetm.code_for_clear_cache)
4525     {
4526 #ifdef CLEAR_INSN_CACHE
4527       /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4528 	 does something.  Just do the default expansion to a call to
4529 	 __clear_cache().  */
4530       return NULL_RTX;
4531 #else
4532       /* There is no "clear_cache" insn, and __clear_cache() in libgcc
4533 	 does nothing.  There is no need to call it.  Do nothing.  */
4534       return const0_rtx;
4535 #endif /* CLEAR_INSN_CACHE */
4536     }
4537 
4538   /* We have a "clear_cache" insn, and it will handle everything.  */
4539   tree begin, end;
4540   rtx begin_rtx, end_rtx;
4541 
4542   /* We must not expand to a library call.  If we did, any
4543      fallback library function in libgcc that might contain a call to
4544      __builtin___clear_cache() would recurse infinitely.  */
4545   if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
4546     {
4547       error ("both arguments to %<__builtin___clear_cache%> must be pointers");
4548       return const0_rtx;
4549     }
4550 
4551   if (targetm.have_clear_cache ())
4552     {
4553       struct expand_operand ops[2];
4554 
4555       begin = CALL_EXPR_ARG (exp, 0);
4556       begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
4557 
4558       end = CALL_EXPR_ARG (exp, 1);
4559       end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
4560 
4561       create_address_operand (&ops[0], begin_rtx);
4562       create_address_operand (&ops[1], end_rtx);
4563       if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
4564 	return const0_rtx;
4565     }
4566   return const0_rtx;
4567 }
4568 
4569 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT.  */
4570 
4571 static rtx
4572 round_trampoline_addr (rtx tramp)
4573 {
4574   rtx temp, addend, mask;
4575 
4576   /* If we don't need too much alignment, we'll have been guaranteed
4577      proper alignment by get_trampoline_type.  */
4578   if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
4579     return tramp;
4580 
4581   /* Round address up to desired boundary.  */
4582   temp = gen_reg_rtx (Pmode);
4583   addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
4584   mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
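  /* For instance, with a hypothetical TRAMPOLINE_ALIGNMENT of 64 bits,
     ADDEND is 7 and MASK is -8, so the two binops below compute
     (TRAMP + 7) & -8, i.e. the address rounded up to an 8-byte
     boundary.  */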
4585 
4586   temp  = expand_simple_binop (Pmode, PLUS, tramp, addend,
4587 			       temp, 0, OPTAB_LIB_WIDEN);
4588   tramp = expand_simple_binop (Pmode, AND, temp, mask,
4589 			       temp, 0, OPTAB_LIB_WIDEN);
4590 
4591   return tramp;
4592 }
4593 
4594 static rtx
4595 expand_builtin_init_trampoline (tree exp, bool onstack)
4596 {
4597   tree t_tramp, t_func, t_chain;
4598   rtx m_tramp, r_tramp, r_chain, tmp;
4599 
4600   if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
4601 			 POINTER_TYPE, VOID_TYPE))
4602     return NULL_RTX;
4603 
4604   t_tramp = CALL_EXPR_ARG (exp, 0);
4605   t_func = CALL_EXPR_ARG (exp, 1);
4606   t_chain = CALL_EXPR_ARG (exp, 2);
4607 
4608   r_tramp = expand_normal (t_tramp);
4609   m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
4610   MEM_NOTRAP_P (m_tramp) = 1;
4611 
4612   /* If ONSTACK, the TRAMP argument should be the address of a field
4613      within the local function's FRAME decl.  Either way, let's see if
4614      we can fill in the MEM_ATTRs for this memory.  */
4615   if (TREE_CODE (t_tramp) == ADDR_EXPR)
4616     set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
4617 
4618   /* Creator of a heap trampoline is responsible for making sure the
4619      address is aligned to at least STACK_BOUNDARY.  Normally malloc
4620      will ensure this anyhow.  */
4621   tmp = round_trampoline_addr (r_tramp);
4622   if (tmp != r_tramp)
4623     {
4624       m_tramp = change_address (m_tramp, BLKmode, tmp);
4625       set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
4626       set_mem_size (m_tramp, TRAMPOLINE_SIZE);
4627     }
4628 
4629   /* The FUNC argument should be the address of the nested function.
4630      Extract the actual function decl to pass to the hook.  */
4631   gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
4632   t_func = TREE_OPERAND (t_func, 0);
4633   gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
4634 
4635   r_chain = expand_normal (t_chain);
4636 
4637   /* Generate insns to initialize the trampoline.  */
4638   targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
4639 
4640   if (onstack)
4641     {
4642       trampolines_created = 1;
4643 
4644       warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
4645 		  "trampoline generated for nested function %qD", t_func);
4646     }
4647 
4648   return const0_rtx;
4649 }
4650 
4651 static rtx
4652 expand_builtin_adjust_trampoline (tree exp)
4653 {
4654   rtx tramp;
4655 
4656   if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
4657     return NULL_RTX;
4658 
4659   tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
4660   tramp = round_trampoline_addr (tramp);
4661   if (targetm.calls.trampoline_adjust_address)
4662     tramp = targetm.calls.trampoline_adjust_address (tramp);
4663 
4664   return tramp;
4665 }
4666 
4667 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
4668    function.  The function first checks whether the back end provides
4669    an insn to implement signbit for the respective mode.  If not, it
4670    checks whether the floating point format of the value is such that
4671    the sign bit can be extracted.  If that is not the case, error out.
4672    EXP is the expression that is a call to the builtin function; if
4673    convenient, the result should be placed in TARGET.  */
4674 static rtx
4675 expand_builtin_signbit (tree exp, rtx target)
4676 {
4677   const struct real_format *fmt;
4678   machine_mode fmode, imode, rmode;
4679   tree arg;
4680   int word, bitpos;
4681   enum insn_code icode;
4682   rtx temp;
4683   location_t loc = EXPR_LOCATION (exp);
4684 
4685   if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
4686     return NULL_RTX;
4687 
4688   arg = CALL_EXPR_ARG (exp, 0);
4689   fmode = TYPE_MODE (TREE_TYPE (arg));
4690   rmode = TYPE_MODE (TREE_TYPE (exp));
4691   fmt = REAL_MODE_FORMAT (fmode);
4692 
4693   arg = builtin_save_expr (arg);
4694 
4695   /* Expand the argument yielding an RTX expression.  */
4696   temp = expand_normal (arg);
4697 
4698   /* Check if the back end provides an insn that handles signbit for the
4699      argument's mode. */
4700   icode = optab_handler (signbit_optab, fmode);
4701   if (icode != CODE_FOR_nothing)
4702     {
4703       rtx_insn *last = get_last_insn ();
4704       target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4705       if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
4706 	return target;
4707       delete_insns_since (last);
4708     }
4709 
4710   /* For floating point formats without a sign bit, implement signbit
4711      as "ARG < 0.0".  */
4712   bitpos = fmt->signbit_ro;
4713   if (bitpos < 0)
4714   {
4715     /* But we can't do this if the format supports signed zero.  */
4716     gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
4717 
4718     arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
4719 		       build_real (TREE_TYPE (arg), dconst0));
4720     return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
4721   }
4722 
4723   if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
4724     {
4725       imode = int_mode_for_mode (fmode);
4726       gcc_assert (imode != BLKmode);
4727       temp = gen_lowpart (imode, temp);
4728     }
4729   else
4730     {
4731       imode = word_mode;
4732       /* Handle targets with different FP word orders.  */
4733       if (FLOAT_WORDS_BIG_ENDIAN)
4734 	word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
4735       else
4736 	word = bitpos / BITS_PER_WORD;
4737       temp = operand_subword_force (temp, word, fmode);
4738       bitpos = bitpos % BITS_PER_WORD;
4739     }
4740 
4741   /* Force the intermediate word_mode (or narrower) result into a
4742      register.  This avoids attempting to create paradoxical SUBREGs
4743      of floating point modes below.  */
4744   temp = force_reg (imode, temp);
4745 
4746   /* If the bitpos is within the "result mode" lowpart, the operation
4747      can be implemented with a single bitwise AND.  Otherwise, we need
4748      a right shift and an AND.  */
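  /* For example, on a typical target the IEEE single format has
     BITPOS == 31, which fits in a 32-bit RMODE, so one AND with the
     mask 0x80000000 extracts the sign; for the 64-bit double format
     BITPOS == 63 does not fit and the shift-and-mask path is used.  */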
4749 
4750   if (bitpos < GET_MODE_BITSIZE (rmode))
4751     {
4752       wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
4753 
4754       if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
4755 	temp = gen_lowpart (rmode, temp);
4756       temp = expand_binop (rmode, and_optab, temp,
4757 			   immed_wide_int_const (mask, rmode),
4758 			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
4759     }
4760   else
4761     {
4762       /* Perform a logical right shift to place the signbit in the least
4763 	 significant bit, then truncate the result to the desired mode
4764 	 and mask just this bit.  */
4765       temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
4766       temp = gen_lowpart (rmode, temp);
4767       temp = expand_binop (rmode, and_optab, temp, const1_rtx,
4768 			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
4769     }
4770 
4771   return temp;
4772 }
4773 
4774 /* Expand fork or exec calls.  TARGET is the desired target of the
4775    call.  EXP is the call. FN is the
4776    identifier of the actual function.  IGNORE is nonzero if the
4777    value is to be ignored.  */
4778 
4779 static rtx
4780 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
4781 {
4782   tree id, decl;
4783   tree call;
4784 
4785   /* If we are not profiling, just call the function.  */
4786   if (!profile_arc_flag)
4787     return NULL_RTX;
4788 
4789   /* Otherwise call the wrapper.  This should be equivalent for the rest of
4790      compiler, so the code does not diverge, and the wrapper may run the
4791      code necessary for keeping the profiling sane.  */
4792 
4793   switch (DECL_FUNCTION_CODE (fn))
4794     {
4795     case BUILT_IN_FORK:
4796       id = get_identifier ("__gcov_fork");
4797       break;
4798 
4799     case BUILT_IN_EXECL:
4800       id = get_identifier ("__gcov_execl");
4801       break;
4802 
4803     case BUILT_IN_EXECV:
4804       id = get_identifier ("__gcov_execv");
4805       break;
4806 
4807     case BUILT_IN_EXECLP:
4808       id = get_identifier ("__gcov_execlp");
4809       break;
4810 
4811     case BUILT_IN_EXECLE:
4812       id = get_identifier ("__gcov_execle");
4813       break;
4814 
4815     case BUILT_IN_EXECVP:
4816       id = get_identifier ("__gcov_execvp");
4817       break;
4818 
4819     case BUILT_IN_EXECVE:
4820       id = get_identifier ("__gcov_execve");
4821       break;
4822 
4823     default:
4824       gcc_unreachable ();
4825     }
4826 
4827   decl = build_decl (DECL_SOURCE_LOCATION (fn),
4828 		     FUNCTION_DECL, id, TREE_TYPE (fn));
4829   DECL_EXTERNAL (decl) = 1;
4830   TREE_PUBLIC (decl) = 1;
4831   DECL_ARTIFICIAL (decl) = 1;
4832   TREE_NOTHROW (decl) = 1;
4833   DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
4834   DECL_VISIBILITY_SPECIFIED (decl) = 1;
4835   call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
4836   return expand_call (call, target, ignore);
4837 }
4838 
4839 
4840 
4841 /* Reconstitute a mode for a __sync intrinsic operation.  Since the type of
4842    the pointer in these functions is void*, the tree optimizers may remove
4843    casts.  The mode computed in expand_builtin isn't reliable either, due
4844    to __sync_bool_compare_and_swap.
4845 
4846    FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
4847    group of builtins.  This gives us log2 of the mode size.  */
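/* For example, the "_4" members of each group pass FCODE_DIFF == 2, so
   with the usual BITS_PER_UNIT of 8 this asks for a 32-bit integer
   mode.  */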
4848 
4849 static inline machine_mode
4850 get_builtin_sync_mode (int fcode_diff)
4851 {
4852   /* The size is not negotiable, so ask not to get BLKmode in return
4853      if the target indicates that a smaller size would be better.  */
4854   return mode_for_size (BITS_PER_UNIT << fcode_diff, MODE_INT, 0);
4855 }
4856 
4857 /* Expand the memory expression LOC and return the appropriate memory operand
4858    for the builtin_sync operations.  */
4859 
4860 static rtx
4861 get_builtin_sync_mem (tree loc, machine_mode mode)
4862 {
4863   rtx addr, mem;
4864 
4865   addr = expand_expr (loc, NULL_RTX, ptr_mode, EXPAND_SUM);
4866   addr = convert_memory_address (Pmode, addr);
4867 
4868   /* Note that we explicitly do not want any alias information for this
4869      memory, so that we kill all other live memories.  Otherwise we don't
4870      satisfy the full barrier semantics of the intrinsic.  */
4871   mem = validize_mem (gen_rtx_MEM (mode, addr));
4872 
4873   /* The alignment needs to be at least according to that of the mode.  */
4874   set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
4875 			   get_pointer_alignment (loc)));
4876   set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
4877   MEM_VOLATILE_P (mem) = 1;
4878 
4879   return mem;
4880 }
4881 
4882 /* Make sure an argument is in the right mode.
4883    EXP is the tree argument.
4884    MODE is the mode it should be in.  */
4885 
4886 static rtx
4887 expand_expr_force_mode (tree exp, machine_mode mode)
4888 {
4889   rtx val;
4890   machine_mode old_mode;
4891 
4892   val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
4893   /* If VAL is promoted to a wider mode, convert it back to MODE.  Take care
4894      of CONST_INTs, where we know the old_mode only from the call argument.  */
4895 
4896   old_mode = GET_MODE (val);
4897   if (old_mode == VOIDmode)
4898     old_mode = TYPE_MODE (TREE_TYPE (exp));
4899   val = convert_modes (mode, old_mode, val, 1);
4900   return val;
4901 }
4902 
4903 
4904 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
4905    EXP is the CALL_EXPR.  CODE is the rtx code
4906    that corresponds to the arithmetic or logical operation from the name;
4907    an exception here is that NOT actually means NAND.  TARGET is an optional
4908    place for us to store the results; AFTER is true for the xxx_and_fetch
4909    form (return the new value), false for the fetch_and_xxx form.  */
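/* For example, __sync_fetch_and_nand (ptr, val) atomically performs
   *ptr = ~(*ptr & val) and returns the value *ptr held before the
   operation (the GCC 4.4 semantics warned about below).  */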
4910 
4911 static rtx
4912 expand_builtin_sync_operation (machine_mode mode, tree exp,
4913 			       enum rtx_code code, bool after,
4914 			       rtx target)
4915 {
4916   rtx val, mem;
4917   location_t loc = EXPR_LOCATION (exp);
4918 
4919   if (code == NOT && warn_sync_nand)
4920     {
4921       tree fndecl = get_callee_fndecl (exp);
4922       enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4923 
4924       static bool warned_f_a_n, warned_n_a_f;
4925 
4926       switch (fcode)
4927 	{
4928 	case BUILT_IN_SYNC_FETCH_AND_NAND_1:
4929 	case BUILT_IN_SYNC_FETCH_AND_NAND_2:
4930 	case BUILT_IN_SYNC_FETCH_AND_NAND_4:
4931 	case BUILT_IN_SYNC_FETCH_AND_NAND_8:
4932 	case BUILT_IN_SYNC_FETCH_AND_NAND_16:
4933 	  if (warned_f_a_n)
4934 	    break;
4935 
4936 	  fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
4937 	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
4938 	  warned_f_a_n = true;
4939 	  break;
4940 
4941 	case BUILT_IN_SYNC_NAND_AND_FETCH_1:
4942 	case BUILT_IN_SYNC_NAND_AND_FETCH_2:
4943 	case BUILT_IN_SYNC_NAND_AND_FETCH_4:
4944 	case BUILT_IN_SYNC_NAND_AND_FETCH_8:
4945 	case BUILT_IN_SYNC_NAND_AND_FETCH_16:
4946 	  if (warned_n_a_f)
4947 	    break;
4948 
4949 	 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
4950 	  inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
4951 	  warned_n_a_f = true;
4952 	  break;
4953 
4954 	default:
4955 	  gcc_unreachable ();
4956 	}
4957     }
4958 
4959   /* Expand the operands.  */
4960   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
4961   val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
4962 
4963   return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
4964 				 after);
4965 }
4966 
4967 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
4968    intrinsics. EXP is the CALL_EXPR.  IS_BOOL is
4969    true if this is the boolean form.  TARGET is a place for us to store the
4970    results; this is NOT optional if IS_BOOL is true.  */
4971 
4972 static rtx
4973 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
4974 				 bool is_bool, rtx target)
4975 {
4976   rtx old_val, new_val, mem;
4977   rtx *pbool, *poval;
4978 
4979   /* Expand the operands.  */
4980   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
4981   old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
4982   new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
4983 
4984   pbool = poval = NULL;
4985   if (target != const0_rtx)
4986     {
4987       if (is_bool)
4988 	pbool = &target;
4989       else
4990 	poval = &target;
4991     }
4992   if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
4993 				       false, MEMMODEL_SYNC_SEQ_CST,
4994 				       MEMMODEL_SYNC_SEQ_CST))
4995     return NULL_RTX;
4996 
4997   return target;
4998 }
4999 
5000 /* Expand the __sync_lock_test_and_set intrinsic.  Note that the most
5001    general form is actually an atomic exchange, and some targets only
5002    support a reduced form with the second argument being a constant 1.
5003    EXP is the CALL_EXPR; TARGET is an optional place for us to store
5004    the results.  */
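/* An illustrative use: a minimal spin lock acquires with

     while (__sync_lock_test_and_set (&lock, 1))
       ;

   and releases with __sync_lock_release (&lock); such a lock only ever
   stores the constant 1, which is why the reduced form suffices.  */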
5005 
5006 static rtx
5007 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
5008 				       rtx target)
5009 {
5010   rtx val, mem;
5011 
5012   /* Expand the operands.  */
5013   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5014   val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5015 
5016   return expand_sync_lock_test_and_set (target, mem, val);
5017 }
5018 
5019 /* Expand the __sync_lock_release intrinsic.  EXP is the CALL_EXPR.  */
5020 
5021 static void
5022 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
5023 {
5024   rtx mem;
5025 
5026   /* Expand the operands.  */
5027   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5028 
5029   expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
5030 }
5031 
5032 /* Given an integer representing an ``enum memmodel'', verify its
5033    correctness and return the memory model enum.  */
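/* For example, a call such as __atomic_store_n (p, v, __ATOMIC_RELEASE)
   normally arrives here with EXP being the INTEGER_CST 3
   (MEMMODEL_RELEASE); target-specific bits outside MEMMODEL_MASK, if
   any, are validated by the memmodel_check hook below.  */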
5034 
5035 static enum memmodel
5036 get_memmodel (tree exp)
5037 {
5038   rtx op;
5039   unsigned HOST_WIDE_INT val;
5040   source_location loc
5041     = expansion_point_location_if_in_system_header (input_location);
5042 
5043   /* If the parameter is not a constant, it's a run time value so we'll just
5044      convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking.  */
5045   if (TREE_CODE (exp) != INTEGER_CST)
5046     return MEMMODEL_SEQ_CST;
5047 
5048   op = expand_normal (exp);
5049 
5050   val = INTVAL (op);
5051   if (targetm.memmodel_check)
5052     val = targetm.memmodel_check (val);
5053   else if (val & ~MEMMODEL_MASK)
5054     {
5055       warning_at (loc, OPT_Winvalid_memory_model,
5056 		  "unknown architecture specifier in memory model to builtin");
5057       return MEMMODEL_SEQ_CST;
5058     }
5059 
5060   /* Should never see a user explicit SYNC memory model, so >= LAST works.  */
5061   if (memmodel_base (val) >= MEMMODEL_LAST)
5062     {
5063       warning_at (loc, OPT_Winvalid_memory_model,
5064 		  "invalid memory model argument to builtin");
5065       return MEMMODEL_SEQ_CST;
5066     }
5067 
5068   /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
5069      be conservative and promote consume to acquire.  */
5070   if (val == MEMMODEL_CONSUME)
5071     val = MEMMODEL_ACQUIRE;
5072 
5073   return (enum memmodel) val;
5074 }
5075 
5076 /* Expand the __atomic_exchange intrinsic:
5077    	TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
5078    EXP is the CALL_EXPR.
5079    TARGET is an optional place for us to store the results.  */
5080 
5081 static rtx
5082 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
5083 {
5084   rtx val, mem;
5085   enum memmodel model;
5086 
5087   model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5088 
5089   if (!flag_inline_atomics)
5090     return NULL_RTX;
5091 
5092   /* Expand the operands.  */
5093   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5094   val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5095 
5096   return expand_atomic_exchange (target, mem, val, model);
5097 }
5098 
5099 /* Expand the __atomic_compare_exchange intrinsic:
5100    	bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
5101 					TYPE desired, BOOL weak,
5102 					enum memmodel success,
5103 					enum memmodel failure)
5104    EXP is the CALL_EXPR.
5105    TARGET is an optional place for us to store the results.  */
5106 
5107 static rtx
5108 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
5109 					rtx target)
5110 {
5111   rtx expect, desired, mem, oldval;
5112   rtx_code_label *label;
5113   enum memmodel success, failure;
5114   tree weak;
5115   bool is_weak;
5116   source_location loc
5117     = expansion_point_location_if_in_system_header (input_location);
5118 
5119   success = get_memmodel (CALL_EXPR_ARG (exp, 4));
5120   failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
5121 
5122   if (failure > success)
5123     {
5124       warning_at (loc, OPT_Winvalid_memory_model,
5125 		  "failure memory model cannot be stronger than success "
5126 		  "memory model for %<__atomic_compare_exchange%>");
5127       success = MEMMODEL_SEQ_CST;
5128     }
5129 
5130   if (is_mm_release (failure) || is_mm_acq_rel (failure))
5131     {
5132       warning_at (loc, OPT_Winvalid_memory_model,
5133 		  "invalid failure memory model for "
5134 		  "%<__atomic_compare_exchange%>");
5135       failure = MEMMODEL_SEQ_CST;
5136       success = MEMMODEL_SEQ_CST;
5137     }
5138 
5139 
5140   if (!flag_inline_atomics)
5141     return NULL_RTX;
5142 
5143   /* Expand the operands.  */
5144   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5145 
5146   expect = expand_normal (CALL_EXPR_ARG (exp, 1));
5147   expect = convert_memory_address (Pmode, expect);
5148   expect = gen_rtx_MEM (mode, expect);
5149   desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
5150 
5151   weak = CALL_EXPR_ARG (exp, 3);
5152   is_weak = false;
5153   if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
5154     is_weak = true;
5155 
5156   if (target == const0_rtx)
5157     target = NULL;
5158 
5159   /* Lest the rtl backend create a race condition with an improper store
5160      to memory, always create a new pseudo for OLDVAL.  */
5161   oldval = NULL;
5162 
5163   if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
5164 				       is_weak, success, failure))
5165     return NULL_RTX;
5166 
5167   /* Conditionally store back to EXPECT, lest we create a race condition
5168      with an improper store to memory.  */
5169   /* ??? With a rearrangement of atomics at the gimple level, we can handle
5170      the normal case where EXPECT is totally private, i.e. a register.  At
5171      which point the store can be unconditional.  */
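  /* In pseudo-code, the sequence emitted below behaves like
       ok = CAS (mem, *expect, desired);
       if (!ok)
         *expect = oldval;
     where OLDVAL is the value the compare-and-swap observed in MEM.  */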
5172   label = gen_label_rtx ();
5173   emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
5174 			   GET_MODE (target), 1, label);
5175   emit_move_insn (expect, oldval);
5176   emit_label (label);
5177 
5178   return target;
5179 }
5180 
5181 /* Expand the __atomic_load intrinsic:
5182    	TYPE __atomic_load (TYPE *object, enum memmodel)
5183    EXP is the CALL_EXPR.
5184    TARGET is an optional place for us to store the results.  */
5185 
5186 static rtx
5187 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
5188 {
5189   rtx mem;
5190   enum memmodel model;
5191 
5192   model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5193   if (is_mm_release (model) || is_mm_acq_rel (model))
5194     {
5195       source_location loc
5196 	= expansion_point_location_if_in_system_header (input_location);
5197       warning_at (loc, OPT_Winvalid_memory_model,
5198 		  "invalid memory model for %<__atomic_load%>");
5199       model = MEMMODEL_SEQ_CST;
5200     }
5201 
5202   if (!flag_inline_atomics)
5203     return NULL_RTX;
5204 
5205   /* Expand the operand.  */
5206   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5207 
5208   return expand_atomic_load (target, mem, model);
5209 }
5210 
5211 
5212 /* Expand the __atomic_store intrinsic:
5213    	void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
5214    EXP is the CALL_EXPR.
5215    TARGET is an optional place for us to store the results.  */
5216 
5217 static rtx
5218 expand_builtin_atomic_store (machine_mode mode, tree exp)
5219 {
5220   rtx mem, val;
5221   enum memmodel model;
5222 
5223   model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5224   if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
5225 	|| is_mm_release (model)))
5226     {
5227       source_location loc
5228 	= expansion_point_location_if_in_system_header (input_location);
5229       warning_at (loc, OPT_Winvalid_memory_model,
5230 		  "invalid memory model for %<__atomic_store%>");
5231       model = MEMMODEL_SEQ_CST;
5232     }
5233 
5234   if (!flag_inline_atomics)
5235     return NULL_RTX;
5236 
5237   /* Expand the operands.  */
5238   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5239   val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5240 
5241   return expand_atomic_store (mem, val, model, false);
5242 }
5243 
5244 /* Expand the __atomic_fetch_XXX intrinsic:
5245    	TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
5246    EXP is the CALL_EXPR.
5247    TARGET is an optional place for us to store the results.
5248    CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (meaning NAND).
5249    FETCH_AFTER is true if returning the result of the operation.
5250    FETCH_AFTER is false if returning the value before the operation.
5251    IGNORE is true if the result is not used.
5252    EXT_CALL is the correct builtin for an external call if this cannot be
5253    resolved to an instruction sequence.  */
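/* For instance, if __atomic_add_fetch_4 cannot be expanded inline, the
   call is redirected to __atomic_fetch_add_4 (the EXT_CALL builtin) and
   the addition is replayed on the returned value to recover the "after"
   result.  */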
5254 
5255 static rtx
5256 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
5257 				enum rtx_code code, bool fetch_after,
5258 				bool ignore, enum built_in_function ext_call)
5259 {
5260   rtx val, mem, ret;
5261   enum memmodel model;
5262   tree fndecl;
5263   tree addr;
5264 
5265   model = get_memmodel (CALL_EXPR_ARG (exp, 2));
5266 
5267   /* Expand the operands.  */
5268   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5269   val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
5270 
5271   /* Only try generating instructions if inlining is turned on.  */
5272   if (flag_inline_atomics)
5273     {
5274       ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
5275       if (ret)
5276 	return ret;
5277     }
5278 
5279   /* Return if a different routine isn't needed for the library call.  */
5280   if (ext_call == BUILT_IN_NONE)
5281     return NULL_RTX;
5282 
5283   /* Change the call to the specified function.  */
5284   fndecl = get_callee_fndecl (exp);
5285   addr = CALL_EXPR_FN (exp);
5286   STRIP_NOPS (addr);
5287 
5288   gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
5289   TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
5290 
5291   /* If we will emit code after the call, the call can not be a tail call.
5292      If it is emitted as a tail call, a barrier is emitted after it, and
5293      then all trailing code is removed.  */
5294   if (!ignore)
5295     CALL_EXPR_TAILCALL (exp) = 0;
5296 
5297   /* Expand the call here so we can emit trailing code.  */
5298   ret = expand_call (exp, target, ignore);
5299 
5300   /* Replace the original function just in case it matters.  */
5301   TREE_OPERAND (addr, 0) = fndecl;
5302 
5303   /* Then issue the arithmetic correction to return the right result.  */
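  /* The external routine returns the pre-operation value, so for NAND
     (represented by NOT here) the post-operation value is recomputed as
     ~(ret & val); for the other codes a single binop on RET suffices.  */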
5304   if (!ignore)
5305     {
5306       if (code == NOT)
5307 	{
5308 	  ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
5309 				     OPTAB_LIB_WIDEN);
5310 	  ret = expand_simple_unop (mode, NOT, ret, target, true);
5311 	}
5312       else
5313 	ret = expand_simple_binop (mode, code, ret, val, target, true,
5314 				   OPTAB_LIB_WIDEN);
5315     }
5316   return ret;
5317 }
5318 
5319 /* Expand an atomic clear operation.
5320 	void __atomic_clear (BOOL *obj, enum memmodel)
5321    EXP is the call expression.  */
5322 
5323 static rtx
5324 expand_builtin_atomic_clear (tree exp)
5325 {
5326   machine_mode mode;
5327   rtx mem, ret;
5328   enum memmodel model;
5329 
5330   mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5331   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5332   model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5333 
5334   if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
5335     {
5336       source_location loc
5337 	= expansion_point_location_if_in_system_header (input_location);
5338       warning_at (loc, OPT_Winvalid_memory_model,
5339 		  "invalid memory model for %<__atomic_store%>");
5340       model = MEMMODEL_SEQ_CST;
5341     }
5342 
5343   /* Try issuing an __atomic_store, and allow fallback to __sync_lock_release.
5344      Failing that, a store is issued by __atomic_store.  The only way this can
5345      fail is if the bool type is larger than a word size.  Unlikely, but
5346      handle it anyway for completeness.  Assume a single threaded model since
5347      there is no atomic support in this case, and no barriers are required.  */
5348   ret = expand_atomic_store (mem, const0_rtx, model, true);
5349   if (!ret)
5350     emit_move_insn (mem, const0_rtx);
5351   return const0_rtx;
5352 }
5353 
5354 /* Expand an atomic test_and_set operation.
5355 	bool __atomic_test_and_set (BOOL *obj, enum memmodel)
5356    EXP is the call expression.  */
5357 
5358 static rtx
5359 expand_builtin_atomic_test_and_set (tree exp, rtx target)
5360 {
5361   rtx mem;
5362   enum memmodel model;
5363   machine_mode mode;
5364 
5365   mode = mode_for_size (BOOL_TYPE_SIZE, MODE_INT, 0);
5366   mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
5367   model = get_memmodel (CALL_EXPR_ARG (exp, 1));
5368 
5369   return expand_atomic_test_and_set (target, mem, model);
5370 }
5371 
5372 
5373 /* Return true if (optional) argument ARG1 of size ARG0 is always lock free on
5374    this architecture.  If ARG1 is NULL, use typical alignment for size ARG0.  */
5375 
5376 static tree
5377 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
5378 {
5379   int size;
5380   machine_mode mode;
5381   unsigned int mode_align, type_align;
5382 
5383   if (TREE_CODE (arg0) != INTEGER_CST)
5384     return NULL_TREE;
5385 
5386   size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
5387   mode = mode_for_size (size, MODE_INT, 0);
5388   mode_align = GET_MODE_ALIGNMENT (mode);
5389 
5390   if (TREE_CODE (arg1) == INTEGER_CST)
5391     {
5392       unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
5393 
5394       /* Either this argument is null, or it's a fake pointer encoding
5395          the alignment of the object.  */
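      /* For instance, a fake pointer value of 8 describes an object
         known to be 8-byte aligned: 8 & -8 == 8, and multiplying by
         BITS_PER_UNIT gives 64 bits.  */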
5396       val = val & -val;
5397       val *= BITS_PER_UNIT;
5398 
5399       if (val == 0 || mode_align < val)
5400         type_align = mode_align;
5401       else
5402         type_align = val;
5403     }
5404   else
5405     {
5406       tree ttype = TREE_TYPE (arg1);
5407 
5408       /* This function is usually invoked and folded immediately by the front
5409 	 end before anything else has a chance to look at it.  The pointer
5410 	 parameter at this point is usually cast to a void *, so check for that
5411 	 and look past the cast.  */
5412       if (CONVERT_EXPR_P (arg1)
5413 	  && POINTER_TYPE_P (ttype)
5414 	  && VOID_TYPE_P (TREE_TYPE (ttype))
5415 	  && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
5416 	arg1 = TREE_OPERAND (arg1, 0);
5417 
5418       ttype = TREE_TYPE (arg1);
5419       gcc_assert (POINTER_TYPE_P (ttype));
5420 
5421       /* Get the underlying type of the object.  */
5422       ttype = TREE_TYPE (ttype);
5423       type_align = TYPE_ALIGN (ttype);
5424     }
5425 
5426   /* If the object has smaller alignment, the lock free routines cannot
5427      be used.  */
5428   if (type_align < mode_align)
5429     return boolean_false_node;
5430 
5431   /* Check if a compare_and_swap pattern exists for the mode which represents
5432      the required size.  The pattern is not allowed to fail, so the existence
5433      of the pattern indicates support is present.  */
5434   if (can_compare_and_swap_p (mode, true))
5435     return boolean_true_node;
5436   else
5437     return boolean_false_node;
5438 }
5439 
5440 /* Return true if the parameters to call EXP represent an object which will
5441    always generate lock free instructions.  The first argument represents the
5442    size of the object, and the second parameter is a pointer to the object
5443    itself.  If NULL is passed for the object, then the result is based on
5444    typical alignment for an object of the specified size.  Otherwise return
5445    false.  */
5446 
5447 static rtx
5448 expand_builtin_atomic_always_lock_free (tree exp)
5449 {
5450   tree size;
5451   tree arg0 = CALL_EXPR_ARG (exp, 0);
5452   tree arg1 = CALL_EXPR_ARG (exp, 1);
5453 
5454   if (TREE_CODE (arg0) != INTEGER_CST)
5455     {
5456       error ("non-constant argument 1 to __atomic_always_lock_free");
5457       return const0_rtx;
5458     }
5459 
5460   size = fold_builtin_atomic_always_lock_free (arg0, arg1);
5461   if (size == boolean_true_node)
5462     return const1_rtx;
5463   return const0_rtx;
5464 }
5465 
5466 /* Return one or zero if it can be determined that object ARG1 of size ARG0
5467    is lock free on this architecture.  */
5468 
5469 static tree
5470 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
5471 {
5472   if (!flag_inline_atomics)
5473     return NULL_TREE;
5474 
5475   /* If it isn't always lock free, don't generate a result.  */
5476   if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
5477     return boolean_true_node;
5478 
5479   return NULL_TREE;
5480 }
5481 
5482 /* Return true if the parameters to call EXP represent an object which will
5483    always generate lock free instructions.  The first argument represents the
5484    size of the object, and the second parameter is a pointer to the object
5485    itself.  If NULL is passed for the object, then the result is based on
5486    typical alignment for an object of the specified size.  Otherwise return
5487    NULL.  */
5488 
5489 static rtx
5490 expand_builtin_atomic_is_lock_free (tree exp)
5491 {
5492   tree size;
5493   tree arg0 = CALL_EXPR_ARG (exp, 0);
5494   tree arg1 = CALL_EXPR_ARG (exp, 1);
5495 
5496   if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5497     {
5498       error ("non-integer argument 1 to __atomic_is_lock_free");
5499       return NULL_RTX;
5500     }
5501 
5502   if (!flag_inline_atomics)
5503     return NULL_RTX;
5504 
5505   /* If the value is known at compile time, return the RTX for it.  */
5506   size = fold_builtin_atomic_is_lock_free (arg0, arg1);
5507   if (size == boolean_true_node)
5508     return const1_rtx;
5509 
5510   return NULL_RTX;
5511 }
5512 
5513 /* Expand the __atomic_thread_fence intrinsic:
5514    	void __atomic_thread_fence (enum memmodel)
5515    EXP is the CALL_EXPR.  */
5516 
5517 static void
5518 expand_builtin_atomic_thread_fence (tree exp)
5519 {
5520   enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5521   expand_mem_thread_fence (model);
5522 }
5523 
5524 /* Expand the __atomic_signal_fence intrinsic:
5525    	void __atomic_signal_fence (enum memmodel)
5526    EXP is the CALL_EXPR.  */
5527 
5528 static void
5529 expand_builtin_atomic_signal_fence (tree exp)
5530 {
5531   enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
5532   expand_mem_signal_fence (model);
5533 }
5534 
5535 /* Expand the __sync_synchronize intrinsic.  */
5536 
5537 static void
5538 expand_builtin_sync_synchronize (void)
5539 {
5540   expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
5541 }
5542 
5543 static rtx
5544 expand_builtin_thread_pointer (tree exp, rtx target)
5545 {
5546   enum insn_code icode;
5547   if (!validate_arglist (exp, VOID_TYPE))
5548     return const0_rtx;
5549   icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
5550   if (icode != CODE_FOR_nothing)
5551     {
5552       struct expand_operand op;
5553       /* If the target is not suitable then create a new target.  */
5554       if (target == NULL_RTX
5555 	  || !REG_P (target)
5556 	  || GET_MODE (target) != Pmode)
5557 	target = gen_reg_rtx (Pmode);
5558       create_output_operand (&op, target, Pmode);
5559       expand_insn (icode, 1, &op);
5560       return target;
5561     }
5562   error ("__builtin_thread_pointer is not supported on this target");
5563   return const0_rtx;
5564 }
5565 
5566 static void
5567 expand_builtin_set_thread_pointer (tree exp)
5568 {
5569   enum insn_code icode;
5570   if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5571     return;
5572   icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
5573   if (icode != CODE_FOR_nothing)
5574     {
5575       struct expand_operand op;
5576       rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
5577 			     Pmode, EXPAND_NORMAL);
5578       create_input_operand (&op, val, Pmode);
5579       expand_insn (icode, 1, &op);
5580       return;
5581     }
5582   error ("__builtin_set_thread_pointer is not supported on this target");
5583 }
5584 
5585 
5586 /* Emit code to restore the current value of stack.  */
5587 
5588 static void
5589 expand_stack_restore (tree var)
5590 {
5591   rtx_insn *prev;
5592   rtx sa = expand_normal (var);
5593 
5594   sa = convert_memory_address (Pmode, sa);
5595 
5596   prev = get_last_insn ();
5597   emit_stack_restore (SAVE_BLOCK, sa);
5598 
5599   record_new_stack_level ();
5600 
5601   fixup_args_size_notes (prev, get_last_insn (), 0);
5602 }
5603 
5604 /* Emit code to save the current value of stack.  */
5605 
5606 static rtx
5607 expand_stack_save (void)
5608 {
5609   rtx ret = NULL_RTX;
5610 
5611   emit_stack_save (SAVE_BLOCK, &ret);
5612   return ret;
5613 }
5614 
5615 
5616 /* Expand an expression EXP that calls a built-in function,
5617    with result going to TARGET if that's convenient
5618    (and in mode MODE if that's convenient).
5619    SUBTARGET may be used as the target for computing one of EXP's operands.
5620    IGNORE is nonzero if the value is to be ignored.  */
5621 
5622 rtx
5623 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
5624 		int ignore)
5625 {
5626   tree fndecl = get_callee_fndecl (exp);
5627   enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
5628   machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
5629   int flags;
5630 
5631   if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
5632     return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
5633 
5634   /* When ASan is enabled, we don't want to expand some memory/string
5635      builtins and rely on libsanitizer's hooks.  This allows us to avoid
5636      redundant checks and be sure, that possible overflow will be detected
5637      by ASan.  */
5638 
5639   if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
5640     return expand_call (exp, target, ignore);
5641 
5642   /* When not optimizing, generate calls to library functions for a certain
5643      set of builtins.  */
5644   if (!optimize
5645       && !called_as_built_in (fndecl)
5646       && fcode != BUILT_IN_FORK
5647       && fcode != BUILT_IN_EXECL
5648       && fcode != BUILT_IN_EXECV
5649       && fcode != BUILT_IN_EXECLP
5650       && fcode != BUILT_IN_EXECLE
5651       && fcode != BUILT_IN_EXECVP
5652       && fcode != BUILT_IN_EXECVE
5653       && fcode != BUILT_IN_ALLOCA
5654       && fcode != BUILT_IN_ALLOCA_WITH_ALIGN
5655       && fcode != BUILT_IN_FREE
5656       && fcode != BUILT_IN_CHKP_SET_PTR_BOUNDS
5657       && fcode != BUILT_IN_CHKP_INIT_PTR_BOUNDS
5658       && fcode != BUILT_IN_CHKP_NULL_PTR_BOUNDS
5659       && fcode != BUILT_IN_CHKP_COPY_PTR_BOUNDS
5660       && fcode != BUILT_IN_CHKP_NARROW_PTR_BOUNDS
5661       && fcode != BUILT_IN_CHKP_STORE_PTR_BOUNDS
5662       && fcode != BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
5663       && fcode != BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
5664       && fcode != BUILT_IN_CHKP_CHECK_PTR_BOUNDS
5665       && fcode != BUILT_IN_CHKP_GET_PTR_LBOUND
5666       && fcode != BUILT_IN_CHKP_GET_PTR_UBOUND
5667       && fcode != BUILT_IN_CHKP_BNDRET)
5668     return expand_call (exp, target, ignore);
5669 
5670   /* The built-in function expanders test for target == const0_rtx
5671      to determine whether the function's result will be ignored.  */
5672   if (ignore)
5673     target = const0_rtx;
5674 
5675   /* If the result of a pure or const built-in function is ignored, and
5676      none of its arguments are volatile, we can avoid expanding the
5677      built-in call and just evaluate the arguments for side-effects.  */
5678   if (target == const0_rtx
5679       && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
5680       && !(flags & ECF_LOOPING_CONST_OR_PURE))
5681     {
5682       bool volatilep = false;
5683       tree arg;
5684       call_expr_arg_iterator iter;
5685 
5686       FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5687 	if (TREE_THIS_VOLATILE (arg))
5688 	  {
5689 	    volatilep = true;
5690 	    break;
5691 	  }
5692 
5693       if (! volatilep)
5694 	{
5695 	  FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
5696 	    expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
5697 	  return const0_rtx;
5698 	}
5699     }
5700 
5701   /* expand_builtin_with_bounds is supposed to be used for
5702      instrumented builtin calls.  */
5703   gcc_assert (!CALL_WITH_BOUNDS_P (exp));
5704 
5705   switch (fcode)
5706     {
5707     CASE_FLT_FN (BUILT_IN_FABS):
5708     case BUILT_IN_FABSD32:
5709     case BUILT_IN_FABSD64:
5710     case BUILT_IN_FABSD128:
5711       target = expand_builtin_fabs (exp, target, subtarget);
5712       if (target)
5713 	return target;
5714       break;
5715 
5716     CASE_FLT_FN (BUILT_IN_COPYSIGN):
5717       target = expand_builtin_copysign (exp, target, subtarget);
5718       if (target)
5719 	return target;
5720       break;
5721 
5722       /* Just do a normal library call if we were unable to fold
5723 	 the values.  */
5724     CASE_FLT_FN (BUILT_IN_CABS):
5725       break;
5726 
5727     CASE_FLT_FN (BUILT_IN_FMA):
5728       target = expand_builtin_mathfn_ternary (exp, target, subtarget);
5729       if (target)
5730 	return target;
5731       break;
5732 
5733     CASE_FLT_FN (BUILT_IN_ILOGB):
5734       if (! flag_unsafe_math_optimizations)
5735 	break;
5736     CASE_FLT_FN (BUILT_IN_ISINF):
5737     CASE_FLT_FN (BUILT_IN_FINITE):
5738     case BUILT_IN_ISFINITE:
5739     case BUILT_IN_ISNORMAL:
5740       target = expand_builtin_interclass_mathfn (exp, target);
5741       if (target)
5742 	return target;
5743       break;
5744 
5745     CASE_FLT_FN (BUILT_IN_ICEIL):
5746     CASE_FLT_FN (BUILT_IN_LCEIL):
5747     CASE_FLT_FN (BUILT_IN_LLCEIL):
5748     CASE_FLT_FN (BUILT_IN_LFLOOR):
5749     CASE_FLT_FN (BUILT_IN_IFLOOR):
5750     CASE_FLT_FN (BUILT_IN_LLFLOOR):
5751       target = expand_builtin_int_roundingfn (exp, target);
5752       if (target)
5753 	return target;
5754       break;
5755 
5756     CASE_FLT_FN (BUILT_IN_IRINT):
5757     CASE_FLT_FN (BUILT_IN_LRINT):
5758     CASE_FLT_FN (BUILT_IN_LLRINT):
5759     CASE_FLT_FN (BUILT_IN_IROUND):
5760     CASE_FLT_FN (BUILT_IN_LROUND):
5761     CASE_FLT_FN (BUILT_IN_LLROUND):
5762       target = expand_builtin_int_roundingfn_2 (exp, target);
5763       if (target)
5764 	return target;
5765       break;
5766 
5767     CASE_FLT_FN (BUILT_IN_POWI):
5768       target = expand_builtin_powi (exp, target);
5769       if (target)
5770 	return target;
5771       break;
5772 
5773     CASE_FLT_FN (BUILT_IN_CEXPI):
5774       target = expand_builtin_cexpi (exp, target);
5775       gcc_assert (target);
5776       return target;
5777 
5778     CASE_FLT_FN (BUILT_IN_SIN):
5779     CASE_FLT_FN (BUILT_IN_COS):
5780       if (! flag_unsafe_math_optimizations)
5781 	break;
5782       target = expand_builtin_mathfn_3 (exp, target, subtarget);
5783       if (target)
5784 	return target;
5785       break;
5786 
5787     CASE_FLT_FN (BUILT_IN_SINCOS):
5788       if (! flag_unsafe_math_optimizations)
5789 	break;
5790       target = expand_builtin_sincos (exp);
5791       if (target)
5792 	return target;
5793       break;
5794 
5795     case BUILT_IN_APPLY_ARGS:
5796       return expand_builtin_apply_args ();
5797 
5798       /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5799 	 FUNCTION with a copy of the parameters described by
5800 	 ARGUMENTS, and ARGSIZE.  It returns a block of memory
5801 	 allocated on the stack into which is stored all the registers
5802 	 that might possibly be used for returning the result of a
5803 	 function.  ARGUMENTS is the value returned by
5804 	 __builtin_apply_args.  ARGSIZE is the number of bytes of
5805 	 arguments that must be copied.  ??? How should this value be
5806 	 computed?  We'll also need a safe worst case value for varargs
5807 	 functions.  */
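      /* An illustrative (hypothetical) use of this interface:

           void *args = __builtin_apply_args ();
           void *result = __builtin_apply (other_fn, args, 128);
           __builtin_return (result);

         where 128 is a caller-chosen upper bound on the size of the
         argument block and OTHER_FN is the function being forwarded
         to.  */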
5808     case BUILT_IN_APPLY:
5809       if (!validate_arglist (exp, POINTER_TYPE,
5810 			     POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
5811 	  && !validate_arglist (exp, REFERENCE_TYPE,
5812 				POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5813 	return const0_rtx;
5814       else
5815 	{
5816 	  rtx ops[3];
5817 
5818 	  ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
5819 	  ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
5820 	  ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
5821 
5822 	  return expand_builtin_apply (ops[0], ops[1], ops[2]);
5823 	}
5824 
5825       /* __builtin_return (RESULT) causes the function to return the
5826 	 value described by RESULT.  RESULT is address of the block of
5827 	 memory returned by __builtin_apply.  */
5828     case BUILT_IN_RETURN:
5829       if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
5830 	expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
5831       return const0_rtx;
5832 
5833     case BUILT_IN_SAVEREGS:
5834       return expand_builtin_saveregs ();
5835 
5836     case BUILT_IN_VA_ARG_PACK:
5837       /* All valid uses of __builtin_va_arg_pack () are removed during
5838 	 inlining.  */
5839       error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
5840       return const0_rtx;
5841 
5842     case BUILT_IN_VA_ARG_PACK_LEN:
5843       /* All valid uses of __builtin_va_arg_pack_len () are removed during
5844 	 inlining.  */
5845       error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
5846       return const0_rtx;
5847 
5848       /* Return the address of the first anonymous stack arg.  */
5849     case BUILT_IN_NEXT_ARG:
5850       if (fold_builtin_next_arg (exp, false))
5851 	return const0_rtx;
5852       return expand_builtin_next_arg ();
5853 
5854     case BUILT_IN_CLEAR_CACHE:
5855       target = expand_builtin___clear_cache (exp);
5856       if (target)
5857         return target;
5858       break;
5859 
5860     case BUILT_IN_CLASSIFY_TYPE:
5861       return expand_builtin_classify_type (exp);
5862 
5863     case BUILT_IN_CONSTANT_P:
5864       return const0_rtx;
5865 
5866     case BUILT_IN_FRAME_ADDRESS:
5867     case BUILT_IN_RETURN_ADDRESS:
5868       return expand_builtin_frame_address (fndecl, exp);
5869 
5870     /* Returns the address of the area where the structure is returned.
5871        0 otherwise.  */
5872     case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
5873       if (call_expr_nargs (exp) != 0
5874 	  || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
5875 	  || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
5876 	return const0_rtx;
5877       else
5878 	return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
5879 
5880     case BUILT_IN_ALLOCA:
5881     case BUILT_IN_ALLOCA_WITH_ALIGN:
5882       /* If the allocation stems from the declaration of a variable-sized
5883 	 object, it cannot accumulate.  */
5884       target = expand_builtin_alloca (exp, CALL_ALLOCA_FOR_VAR_P (exp));
5885       if (target)
5886 	return target;
5887       break;
5888 
5889     case BUILT_IN_STACK_SAVE:
5890       return expand_stack_save ();
5891 
5892     case BUILT_IN_STACK_RESTORE:
5893       expand_stack_restore (CALL_EXPR_ARG (exp, 0));
5894       return const0_rtx;
5895 
5896     case BUILT_IN_BSWAP16:
5897     case BUILT_IN_BSWAP32:
5898     case BUILT_IN_BSWAP64:
5899       target = expand_builtin_bswap (target_mode, exp, target, subtarget);
5900       if (target)
5901 	return target;
5902       break;
5903 
5904     CASE_INT_FN (BUILT_IN_FFS):
5905       target = expand_builtin_unop (target_mode, exp, target,
5906 				    subtarget, ffs_optab);
5907       if (target)
5908 	return target;
5909       break;
5910 
5911     CASE_INT_FN (BUILT_IN_CLZ):
5912       target = expand_builtin_unop (target_mode, exp, target,
5913 				    subtarget, clz_optab);
5914       if (target)
5915 	return target;
5916       break;
5917 
5918     CASE_INT_FN (BUILT_IN_CTZ):
5919       target = expand_builtin_unop (target_mode, exp, target,
5920 				    subtarget, ctz_optab);
5921       if (target)
5922 	return target;
5923       break;
5924 
5925     CASE_INT_FN (BUILT_IN_CLRSB):
5926       target = expand_builtin_unop (target_mode, exp, target,
5927 				    subtarget, clrsb_optab);
5928       if (target)
5929 	return target;
5930       break;
5931 
5932     CASE_INT_FN (BUILT_IN_POPCOUNT):
5933       target = expand_builtin_unop (target_mode, exp, target,
5934 				    subtarget, popcount_optab);
5935       if (target)
5936 	return target;
5937       break;
5938 
5939     CASE_INT_FN (BUILT_IN_PARITY):
5940       target = expand_builtin_unop (target_mode, exp, target,
5941 				    subtarget, parity_optab);
5942       if (target)
5943 	return target;
5944       break;
5945 
5946     case BUILT_IN_STRLEN:
5947       target = expand_builtin_strlen (exp, target, target_mode);
5948       if (target)
5949 	return target;
5950       break;
5951 
5952     case BUILT_IN_STRCPY:
5953       target = expand_builtin_strcpy (exp, target);
5954       if (target)
5955 	return target;
5956       break;
5957 
5958     case BUILT_IN_STRNCPY:
5959       target = expand_builtin_strncpy (exp, target);
5960       if (target)
5961 	return target;
5962       break;
5963 
5964     case BUILT_IN_STPCPY:
5965       target = expand_builtin_stpcpy (exp, target, mode);
5966       if (target)
5967 	return target;
5968       break;
5969 
5970     case BUILT_IN_MEMCPY:
5971       target = expand_builtin_memcpy (exp, target);
5972       if (target)
5973 	return target;
5974       break;
5975 
5976     case BUILT_IN_MEMPCPY:
5977       target = expand_builtin_mempcpy (exp, target, mode);
5978       if (target)
5979 	return target;
5980       break;
5981 
5982     case BUILT_IN_MEMSET:
5983       target = expand_builtin_memset (exp, target, mode);
5984       if (target)
5985 	return target;
5986       break;
5987 
5988     case BUILT_IN_BZERO:
5989       target = expand_builtin_bzero (exp);
5990       if (target)
5991 	return target;
5992       break;
5993 
5994     case BUILT_IN_STRCMP:
5995       target = expand_builtin_strcmp (exp, target);
5996       if (target)
5997 	return target;
5998       break;
5999 
6000     case BUILT_IN_STRNCMP:
6001       target = expand_builtin_strncmp (exp, target, mode);
6002       if (target)
6003 	return target;
6004       break;
6005 
6006     case BUILT_IN_BCMP:
6007     case BUILT_IN_MEMCMP:
6008       target = expand_builtin_memcmp (exp, target);
6009       if (target)
6010 	return target;
6011       break;
6012 
6013     case BUILT_IN_SETJMP:
6014       /* This should have been lowered to the builtins below.  */
6015       gcc_unreachable ();
6016 
6017     case BUILT_IN_SETJMP_SETUP:
6018       /* __builtin_setjmp_setup is passed a pointer to an array of five words
6019           and the receiver label.  */
6020       if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6021 	{
6022 	  rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6023 				      VOIDmode, EXPAND_NORMAL);
6024 	  tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
6025 	  rtx_insn *label_r = label_rtx (label);
6026 
6027 	  /* This is copied from the handling of non-local gotos.  */
6028 	  expand_builtin_setjmp_setup (buf_addr, label_r);
6029 	  nonlocal_goto_handler_labels
6030 	    = gen_rtx_INSN_LIST (VOIDmode, label_r,
6031 				 nonlocal_goto_handler_labels);
6032 	  /* ??? Do not let expand_label treat us as such since we would
6033 	     not want to be both on the list of non-local labels and on
6034 	     the list of forced labels.  */
6035 	  FORCED_LABEL (label) = 0;
6036 	  return const0_rtx;
6037 	}
6038       break;
6039 
6040     case BUILT_IN_SETJMP_RECEIVER:
6041        /* __builtin_setjmp_receiver is passed the receiver label.  */
6042       if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6043 	{
6044 	  tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
6045 	  rtx_insn *label_r = label_rtx (label);
6046 
6047 	  expand_builtin_setjmp_receiver (label_r);
6048 	  return const0_rtx;
6049 	}
6050       break;
6051 
6052       /* __builtin_longjmp is passed a pointer to an array of five words.
6053 	 It's similar to the C library longjmp function but works with
6054 	 __builtin_setjmp above.  */
6055     case BUILT_IN_LONGJMP:
6056       if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6057 	{
6058 	  rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
6059 				      VOIDmode, EXPAND_NORMAL);
6060 	  rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
6061 
6062 	  if (value != const1_rtx)
6063 	    {
6064 	      error ("%<__builtin_longjmp%> second argument must be 1");
6065 	      return const0_rtx;
6066 	    }
6067 
6068 	  expand_builtin_longjmp (buf_addr, value);
6069 	  return const0_rtx;
6070 	}
6071       break;
6072 
6073     case BUILT_IN_NONLOCAL_GOTO:
6074       target = expand_builtin_nonlocal_goto (exp);
6075       if (target)
6076 	return target;
6077       break;
6078 
6079       /* This updates the setjmp buffer that is its argument with the value
6080 	 of the current stack pointer.  */
6081     case BUILT_IN_UPDATE_SETJMP_BUF:
6082       if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
6083 	{
6084 	  rtx buf_addr
6085 	    = expand_normal (CALL_EXPR_ARG (exp, 0));
6086 
6087 	  expand_builtin_update_setjmp_buf (buf_addr);
6088 	  return const0_rtx;
6089 	}
6090       break;
6091 
6092     case BUILT_IN_TRAP:
6093       expand_builtin_trap ();
6094       return const0_rtx;
6095 
6096     case BUILT_IN_UNREACHABLE:
6097       expand_builtin_unreachable ();
6098       return const0_rtx;
6099 
6100     CASE_FLT_FN (BUILT_IN_SIGNBIT):
6101     case BUILT_IN_SIGNBITD32:
6102     case BUILT_IN_SIGNBITD64:
6103     case BUILT_IN_SIGNBITD128:
6104       target = expand_builtin_signbit (exp, target);
6105       if (target)
6106 	return target;
6107       break;
6108 
6109       /* Various hooks for the DWARF 2 __throw routine.  */
6110     case BUILT_IN_UNWIND_INIT:
6111       expand_builtin_unwind_init ();
6112       return const0_rtx;
6113     case BUILT_IN_DWARF_CFA:
6114       return virtual_cfa_rtx;
6115 #ifdef DWARF2_UNWIND_INFO
6116     case BUILT_IN_DWARF_SP_COLUMN:
6117       return expand_builtin_dwarf_sp_column ();
6118     case BUILT_IN_INIT_DWARF_REG_SIZES:
6119       expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
6120       return const0_rtx;
6121 #endif
6122     case BUILT_IN_FROB_RETURN_ADDR:
6123       return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
6124     case BUILT_IN_EXTRACT_RETURN_ADDR:
6125       return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
6126     case BUILT_IN_EH_RETURN:
6127       expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
6128 				CALL_EXPR_ARG (exp, 1));
6129       return const0_rtx;
6130     case BUILT_IN_EH_RETURN_DATA_REGNO:
6131       return expand_builtin_eh_return_data_regno (exp);
6132     case BUILT_IN_EXTEND_POINTER:
6133       return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
6134     case BUILT_IN_EH_POINTER:
6135       return expand_builtin_eh_pointer (exp);
6136     case BUILT_IN_EH_FILTER:
6137       return expand_builtin_eh_filter (exp);
6138     case BUILT_IN_EH_COPY_VALUES:
6139       return expand_builtin_eh_copy_values (exp);
6140 
6141     case BUILT_IN_VA_START:
6142       return expand_builtin_va_start (exp);
6143     case BUILT_IN_VA_END:
6144       return expand_builtin_va_end (exp);
6145     case BUILT_IN_VA_COPY:
6146       return expand_builtin_va_copy (exp);
6147     case BUILT_IN_EXPECT:
6148       return expand_builtin_expect (exp, target);
6149     case BUILT_IN_ASSUME_ALIGNED:
6150       return expand_builtin_assume_aligned (exp, target);
6151     case BUILT_IN_PREFETCH:
6152       expand_builtin_prefetch (exp);
6153       return const0_rtx;
6154 
6155     case BUILT_IN_INIT_TRAMPOLINE:
6156       return expand_builtin_init_trampoline (exp, true);
6157     case BUILT_IN_INIT_HEAP_TRAMPOLINE:
6158       return expand_builtin_init_trampoline (exp, false);
6159     case BUILT_IN_ADJUST_TRAMPOLINE:
6160       return expand_builtin_adjust_trampoline (exp);
6161 
6162     case BUILT_IN_FORK:
6163     case BUILT_IN_EXECL:
6164     case BUILT_IN_EXECV:
6165     case BUILT_IN_EXECLP:
6166     case BUILT_IN_EXECLE:
6167     case BUILT_IN_EXECVP:
6168     case BUILT_IN_EXECVE:
6169       target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
6170       if (target)
6171 	return target;
6172       break;
6173 
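      /* For the __sync_* (and __atomic_*) builtins below, the _1/_2/_4/_8/_16
	 suffix gives the operand size in bytes; the offset of FCODE from the
	 _1 variant is what get_builtin_sync_mode uses to recover the machine
	 mode, so e.g. __sync_fetch_and_add_4 operates on the 4-byte mode.  */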
6174     case BUILT_IN_SYNC_FETCH_AND_ADD_1:
6175     case BUILT_IN_SYNC_FETCH_AND_ADD_2:
6176     case BUILT_IN_SYNC_FETCH_AND_ADD_4:
6177     case BUILT_IN_SYNC_FETCH_AND_ADD_8:
6178     case BUILT_IN_SYNC_FETCH_AND_ADD_16:
6179       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
6180       target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
6181       if (target)
6182 	return target;
6183       break;
6184 
6185     case BUILT_IN_SYNC_FETCH_AND_SUB_1:
6186     case BUILT_IN_SYNC_FETCH_AND_SUB_2:
6187     case BUILT_IN_SYNC_FETCH_AND_SUB_4:
6188     case BUILT_IN_SYNC_FETCH_AND_SUB_8:
6189     case BUILT_IN_SYNC_FETCH_AND_SUB_16:
6190       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
6191       target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
6192       if (target)
6193 	return target;
6194       break;
6195 
6196     case BUILT_IN_SYNC_FETCH_AND_OR_1:
6197     case BUILT_IN_SYNC_FETCH_AND_OR_2:
6198     case BUILT_IN_SYNC_FETCH_AND_OR_4:
6199     case BUILT_IN_SYNC_FETCH_AND_OR_8:
6200     case BUILT_IN_SYNC_FETCH_AND_OR_16:
6201       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
6202       target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
6203       if (target)
6204 	return target;
6205       break;
6206 
6207     case BUILT_IN_SYNC_FETCH_AND_AND_1:
6208     case BUILT_IN_SYNC_FETCH_AND_AND_2:
6209     case BUILT_IN_SYNC_FETCH_AND_AND_4:
6210     case BUILT_IN_SYNC_FETCH_AND_AND_8:
6211     case BUILT_IN_SYNC_FETCH_AND_AND_16:
6212       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
6213       target = expand_builtin_sync_operation (mode, exp, AND, false, target);
6214       if (target)
6215 	return target;
6216       break;
6217 
6218     case BUILT_IN_SYNC_FETCH_AND_XOR_1:
6219     case BUILT_IN_SYNC_FETCH_AND_XOR_2:
6220     case BUILT_IN_SYNC_FETCH_AND_XOR_4:
6221     case BUILT_IN_SYNC_FETCH_AND_XOR_8:
6222     case BUILT_IN_SYNC_FETCH_AND_XOR_16:
6223       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
6224       target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
6225       if (target)
6226 	return target;
6227       break;
6228 
6229     case BUILT_IN_SYNC_FETCH_AND_NAND_1:
6230     case BUILT_IN_SYNC_FETCH_AND_NAND_2:
6231     case BUILT_IN_SYNC_FETCH_AND_NAND_4:
6232     case BUILT_IN_SYNC_FETCH_AND_NAND_8:
6233     case BUILT_IN_SYNC_FETCH_AND_NAND_16:
6234       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
6235       target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
6236       if (target)
6237 	return target;
6238       break;
6239 
6240     case BUILT_IN_SYNC_ADD_AND_FETCH_1:
6241     case BUILT_IN_SYNC_ADD_AND_FETCH_2:
6242     case BUILT_IN_SYNC_ADD_AND_FETCH_4:
6243     case BUILT_IN_SYNC_ADD_AND_FETCH_8:
6244     case BUILT_IN_SYNC_ADD_AND_FETCH_16:
6245       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
6246       target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
6247       if (target)
6248 	return target;
6249       break;
6250 
6251     case BUILT_IN_SYNC_SUB_AND_FETCH_1:
6252     case BUILT_IN_SYNC_SUB_AND_FETCH_2:
6253     case BUILT_IN_SYNC_SUB_AND_FETCH_4:
6254     case BUILT_IN_SYNC_SUB_AND_FETCH_8:
6255     case BUILT_IN_SYNC_SUB_AND_FETCH_16:
6256       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
6257       target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
6258       if (target)
6259 	return target;
6260       break;
6261 
6262     case BUILT_IN_SYNC_OR_AND_FETCH_1:
6263     case BUILT_IN_SYNC_OR_AND_FETCH_2:
6264     case BUILT_IN_SYNC_OR_AND_FETCH_4:
6265     case BUILT_IN_SYNC_OR_AND_FETCH_8:
6266     case BUILT_IN_SYNC_OR_AND_FETCH_16:
6267       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
6268       target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
6269       if (target)
6270 	return target;
6271       break;
6272 
6273     case BUILT_IN_SYNC_AND_AND_FETCH_1:
6274     case BUILT_IN_SYNC_AND_AND_FETCH_2:
6275     case BUILT_IN_SYNC_AND_AND_FETCH_4:
6276     case BUILT_IN_SYNC_AND_AND_FETCH_8:
6277     case BUILT_IN_SYNC_AND_AND_FETCH_16:
6278       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
6279       target = expand_builtin_sync_operation (mode, exp, AND, true, target);
6280       if (target)
6281 	return target;
6282       break;
6283 
6284     case BUILT_IN_SYNC_XOR_AND_FETCH_1:
6285     case BUILT_IN_SYNC_XOR_AND_FETCH_2:
6286     case BUILT_IN_SYNC_XOR_AND_FETCH_4:
6287     case BUILT_IN_SYNC_XOR_AND_FETCH_8:
6288     case BUILT_IN_SYNC_XOR_AND_FETCH_16:
6289       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
6290       target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
6291       if (target)
6292 	return target;
6293       break;
6294 
6295     case BUILT_IN_SYNC_NAND_AND_FETCH_1:
6296     case BUILT_IN_SYNC_NAND_AND_FETCH_2:
6297     case BUILT_IN_SYNC_NAND_AND_FETCH_4:
6298     case BUILT_IN_SYNC_NAND_AND_FETCH_8:
6299     case BUILT_IN_SYNC_NAND_AND_FETCH_16:
6300       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
6301       target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
6302       if (target)
6303 	return target;
6304       break;
6305 
6306     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
6307     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
6308     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
6309     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
6310     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
6311       if (mode == VOIDmode)
6312 	mode = TYPE_MODE (boolean_type_node);
6313       if (!target || !register_operand (target, mode))
6314 	target = gen_reg_rtx (mode);
6315 
6316       mode = get_builtin_sync_mode
6317 				(fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
6318       target = expand_builtin_compare_and_swap (mode, exp, true, target);
6319       if (target)
6320 	return target;
6321       break;
6322 
6323     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
6324     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
6325     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
6326     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
6327     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
6328       mode = get_builtin_sync_mode
6329 				(fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
6330       target = expand_builtin_compare_and_swap (mode, exp, false, target);
6331       if (target)
6332 	return target;
6333       break;
6334 
6335     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
6336     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
6337     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
6338     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
6339     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
6340       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
6341       target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
6342       if (target)
6343 	return target;
6344       break;
6345 
6346     case BUILT_IN_SYNC_LOCK_RELEASE_1:
6347     case BUILT_IN_SYNC_LOCK_RELEASE_2:
6348     case BUILT_IN_SYNC_LOCK_RELEASE_4:
6349     case BUILT_IN_SYNC_LOCK_RELEASE_8:
6350     case BUILT_IN_SYNC_LOCK_RELEASE_16:
6351       mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
6352       expand_builtin_sync_lock_release (mode, exp);
6353       return const0_rtx;
6354 
6355     case BUILT_IN_SYNC_SYNCHRONIZE:
6356       expand_builtin_sync_synchronize ();
6357       return const0_rtx;
6358 
6359     case BUILT_IN_ATOMIC_EXCHANGE_1:
6360     case BUILT_IN_ATOMIC_EXCHANGE_2:
6361     case BUILT_IN_ATOMIC_EXCHANGE_4:
6362     case BUILT_IN_ATOMIC_EXCHANGE_8:
6363     case BUILT_IN_ATOMIC_EXCHANGE_16:
6364       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
6365       target = expand_builtin_atomic_exchange (mode, exp, target);
6366       if (target)
6367 	return target;
6368       break;
6369 
6370     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
6371     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
6372     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
6373     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
6374     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
6375       {
6376 	unsigned int nargs, z;
6377 	vec<tree, va_gc> *vec;
6378 
6379 	mode =
6380 	    get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
6381 	target = expand_builtin_atomic_compare_exchange (mode, exp, target);
6382 	if (target)
6383 	  return target;
6384 
6385 	/* If this is turned into an external library call, the weak parameter
6386 	   must be dropped to match the expected parameter list.  */
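	/* For example, a 4-byte __atomic_compare_exchange_4 (ptr, expected,
	   desired, weak, success_order, failure_order) is rebuilt below
	   without its fourth (weak) argument before being emitted as an
	   external call.  */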
6387 	nargs = call_expr_nargs (exp);
6388 	vec_alloc (vec, nargs - 1);
6389 	for (z = 0; z < 3; z++)
6390 	  vec->quick_push (CALL_EXPR_ARG (exp, z));
6391 	/* Skip the boolean weak parameter.  */
6392 	for (z = 4; z < 6; z++)
6393 	  vec->quick_push (CALL_EXPR_ARG (exp, z));
6394 	exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
6395 	break;
6396       }
6397 
6398     case BUILT_IN_ATOMIC_LOAD_1:
6399     case BUILT_IN_ATOMIC_LOAD_2:
6400     case BUILT_IN_ATOMIC_LOAD_4:
6401     case BUILT_IN_ATOMIC_LOAD_8:
6402     case BUILT_IN_ATOMIC_LOAD_16:
6403       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
6404       target = expand_builtin_atomic_load (mode, exp, target);
6405       if (target)
6406 	return target;
6407       break;
6408 
6409     case BUILT_IN_ATOMIC_STORE_1:
6410     case BUILT_IN_ATOMIC_STORE_2:
6411     case BUILT_IN_ATOMIC_STORE_4:
6412     case BUILT_IN_ATOMIC_STORE_8:
6413     case BUILT_IN_ATOMIC_STORE_16:
6414       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
6415       target = expand_builtin_atomic_store (mode, exp);
6416       if (target)
6417 	return const0_rtx;
6418       break;
6419 
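      /* For the __atomic_<op>_fetch cases below, LIB names the matching
	 __atomic_fetch_<op> builtin of the same size, so that
	 expand_builtin_atomic_fetch_op can fall back to that library call
	 and apply the operation to its result when no inline expansion is
	 possible.  */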
6420     case BUILT_IN_ATOMIC_ADD_FETCH_1:
6421     case BUILT_IN_ATOMIC_ADD_FETCH_2:
6422     case BUILT_IN_ATOMIC_ADD_FETCH_4:
6423     case BUILT_IN_ATOMIC_ADD_FETCH_8:
6424     case BUILT_IN_ATOMIC_ADD_FETCH_16:
6425       {
6426 	enum built_in_function lib;
6427 	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
6428 	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
6429 				       (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
6430 	target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
6431 						 ignore, lib);
6432 	if (target)
6433 	  return target;
6434 	break;
6435       }
6436     case BUILT_IN_ATOMIC_SUB_FETCH_1:
6437     case BUILT_IN_ATOMIC_SUB_FETCH_2:
6438     case BUILT_IN_ATOMIC_SUB_FETCH_4:
6439     case BUILT_IN_ATOMIC_SUB_FETCH_8:
6440     case BUILT_IN_ATOMIC_SUB_FETCH_16:
6441       {
6442 	enum built_in_function lib;
6443 	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
6444 	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
6445 				       (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
6446 	target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
6447 						 ignore, lib);
6448 	if (target)
6449 	  return target;
6450 	break;
6451       }
6452     case BUILT_IN_ATOMIC_AND_FETCH_1:
6453     case BUILT_IN_ATOMIC_AND_FETCH_2:
6454     case BUILT_IN_ATOMIC_AND_FETCH_4:
6455     case BUILT_IN_ATOMIC_AND_FETCH_8:
6456     case BUILT_IN_ATOMIC_AND_FETCH_16:
6457       {
6458 	enum built_in_function lib;
6459 	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
6460 	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
6461 				       (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
6462 	target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
6463 						 ignore, lib);
6464 	if (target)
6465 	  return target;
6466 	break;
6467       }
6468     case BUILT_IN_ATOMIC_NAND_FETCH_1:
6469     case BUILT_IN_ATOMIC_NAND_FETCH_2:
6470     case BUILT_IN_ATOMIC_NAND_FETCH_4:
6471     case BUILT_IN_ATOMIC_NAND_FETCH_8:
6472     case BUILT_IN_ATOMIC_NAND_FETCH_16:
6473       {
6474 	enum built_in_function lib;
6475 	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
6476 	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
6477 				       (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
6478 	target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
6479 						 ignore, lib);
6480 	if (target)
6481 	  return target;
6482 	break;
6483       }
6484     case BUILT_IN_ATOMIC_XOR_FETCH_1:
6485     case BUILT_IN_ATOMIC_XOR_FETCH_2:
6486     case BUILT_IN_ATOMIC_XOR_FETCH_4:
6487     case BUILT_IN_ATOMIC_XOR_FETCH_8:
6488     case BUILT_IN_ATOMIC_XOR_FETCH_16:
6489       {
6490 	enum built_in_function lib;
6491 	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
6492 	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
6493 				       (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
6494 	target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
6495 						 ignore, lib);
6496 	if (target)
6497 	  return target;
6498 	break;
6499       }
6500     case BUILT_IN_ATOMIC_OR_FETCH_1:
6501     case BUILT_IN_ATOMIC_OR_FETCH_2:
6502     case BUILT_IN_ATOMIC_OR_FETCH_4:
6503     case BUILT_IN_ATOMIC_OR_FETCH_8:
6504     case BUILT_IN_ATOMIC_OR_FETCH_16:
6505       {
6506 	enum built_in_function lib;
6507 	mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
6508 	lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
6509 				       (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
6510 	target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
6511 						 ignore, lib);
6512 	if (target)
6513 	  return target;
6514 	break;
6515       }
6516     case BUILT_IN_ATOMIC_FETCH_ADD_1:
6517     case BUILT_IN_ATOMIC_FETCH_ADD_2:
6518     case BUILT_IN_ATOMIC_FETCH_ADD_4:
6519     case BUILT_IN_ATOMIC_FETCH_ADD_8:
6520     case BUILT_IN_ATOMIC_FETCH_ADD_16:
6521       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
6522       target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
6523 					       ignore, BUILT_IN_NONE);
6524       if (target)
6525 	return target;
6526       break;
6527 
6528     case BUILT_IN_ATOMIC_FETCH_SUB_1:
6529     case BUILT_IN_ATOMIC_FETCH_SUB_2:
6530     case BUILT_IN_ATOMIC_FETCH_SUB_4:
6531     case BUILT_IN_ATOMIC_FETCH_SUB_8:
6532     case BUILT_IN_ATOMIC_FETCH_SUB_16:
6533       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
6534       target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
6535 					       ignore, BUILT_IN_NONE);
6536       if (target)
6537 	return target;
6538       break;
6539 
6540     case BUILT_IN_ATOMIC_FETCH_AND_1:
6541     case BUILT_IN_ATOMIC_FETCH_AND_2:
6542     case BUILT_IN_ATOMIC_FETCH_AND_4:
6543     case BUILT_IN_ATOMIC_FETCH_AND_8:
6544     case BUILT_IN_ATOMIC_FETCH_AND_16:
6545       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
6546       target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
6547 					       ignore, BUILT_IN_NONE);
6548       if (target)
6549 	return target;
6550       break;
6551 
6552     case BUILT_IN_ATOMIC_FETCH_NAND_1:
6553     case BUILT_IN_ATOMIC_FETCH_NAND_2:
6554     case BUILT_IN_ATOMIC_FETCH_NAND_4:
6555     case BUILT_IN_ATOMIC_FETCH_NAND_8:
6556     case BUILT_IN_ATOMIC_FETCH_NAND_16:
6557       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
6558       target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
6559 					       ignore, BUILT_IN_NONE);
6560       if (target)
6561 	return target;
6562       break;
6563 
6564     case BUILT_IN_ATOMIC_FETCH_XOR_1:
6565     case BUILT_IN_ATOMIC_FETCH_XOR_2:
6566     case BUILT_IN_ATOMIC_FETCH_XOR_4:
6567     case BUILT_IN_ATOMIC_FETCH_XOR_8:
6568     case BUILT_IN_ATOMIC_FETCH_XOR_16:
6569       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
6570       target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
6571 					       ignore, BUILT_IN_NONE);
6572       if (target)
6573 	return target;
6574       break;
6575 
6576     case BUILT_IN_ATOMIC_FETCH_OR_1:
6577     case BUILT_IN_ATOMIC_FETCH_OR_2:
6578     case BUILT_IN_ATOMIC_FETCH_OR_4:
6579     case BUILT_IN_ATOMIC_FETCH_OR_8:
6580     case BUILT_IN_ATOMIC_FETCH_OR_16:
6581       mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
6582       target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
6583 					       ignore, BUILT_IN_NONE);
6584       if (target)
6585 	return target;
6586       break;
6587 
6588     case BUILT_IN_ATOMIC_TEST_AND_SET:
6589       return expand_builtin_atomic_test_and_set (exp, target);
6590 
6591     case BUILT_IN_ATOMIC_CLEAR:
6592       return expand_builtin_atomic_clear (exp);
6593 
6594     case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
6595       return expand_builtin_atomic_always_lock_free (exp);
6596 
6597     case BUILT_IN_ATOMIC_IS_LOCK_FREE:
6598       target = expand_builtin_atomic_is_lock_free (exp);
6599       if (target)
6600         return target;
6601       break;
6602 
6603     case BUILT_IN_ATOMIC_THREAD_FENCE:
6604       expand_builtin_atomic_thread_fence (exp);
6605       return const0_rtx;
6606 
6607     case BUILT_IN_ATOMIC_SIGNAL_FENCE:
6608       expand_builtin_atomic_signal_fence (exp);
6609       return const0_rtx;
6610 
6611     case BUILT_IN_OBJECT_SIZE:
6612       return expand_builtin_object_size (exp);
6613 
6614     case BUILT_IN_MEMCPY_CHK:
6615     case BUILT_IN_MEMPCPY_CHK:
6616     case BUILT_IN_MEMMOVE_CHK:
6617     case BUILT_IN_MEMSET_CHK:
6618       target = expand_builtin_memory_chk (exp, target, mode, fcode);
6619       if (target)
6620 	return target;
6621       break;
6622 
6623     case BUILT_IN_STRCPY_CHK:
6624     case BUILT_IN_STPCPY_CHK:
6625     case BUILT_IN_STRNCPY_CHK:
6626     case BUILT_IN_STPNCPY_CHK:
6627     case BUILT_IN_STRCAT_CHK:
6628     case BUILT_IN_STRNCAT_CHK:
6629     case BUILT_IN_SNPRINTF_CHK:
6630     case BUILT_IN_VSNPRINTF_CHK:
6631       maybe_emit_chk_warning (exp, fcode);
6632       break;
6633 
6634     case BUILT_IN_SPRINTF_CHK:
6635     case BUILT_IN_VSPRINTF_CHK:
6636       maybe_emit_sprintf_chk_warning (exp, fcode);
6637       break;
6638 
6639     case BUILT_IN_FREE:
6640       if (warn_free_nonheap_object)
6641 	maybe_emit_free_warning (exp);
6642       break;
6643 
6644     case BUILT_IN_THREAD_POINTER:
6645       return expand_builtin_thread_pointer (exp, target);
6646 
6647     case BUILT_IN_SET_THREAD_POINTER:
6648       expand_builtin_set_thread_pointer (exp);
6649       return const0_rtx;
6650 
6651     case BUILT_IN_CILK_DETACH:
6652       expand_builtin_cilk_detach (exp);
6653       return const0_rtx;
6654 
6655     case BUILT_IN_CILK_POP_FRAME:
6656       expand_builtin_cilk_pop_frame (exp);
6657       return const0_rtx;
6658 
6659     case BUILT_IN_CHKP_INIT_PTR_BOUNDS:
6660     case BUILT_IN_CHKP_NULL_PTR_BOUNDS:
6661     case BUILT_IN_CHKP_COPY_PTR_BOUNDS:
6662     case BUILT_IN_CHKP_CHECK_PTR_LBOUNDS:
6663     case BUILT_IN_CHKP_CHECK_PTR_UBOUNDS:
6664     case BUILT_IN_CHKP_CHECK_PTR_BOUNDS:
6665     case BUILT_IN_CHKP_SET_PTR_BOUNDS:
6666     case BUILT_IN_CHKP_NARROW_PTR_BOUNDS:
6667     case BUILT_IN_CHKP_STORE_PTR_BOUNDS:
6668     case BUILT_IN_CHKP_GET_PTR_LBOUND:
6669     case BUILT_IN_CHKP_GET_PTR_UBOUND:
6670       /* We allow user CHKP builtins if Pointer Bounds
6671 	 Checker is off.  */
6672       if (!chkp_function_instrumented_p (current_function_decl))
6673 	{
6674 	  if (fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS
6675 	      || fcode == BUILT_IN_CHKP_NARROW_PTR_BOUNDS
6676 	      || fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
6677 	      || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
6678 	      || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
6679 	    return expand_normal (CALL_EXPR_ARG (exp, 0));
6680 	  else if (fcode == BUILT_IN_CHKP_GET_PTR_LBOUND)
6681 	    return expand_normal (size_zero_node);
6682 	  else if (fcode == BUILT_IN_CHKP_GET_PTR_UBOUND)
6683 	    return expand_normal (size_int (-1));
6684 	  else
6685 	    return const0_rtx;
6686 	}
6687       /* FALLTHROUGH */
6688 
6689     case BUILT_IN_CHKP_BNDMK:
6690     case BUILT_IN_CHKP_BNDSTX:
6691     case BUILT_IN_CHKP_BNDCL:
6692     case BUILT_IN_CHKP_BNDCU:
6693     case BUILT_IN_CHKP_BNDLDX:
6694     case BUILT_IN_CHKP_BNDRET:
6695     case BUILT_IN_CHKP_INTERSECT:
6696     case BUILT_IN_CHKP_NARROW:
6697     case BUILT_IN_CHKP_EXTRACT_LOWER:
6698     case BUILT_IN_CHKP_EXTRACT_UPPER:
6699       /* Software implementation of Pointer Bounds Checker is NYI.
6700 	 Target support is required.  */
6701       error ("Your target platform does not support -fcheck-pointer-bounds");
6702       break;
6703 
6704     case BUILT_IN_ACC_ON_DEVICE:
6705       /* Do a library call if we failed to expand the builtin when
6706 	 folding.  */
6707       break;
6708 
6709     default:	/* just do library call, if unknown builtin */
6710       break;
6711     }
6712 
6713   /* The switch statement above can drop through to cause the function
6714      to be called normally.  */
6715   return expand_call (exp, target, ignore);
6716 }
6717 
6718 /* Similar to expand_builtin but is used for instrumented calls.  */
6719 
6720 rtx
6721 expand_builtin_with_bounds (tree exp, rtx target,
6722 			    rtx subtarget ATTRIBUTE_UNUSED,
6723 			    machine_mode mode, int ignore)
6724 {
6725   tree fndecl = get_callee_fndecl (exp);
6726   enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
6727 
6728   gcc_assert (CALL_WITH_BOUNDS_P (exp));
6729 
6730   if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6731     return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
6732 
6733   gcc_assert (fcode > BEGIN_CHKP_BUILTINS
6734 	      && fcode < END_CHKP_BUILTINS);
6735 
6736   switch (fcode)
6737     {
6738     case BUILT_IN_CHKP_MEMCPY_NOBND_NOCHK_CHKP:
6739       target = expand_builtin_memcpy_with_bounds (exp, target);
6740       if (target)
6741 	return target;
6742       break;
6743 
6744     case BUILT_IN_CHKP_MEMPCPY_NOBND_NOCHK_CHKP:
6745       target = expand_builtin_mempcpy_with_bounds (exp, target, mode);
6746       if (target)
6747 	return target;
6748       break;
6749 
6750     case BUILT_IN_CHKP_MEMSET_NOBND_NOCHK_CHKP:
6751       target = expand_builtin_memset_with_bounds (exp, target, mode);
6752       if (target)
6753 	return target;
6754       break;
6755 
6756     default:
6757       break;
6758     }
6759 
6760   /* The switch statement above can drop through to cause the function
6761      to be called normally.  */
6762   return expand_call (exp, target, ignore);
6763 }
6764 
6765 /* Determine whether a tree node represents a call to a built-in
6766    function.  If the tree T is a call to a built-in function with
6767    the right number of arguments of the appropriate types, return
6768    the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
6769    Otherwise the return value is END_BUILTINS.  */
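/* For example, a well-formed one-argument call to sqrt yields
   BUILT_IN_SQRT, while a call whose argument types do not match the
   builtin's prototype yields END_BUILTINS.  */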
6770 
6771 enum built_in_function
6772 builtin_mathfn_code (const_tree t)
6773 {
6774   const_tree fndecl, arg, parmlist;
6775   const_tree argtype, parmtype;
6776   const_call_expr_arg_iterator iter;
6777 
6778   if (TREE_CODE (t) != CALL_EXPR
6779       || TREE_CODE (CALL_EXPR_FN (t)) != ADDR_EXPR)
6780     return END_BUILTINS;
6781 
6782   fndecl = get_callee_fndecl (t);
6783   if (fndecl == NULL_TREE
6784       || TREE_CODE (fndecl) != FUNCTION_DECL
6785       || ! DECL_BUILT_IN (fndecl)
6786       || DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
6787     return END_BUILTINS;
6788 
6789   parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
6790   init_const_call_expr_arg_iterator (t, &iter);
6791   for (; parmlist; parmlist = TREE_CHAIN (parmlist))
6792     {
6793       /* If a function doesn't take a variable number of arguments,
6794 	 the last element in the list will have type `void'.  */
6795       parmtype = TREE_VALUE (parmlist);
6796       if (VOID_TYPE_P (parmtype))
6797 	{
6798 	  if (more_const_call_expr_args_p (&iter))
6799 	    return END_BUILTINS;
6800 	  return DECL_FUNCTION_CODE (fndecl);
6801 	}
6802 
6803       if (! more_const_call_expr_args_p (&iter))
6804 	return END_BUILTINS;
6805 
6806       arg = next_const_call_expr_arg (&iter);
6807       argtype = TREE_TYPE (arg);
6808 
6809       if (SCALAR_FLOAT_TYPE_P (parmtype))
6810 	{
6811 	  if (! SCALAR_FLOAT_TYPE_P (argtype))
6812 	    return END_BUILTINS;
6813 	}
6814       else if (COMPLEX_FLOAT_TYPE_P (parmtype))
6815 	{
6816 	  if (! COMPLEX_FLOAT_TYPE_P (argtype))
6817 	    return END_BUILTINS;
6818 	}
6819       else if (POINTER_TYPE_P (parmtype))
6820 	{
6821 	  if (! POINTER_TYPE_P (argtype))
6822 	    return END_BUILTINS;
6823 	}
6824       else if (INTEGRAL_TYPE_P (parmtype))
6825 	{
6826 	  if (! INTEGRAL_TYPE_P (argtype))
6827 	    return END_BUILTINS;
6828 	}
6829       else
6830 	return END_BUILTINS;
6831     }
6832 
6833   /* Variable-length argument list.  */
6834   return DECL_FUNCTION_CODE (fndecl);
6835 }
6836 
6837 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
6838    evaluate to a constant.  */
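/* For instance, __builtin_constant_p (42) folds to 1 here, a call whose
   argument has side effects folds to 0, and anything still undecided is
   left alone (NULL_TREE) for later passes to resolve.  */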
6839 
6840 static tree
6841 fold_builtin_constant_p (tree arg)
6842 {
6843   /* We return 1 for a numeric type that's known to be a constant
6844      value at compile-time or for an aggregate type that's a
6845      literal constant.  */
6846   STRIP_NOPS (arg);
6847 
6848   /* If we know this is a constant, return the constant one.  */
6849   if (CONSTANT_CLASS_P (arg)
6850       || (TREE_CODE (arg) == CONSTRUCTOR
6851 	  && TREE_CONSTANT (arg)))
6852     return integer_one_node;
6853   if (TREE_CODE (arg) == ADDR_EXPR)
6854     {
6855        tree op = TREE_OPERAND (arg, 0);
6856        if (TREE_CODE (op) == STRING_CST
6857 	   || (TREE_CODE (op) == ARRAY_REF
6858 	       && integer_zerop (TREE_OPERAND (op, 1))
6859 	       && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
6860 	 return integer_one_node;
6861     }
6862 
6863   /* If this expression has side effects, show we don't know it to be a
6864      constant.  Likewise if it's a pointer or aggregate type since in
6865      those cases we only want literals, as those are only optimized
6866      when generating RTL, not later.
6867      And finally, if we are compiling an initializer, not code, we
6868      need to return a definite result now; there's not going to be any
6869      more optimization done.  */
6870   if (TREE_SIDE_EFFECTS (arg)
6871       || AGGREGATE_TYPE_P (TREE_TYPE (arg))
6872       || POINTER_TYPE_P (TREE_TYPE (arg))
6873       || cfun == 0
6874       || folding_initializer
6875       || force_folding_builtin_constant_p)
6876     return integer_zero_node;
6877 
6878   return NULL_TREE;
6879 }
6880 
6881 /* Create builtin_expect with PRED and EXPECTED as its arguments and
6882    return it as a truthvalue.  */
6883 
6884 static tree
6885 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
6886 				tree predictor)
6887 {
6888   tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
6889 
6890   fn = builtin_decl_explicit (BUILT_IN_EXPECT);
6891   arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
6892   ret_type = TREE_TYPE (TREE_TYPE (fn));
6893   pred_type = TREE_VALUE (arg_types);
6894   expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
6895 
6896   pred = fold_convert_loc (loc, pred_type, pred);
6897   expected = fold_convert_loc (loc, expected_type, expected);
6898   call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
6899 				   predictor);
6900 
6901   return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
6902 		 build_int_cst (ret_type, 0));
6903 }
6904 
6905 /* Fold a call to builtin_expect with arguments ARG0, ARG1 and ARG2.
6906    Return NULL_TREE if no simplification is possible.  */
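/* For example, the expectation in __builtin_expect (a && b, 1) is
   distributed over the short-circuit operator, giving roughly
   __builtin_expect (a, 1) && __builtin_expect (b, 1); a constant
   argument simply folds to itself.  */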
6907 
6908 tree
6909 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2)
6910 {
6911   tree inner, fndecl, inner_arg0;
6912   enum tree_code code;
6913 
6914   /* Distribute the expected value over short-circuiting operators.
6915      See through the cast from truthvalue_type_node to long.  */
6916   inner_arg0 = arg0;
6917   while (CONVERT_EXPR_P (inner_arg0)
6918 	 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
6919 	 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
6920     inner_arg0 = TREE_OPERAND (inner_arg0, 0);
6921 
6922   /* If this is a builtin_expect within a builtin_expect, keep the
6923      inner one.  See through a comparison against a constant.  It
6924      might have been added to create a truthvalue.  */
6925   inner = inner_arg0;
6926 
6927   if (COMPARISON_CLASS_P (inner)
6928       && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
6929     inner = TREE_OPERAND (inner, 0);
6930 
6931   if (TREE_CODE (inner) == CALL_EXPR
6932       && (fndecl = get_callee_fndecl (inner))
6933       && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
6934       && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT)
6935     return arg0;
6936 
6937   inner = inner_arg0;
6938   code = TREE_CODE (inner);
6939   if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6940     {
6941       tree op0 = TREE_OPERAND (inner, 0);
6942       tree op1 = TREE_OPERAND (inner, 1);
6943 
6944       op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2);
6945       op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2);
6946       inner = build2 (code, TREE_TYPE (inner), op0, op1);
6947 
6948       return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
6949     }
6950 
6951   /* If the argument isn't invariant then there's nothing else we can do.  */
6952   if (!TREE_CONSTANT (inner_arg0))
6953     return NULL_TREE;
6954 
6955   /* If we expect that a comparison against the argument will fold to
6956      a constant return the constant.  In practice, this means a true
6957      constant or the address of a non-weak symbol.  */
6958   inner = inner_arg0;
6959   STRIP_NOPS (inner);
6960   if (TREE_CODE (inner) == ADDR_EXPR)
6961     {
6962       do
6963 	{
6964 	  inner = TREE_OPERAND (inner, 0);
6965 	}
6966       while (TREE_CODE (inner) == COMPONENT_REF
6967 	     || TREE_CODE (inner) == ARRAY_REF);
6968       if ((TREE_CODE (inner) == VAR_DECL
6969            || TREE_CODE (inner) == FUNCTION_DECL)
6970 	  && DECL_WEAK (inner))
6971 	return NULL_TREE;
6972     }
6973 
6974   /* Otherwise, ARG0 already has the proper type for the return value.  */
6975   return arg0;
6976 }
6977 
6978 /* Fold a call to __builtin_classify_type with argument ARG.  */
6979 
6980 static tree
6981 fold_builtin_classify_type (tree arg)
6982 {
6983   if (arg == 0)
6984     return build_int_cst (integer_type_node, no_type_class);
6985 
6986   return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
6987 }
6988 
6989 /* Fold a call to __builtin_strlen with argument ARG.  */
6990 
6991 static tree
6992 fold_builtin_strlen (location_t loc, tree type, tree arg)
6993 {
6994   if (!validate_arg (arg, POINTER_TYPE))
6995     return NULL_TREE;
6996   else
6997     {
6998       tree len = c_strlen (arg, 0);
6999 
7000       if (len)
7001 	return fold_convert_loc (loc, type, len);
7002 
7003       return NULL_TREE;
7004     }
7005 }
7006 
7007 /* Fold a call to __builtin_inf or __builtin_huge_val.  */
7008 
7009 static tree
7010 fold_builtin_inf (location_t loc, tree type, int warn)
7011 {
7012   REAL_VALUE_TYPE real;
7013 
7014   /* __builtin_inff is intended to be usable to define INFINITY on all
7015      targets.  If an infinity is not available, INFINITY expands "to a
7016      positive constant of type float that overflows at translation
7017      time", footnote "In this case, using INFINITY will violate the
7018      constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
7019      Thus we pedwarn to ensure this constraint violation is
7020      diagnosed.  */
7021   if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
7022     pedwarn (loc, 0, "target format does not support infinity");
7023 
7024   real_inf (&real);
7025   return build_real (type, real);
7026 }
7027 
7028 /* Fold function call to builtin sincos, sincosf, or sincosl.  Return
7029    NULL_TREE if no simplification can be made.  */
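/* For instance, sincos (x, &s, &c) is rewritten in terms of cexpi so both
   results come from a single call: roughly t = cexpi (x), s = __imag t,
   c = __real t; a constant argument is folded outright via
   fold_const_call.  */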
7030 
7031 static tree
7032 fold_builtin_sincos (location_t loc,
7033 		     tree arg0, tree arg1, tree arg2)
7034 {
7035   tree type;
7036   tree fndecl, call = NULL_TREE;
7037 
7038   if (!validate_arg (arg0, REAL_TYPE)
7039       || !validate_arg (arg1, POINTER_TYPE)
7040       || !validate_arg (arg2, POINTER_TYPE))
7041     return NULL_TREE;
7042 
7043   type = TREE_TYPE (arg0);
7044 
7045   /* Canonicalize sincos to cexpi.  */
7046   built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
7047   if (fn == END_BUILTINS)
7048     return NULL_TREE;
7049 
7050   /* Calculate the result when the argument is a constant.  */
7051   if (TREE_CODE (arg0) == REAL_CST)
7052     {
7053       tree complex_type = build_complex_type (type);
7054       call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
7055     }
7056   if (!call)
7057     {
7058       if (!targetm.libc_has_function (function_c99_math_complex)
7059 	  || !builtin_decl_implicit_p (fn))
7060 	return NULL_TREE;
7061       fndecl = builtin_decl_explicit (fn);
7062       call = build_call_expr_loc (loc, fndecl, 1, arg0);
7063       call = builtin_save_expr (call);
7064     }
7065 
7066   return build2 (COMPOUND_EXPR, void_type_node,
7067 		 build2 (MODIFY_EXPR, void_type_node,
7068 			 build_fold_indirect_ref_loc (loc, arg1),
7069 			 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
7070 		 build2 (MODIFY_EXPR, void_type_node,
7071 			 build_fold_indirect_ref_loc (loc, arg2),
7072 			 fold_build1_loc (loc, REALPART_EXPR, type, call)));
7073 }
7074 
7075 /* Fold function call to builtin memchr.  ARG1, ARG2 and LEN are the
7076    arguments to the call, and TYPE is its return type.
7077    Return NULL_TREE if no simplification can be made.  */
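/* For example, with a constant string and length, memchr ("hello", 'l', 6)
   folds to the address "hello" + 2, while a miss within the searched
   prefix folds to a null pointer constant.  */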
7078 
7079 static tree
7080 fold_builtin_memchr (location_t loc, tree arg1, tree arg2, tree len, tree type)
7081 {
7082   if (!validate_arg (arg1, POINTER_TYPE)
7083       || !validate_arg (arg2, INTEGER_TYPE)
7084       || !validate_arg (len, INTEGER_TYPE))
7085     return NULL_TREE;
7086   else
7087     {
7088       const char *p1;
7089 
7090       if (TREE_CODE (arg2) != INTEGER_CST
7091 	  || !tree_fits_uhwi_p (len))
7092 	return NULL_TREE;
7093 
7094       p1 = c_getstr (arg1);
7095       if (p1 && compare_tree_int (len, strlen (p1) + 1) <= 0)
7096 	{
7097 	  char c;
7098 	  const char *r;
7099 	  tree tem;
7100 
7101 	  if (target_char_cast (arg2, &c))
7102 	    return NULL_TREE;
7103 
7104 	  r = (const char *) memchr (p1, c, tree_to_uhwi (len));
7105 
7106 	  if (r == NULL)
7107 	    return build_int_cst (TREE_TYPE (arg1), 0);
7108 
7109 	  tem = fold_build_pointer_plus_hwi_loc (loc, arg1, r - p1);
7110 	  return fold_convert_loc (loc, type, tem);
7111 	}
7112       return NULL_TREE;
7113     }
7114 }
7115 
7116 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
7117    Return NULL_TREE if no simplification can be made.  */
7118 
7119 static tree
7120 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
7121 {
7122   if (!validate_arg (arg1, POINTER_TYPE)
7123       || !validate_arg (arg2, POINTER_TYPE)
7124       || !validate_arg (len, INTEGER_TYPE))
7125     return NULL_TREE;
7126 
7127   /* If the LEN parameter is zero, return zero.  */
7128   if (integer_zerop (len))
7129     return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
7130 			      arg1, arg2);
7131 
7132   /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
7133   if (operand_equal_p (arg1, arg2, 0))
7134     return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
7135 
7136   /* If len parameter is one, return an expression corresponding to
7137      (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
7138   if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
7139     {
7140       tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7141       tree cst_uchar_ptr_node
7142 	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7143 
7144       tree ind1
7145 	= fold_convert_loc (loc, integer_type_node,
7146 			    build1 (INDIRECT_REF, cst_uchar_node,
7147 				    fold_convert_loc (loc,
7148 						      cst_uchar_ptr_node,
7149 						      arg1)));
7150       tree ind2
7151 	= fold_convert_loc (loc, integer_type_node,
7152 			    build1 (INDIRECT_REF, cst_uchar_node,
7153 				    fold_convert_loc (loc,
7154 						      cst_uchar_ptr_node,
7155 						      arg2)));
7156       return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
7157     }
7158 
7159   return NULL_TREE;
7160 }
7161 
7162 /* Fold function call to builtin strcmp with arguments ARG1 and ARG2.
7163    Return NULL_TREE if no simplification can be made.  */
7164 
7165 static tree
7166 fold_builtin_strcmp (location_t loc, tree arg1, tree arg2)
7167 {
7168   if (!validate_arg (arg1, POINTER_TYPE)
7169       || !validate_arg (arg2, POINTER_TYPE))
7170     return NULL_TREE;
7171 
7172   /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
7173   if (operand_equal_p (arg1, arg2, 0))
7174     return integer_zero_node;
7175 
7176   /* If the second arg is "", return *(const unsigned char*)arg1.  */
7177   const char *p2 = c_getstr (arg2);
7178   if (p2 && *p2 == '\0')
7179     {
7180       tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7181       tree cst_uchar_ptr_node
7182 	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7183 
7184       return fold_convert_loc (loc, integer_type_node,
7185 			       build1 (INDIRECT_REF, cst_uchar_node,
7186 				       fold_convert_loc (loc,
7187 							 cst_uchar_ptr_node,
7188 							 arg1)));
7189     }
7190 
7191   /* If the first arg is "", return -*(const unsigned char*)arg2.  */
7192   const char *p1 = c_getstr (arg1);
7193   if (p1 && *p1 == '\0')
7194     {
7195       tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7196       tree cst_uchar_ptr_node
7197 	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7198 
7199       tree temp
7200 	= fold_convert_loc (loc, integer_type_node,
7201 			    build1 (INDIRECT_REF, cst_uchar_node,
7202 				    fold_convert_loc (loc,
7203 						      cst_uchar_ptr_node,
7204 						      arg2)));
7205       return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
7206     }
7207 
7208   return NULL_TREE;
7209 }
7210 
7211 /* Fold function call to builtin strncmp with arguments ARG1, ARG2, and LEN.
7212    Return NULL_TREE if no simplification can be made.  */
7213 
7214 static tree
7215 fold_builtin_strncmp (location_t loc, tree arg1, tree arg2, tree len)
7216 {
7217   if (!validate_arg (arg1, POINTER_TYPE)
7218       || !validate_arg (arg2, POINTER_TYPE)
7219       || !validate_arg (len, INTEGER_TYPE))
7220     return NULL_TREE;
7221 
7222   /* If the LEN parameter is zero, return zero.  */
7223   if (integer_zerop (len))
7224     return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
7225 			      arg1, arg2);
7226 
7227   /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
7228   if (operand_equal_p (arg1, arg2, 0))
7229     return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
7230 
7231   /* If the second arg is "", and the length is greater than zero,
7232      return *(const unsigned char*)arg1.  */
7233   const char *p2 = c_getstr (arg2);
7234   if (p2 && *p2 == '\0'
7235       && TREE_CODE (len) == INTEGER_CST
7236       && tree_int_cst_sgn (len) == 1)
7237     {
7238       tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7239       tree cst_uchar_ptr_node
7240 	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7241 
7242       return fold_convert_loc (loc, integer_type_node,
7243 			       build1 (INDIRECT_REF, cst_uchar_node,
7244 				       fold_convert_loc (loc,
7245 							 cst_uchar_ptr_node,
7246 							 arg1)));
7247     }
7248 
7249   /* If the first arg is "", and the length is greater than zero,
7250      return -*(const unsigned char*)arg2.  */
7251   const char *p1 = c_getstr (arg1);
7252   if (p1 && *p1 == '\0'
7253       && TREE_CODE (len) == INTEGER_CST
7254       && tree_int_cst_sgn (len) == 1)
7255     {
7256       tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7257       tree cst_uchar_ptr_node
7258 	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7259 
7260       tree temp = fold_convert_loc (loc, integer_type_node,
7261 				    build1 (INDIRECT_REF, cst_uchar_node,
7262 					    fold_convert_loc (loc,
7263 							      cst_uchar_ptr_node,
7264 							      arg2)));
7265       return fold_build1_loc (loc, NEGATE_EXPR, integer_type_node, temp);
7266     }
7267 
7268   /* If len parameter is one, return an expression corresponding to
7269      (*(const unsigned char*)arg1 - (const unsigned char*)arg2).  */
7270   if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
7271     {
7272       tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
7273       tree cst_uchar_ptr_node
7274 	= build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
7275 
7276       tree ind1 = fold_convert_loc (loc, integer_type_node,
7277 				    build1 (INDIRECT_REF, cst_uchar_node,
7278 					    fold_convert_loc (loc,
7279 							      cst_uchar_ptr_node,
7280 							      arg1)));
7281       tree ind2 = fold_convert_loc (loc, integer_type_node,
7282 				    build1 (INDIRECT_REF, cst_uchar_node,
7283 					    fold_convert_loc (loc,
7284 							      cst_uchar_ptr_node,
7285 							      arg2)));
7286       return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
7287     }
7288 
7289   return NULL_TREE;
7290 }
7291 
7292 /* Fold a call to builtin isascii with argument ARG.  */
7293 
7294 static tree
7295 fold_builtin_isascii (location_t loc, tree arg)
7296 {
7297   if (!validate_arg (arg, INTEGER_TYPE))
7298     return NULL_TREE;
7299   else
7300     {
7301       /* Transform isascii(c) -> ((c & ~0x7f) == 0).  */
7302       arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
7303 			 build_int_cst (integer_type_node,
7304 					~ (unsigned HOST_WIDE_INT) 0x7f));
7305       return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
7306 			      arg, integer_zero_node);
7307     }
7308 }
7309 
7310 /* Fold a call to builtin toascii with argument ARG.  */
7311 
7312 static tree
7313 fold_builtin_toascii (location_t loc, tree arg)
7314 {
7315   if (!validate_arg (arg, INTEGER_TYPE))
7316     return NULL_TREE;
7317 
7318   /* Transform toascii(c) -> (c & 0x7f).  */
7319   return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
7320 			  build_int_cst (integer_type_node, 0x7f));
7321 }
7322 
7323 /* Fold a call to builtin isdigit with argument ARG.  */
7324 
7325 static tree
7326 fold_builtin_isdigit (location_t loc, tree arg)
7327 {
7328   if (!validate_arg (arg, INTEGER_TYPE))
7329     return NULL_TREE;
7330   else
7331     {
7332       /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9.  */
7333       /* According to the C standard, isdigit is unaffected by locale.
7334 	 However, it definitely is affected by the target character set.  */
7335       unsigned HOST_WIDE_INT target_digit0
7336 	= lang_hooks.to_target_charset ('0');
7337 
7338       if (target_digit0 == 0)
7339 	return NULL_TREE;
7340 
7341       arg = fold_convert_loc (loc, unsigned_type_node, arg);
7342       arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
7343 			 build_int_cst (unsigned_type_node, target_digit0));
7344       return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
7345 			  build_int_cst (unsigned_type_node, 9));
7346     }
7347 }
7348 
7349 /* Fold a call to fabs, fabsf or fabsl with argument ARG.  */
7350 
7351 static tree
7352 fold_builtin_fabs (location_t loc, tree arg, tree type)
7353 {
7354   if (!validate_arg (arg, REAL_TYPE))
7355     return NULL_TREE;
7356 
7357   arg = fold_convert_loc (loc, type, arg);
7358   return fold_build1_loc (loc, ABS_EXPR, type, arg);
7359 }
7360 
7361 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG.  */
7362 
7363 static tree
7364 fold_builtin_abs (location_t loc, tree arg, tree type)
7365 {
7366   if (!validate_arg (arg, INTEGER_TYPE))
7367     return NULL_TREE;
7368 
7369   arg = fold_convert_loc (loc, type, arg);
7370   return fold_build1_loc (loc, ABS_EXPR, type, arg);
7371 }
7372 
7373 /* Fold a call to fma, fmaf, or fmal with arguments ARG[012].  */
7374 
7375 static tree
7376 fold_builtin_fma (location_t loc, tree arg0, tree arg1, tree arg2, tree type)
7377 {
7378   /* ??? Only expand to FMA_EXPR if it's directly supported.  */
7379   if (validate_arg (arg0, REAL_TYPE)
7380       && validate_arg (arg1, REAL_TYPE)
7381       && validate_arg (arg2, REAL_TYPE)
7382       && optab_handler (fma_optab, TYPE_MODE (type)) != CODE_FOR_nothing)
7383     return fold_build3_loc (loc, FMA_EXPR, type, arg0, arg1, arg2);
7384 
7385   return NULL_TREE;
7386 }
7387 
7388 /* Fold a call to builtin carg(a+bi) -> atan2(b,a).  */
7389 
7390 static tree
7391 fold_builtin_carg (location_t loc, tree arg, tree type)
7392 {
7393   if (validate_arg (arg, COMPLEX_TYPE)
7394       && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
7395     {
7396       tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
7397 
7398       if (atan2_fn)
7399         {
7400   	  tree new_arg = builtin_save_expr (arg);
7401 	  tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
7402 	  tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
7403 	  return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
7404 	}
7405     }
7406 
7407   return NULL_TREE;
7408 }
7409 
7410 /* Fold a call to builtin frexp; we can assume the base is 2.  */
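/* E.g. frexp (4.0, p) folds to roughly (*p = 3, 0.5), since
   4.0 == 0.5 * 2^3 with the significand normalized to [0.5, 1).  */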
7411 
7412 static tree
7413 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
7414 {
7415   if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
7416     return NULL_TREE;
7417 
7418   STRIP_NOPS (arg0);
7419 
7420   if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
7421     return NULL_TREE;
7422 
7423   arg1 = build_fold_indirect_ref_loc (loc, arg1);
7424 
7425   /* Proceed if a valid pointer type was passed in.  */
7426   if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
7427     {
7428       const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
7429       tree frac, exp;
7430 
7431       switch (value->cl)
7432       {
7433       case rvc_zero:
7434 	/* For +-0, return (*exp = 0, +-0).  */
7435 	exp = integer_zero_node;
7436 	frac = arg0;
7437 	break;
7438       case rvc_nan:
7439       case rvc_inf:
7440 	/* For +-NaN or +-Inf, *exp is unspecified, return arg0.  */
7441 	return omit_one_operand_loc (loc, rettype, arg0, arg1);
7442       case rvc_normal:
7443 	{
7444 	  /* Since the frexp function always expects base 2, and in
7445 	     GCC normalized significands are already in the range
7446 	     [0.5, 1.0), we have exactly what frexp wants.  */
7447 	  REAL_VALUE_TYPE frac_rvt = *value;
7448 	  SET_REAL_EXP (&frac_rvt, 0);
7449 	  frac = build_real (rettype, frac_rvt);
7450 	  exp = build_int_cst (integer_type_node, REAL_EXP (value));
7451 	}
7452 	break;
7453       default:
7454 	gcc_unreachable ();
7455       }
7456 
7457       /* Create the COMPOUND_EXPR (*arg1 = exp, frac).  */
7458       arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
7459       TREE_SIDE_EFFECTS (arg1) = 1;
7460       return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
7461     }
7462 
7463   return NULL_TREE;
7464 }
7465 
7466 /* Fold a call to builtin modf.  */
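/* E.g. modf (2.5, p) folds to roughly (*p = 2.0, 0.5); for an infinity the
   fractional part is a correctly signed zero.  */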
7467 
7468 static tree
7469 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
7470 {
7471   if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
7472     return NULL_TREE;
7473 
7474   STRIP_NOPS (arg0);
7475 
7476   if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
7477     return NULL_TREE;
7478 
7479   arg1 = build_fold_indirect_ref_loc (loc, arg1);
7480 
7481   /* Proceed if a valid pointer type was passed in.  */
7482   if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
7483     {
7484       const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
7485       REAL_VALUE_TYPE trunc, frac;
7486 
7487       switch (value->cl)
7488       {
7489       case rvc_nan:
7490       case rvc_zero:
7491 	/* For +-NaN or +-0, return (*arg1 = arg0, arg0).  */
7492 	trunc = frac = *value;
7493 	break;
7494       case rvc_inf:
7495 	/* For +-Inf, return (*arg1 = arg0, +-0).  */
7496 	frac = dconst0;
7497 	frac.sign = value->sign;
7498 	trunc = *value;
7499 	break;
7500       case rvc_normal:
7501 	/* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)).  */
7502 	real_trunc (&trunc, VOIDmode, value);
7503 	real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
7504 	/* If the original number was negative and already
7505 	   integral, then the fractional part is -0.0.  */
7506 	if (value->sign && frac.cl == rvc_zero)
7507 	  frac.sign = value->sign;
7508 	break;
7509       }
7510 
7511       /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
7512       arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
7513 			  build_real (rettype, trunc));
7514       TREE_SIDE_EFFECTS (arg1) = 1;
7515       return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
7516 			  build_real (rettype, frac));
7517     }
7518 
7519   return NULL_TREE;
7520 }
7521 
7522 /* Given a location LOC, an interclass builtin function decl FNDECL
7523    and its single argument ARG, return a folded expression computing
7524    the same, or NULL_TREE if we either couldn't or didn't want to fold
7525    (the latter happens if there's an RTL instruction available).  */
7526 
7527 static tree
7528 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
7529 {
7530   machine_mode mode;
7531 
7532   if (!validate_arg (arg, REAL_TYPE))
7533     return NULL_TREE;
7534 
7535   if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
7536     return NULL_TREE;
7537 
7538   mode = TYPE_MODE (TREE_TYPE (arg));
7539 
7540   bool is_ibm_extended = MODE_COMPOSITE_P (mode);
7541 
7542   /* If there is no optab, try generic code.  */
7543   switch (DECL_FUNCTION_CODE (fndecl))
7544     {
7545       tree result;
7546 
7547     CASE_FLT_FN (BUILT_IN_ISINF):
7548       {
7549 	/* isinf(x) -> isgreater(fabs(x),DBL_MAX).  */
7550 	tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
7551 	tree type = TREE_TYPE (arg);
7552 	REAL_VALUE_TYPE r;
7553 	char buf[128];
7554 
7555 	if (is_ibm_extended)
7556 	  {
7557 	    /* NaN and Inf are encoded in the high-order double value
7558 	       only.  The low-order value is not significant.  */
7559 	    type = double_type_node;
7560 	    mode = DFmode;
7561 	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
7562 	  }
7563 	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7564 	real_from_string (&r, buf);
7565 	result = build_call_expr (isgr_fn, 2,
7566 				  fold_build1_loc (loc, ABS_EXPR, type, arg),
7567 				  build_real (type, r));
7568 	return result;
7569       }
7570     CASE_FLT_FN (BUILT_IN_FINITE):
7571     case BUILT_IN_ISFINITE:
7572       {
7573 	/* isfinite(x) -> islessequal(fabs(x),DBL_MAX).  */
7574 	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
7575 	tree type = TREE_TYPE (arg);
7576 	REAL_VALUE_TYPE r;
7577 	char buf[128];
7578 
7579 	if (is_ibm_extended)
7580 	  {
7581 	    /* NaN and Inf are encoded in the high-order double value
7582 	       only.  The low-order value is not significant.  */
7583 	    type = double_type_node;
7584 	    mode = DFmode;
7585 	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
7586 	  }
7587 	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7588 	real_from_string (&r, buf);
7589 	result = build_call_expr (isle_fn, 2,
7590 				  fold_build1_loc (loc, ABS_EXPR, type, arg),
7591 				  build_real (type, r));
7592 	/*result = fold_build2_loc (loc, UNGT_EXPR,
7593 				  TREE_TYPE (TREE_TYPE (fndecl)),
7594 				  fold_build1_loc (loc, ABS_EXPR, type, arg),
7595 				  build_real (type, r));
7596 	result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
7597 				  TREE_TYPE (TREE_TYPE (fndecl)),
7598 				  result);*/
7599 	return result;
7600       }
7601     case BUILT_IN_ISNORMAL:
7602       {
7603 	/* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
7604 	   islessequal(fabs(x),DBL_MAX).  */
7605 	tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
7606 	tree type = TREE_TYPE (arg);
7607 	tree orig_arg, max_exp, min_exp;
7608 	machine_mode orig_mode = mode;
7609 	REAL_VALUE_TYPE rmax, rmin;
7610 	char buf[128];
7611 
7612 	orig_arg = arg = builtin_save_expr (arg);
7613 	if (is_ibm_extended)
7614 	  {
7615 	    /* Use double to test the normal range of IBM extended
7616 	       precision.  Emin for IBM extended precision is
7617 	       different to emin for IEEE double, being 53 higher
7618 	       since the low double exponent is at least 53 lower
7619 	       than the high double exponent.  */
7620 	    type = double_type_node;
7621 	    mode = DFmode;
7622 	    arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
7623 	  }
7624 	arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
7625 
7626 	get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf));
7627 	real_from_string (&rmax, buf);
7628 	sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
7629 	real_from_string (&rmin, buf);
7630 	max_exp = build_real (type, rmax);
7631 	min_exp = build_real (type, rmin);
7632 
7633 	max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
7634 	if (is_ibm_extended)
7635 	  {
7636 	    /* Testing the high end of the range is done just using
7637 	       the high double, using the same test as isfinite().
7638 	       For the subnormal end of the range we first test the
7639 	       high double, then if its magnitude is equal to the
7640 	       limit of 0x1p-969, we test whether the low double is
7641 	       non-zero and opposite sign to the high double.  */
7642 	    tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
7643 	    tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
7644 	    tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
7645 	    tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
7646 				       arg, min_exp);
7647 	    tree as_complex = build1 (VIEW_CONVERT_EXPR,
7648 				      complex_double_type_node, orig_arg);
7649 	    tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
7650 	    tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
7651 	    tree zero = build_real (type, dconst0);
7652 	    tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
7653 	    tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
7654 	    tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
7655 	    tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
7656 				      fold_build3 (COND_EXPR,
7657 						   integer_type_node,
7658 						   hilt, logt, lolt));
7659 	    eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
7660 				  eq_min, ok_lo);
7661 	    min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
7662 				   gt_min, eq_min);
7663 	  }
7664 	else
7665 	  {
7666 	    tree const isge_fn
7667 	      = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
7668 	    min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
7669 	  }
7670 	result = fold_build2 (BIT_AND_EXPR, integer_type_node,
7671 			      max_exp, min_exp);
7672 	return result;
7673       }
7674     default:
7675       break;
7676     }
7677 
7678   return NULL_TREE;
7679 }
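
/* Illustrative sketch (editorial addition, not part of GCC): for double,
   assuming no isinf/isfinite optab exists for the mode, the cases above
   rewrite the classification calls in terms of comparisons against the
   largest finite value of the type, roughly as if the user had written

     #include <float.h>

     int my_isinf (double x)
       { return __builtin_isgreater (__builtin_fabs (x), DBL_MAX); }
     int my_isfinite (double x)
       { return __builtin_islessequal (__builtin_fabs (x), DBL_MAX); }
     int my_isnormal (double x)
       { return __builtin_islessequal (__builtin_fabs (x), DBL_MAX)
		& __builtin_isgreaterequal (__builtin_fabs (x), DBL_MIN); }

   The unordered comparison builtins keep the expected NaN behavior without
   raising spurious invalid-operation exceptions.  */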
7680 
7681 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
7682    ARG is the argument for the call.  */
7683 
7684 static tree
7685 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
7686 {
7687   tree type = TREE_TYPE (TREE_TYPE (fndecl));
7688 
7689   if (!validate_arg (arg, REAL_TYPE))
7690     return NULL_TREE;
7691 
7692   switch (builtin_index)
7693     {
7694     case BUILT_IN_ISINF:
7695       if (!HONOR_INFINITIES (arg))
7696 	return omit_one_operand_loc (loc, type, integer_zero_node, arg);
7697 
7698       return NULL_TREE;
7699 
7700     case BUILT_IN_ISINF_SIGN:
7701       {
7702 	/* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
7703 	/* In a boolean context, GCC will fold the inner COND_EXPR to
7704 	   1.  So e.g. "if (isinf_sign(x))" would be folded to just
7705 	   "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
7706 	tree signbit_fn = mathfn_built_in_1
7707 	  (TREE_TYPE (arg), CFN_BUILT_IN_SIGNBIT, 0);
7708 	tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
7709 	tree tmp = NULL_TREE;
7710 
7711 	arg = builtin_save_expr (arg);
7712 
7713 	if (signbit_fn && isinf_fn)
7714 	  {
7715 	    tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
7716 	    tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
7717 
7718 	    signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
7719 					signbit_call, integer_zero_node);
7720 	    isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
7721 				      isinf_call, integer_zero_node);
7722 
7723 	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
7724 			       integer_minus_one_node, integer_one_node);
7725 	    tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
7726 			       isinf_call, tmp,
7727 			       integer_zero_node);
7728 	  }
7729 
7730 	return tmp;
7731       }
7732 
7733     case BUILT_IN_ISFINITE:
7734       if (!HONOR_NANS (arg)
7735 	  && !HONOR_INFINITIES (arg))
7736 	return omit_one_operand_loc (loc, type, integer_one_node, arg);
7737 
7738       return NULL_TREE;
7739 
7740     case BUILT_IN_ISNAN:
7741       if (!HONOR_NANS (arg))
7742 	return omit_one_operand_loc (loc, type, integer_zero_node, arg);
7743 
7744       {
7745 	bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
7746 	if (is_ibm_extended)
7747 	  {
7748 	    /* NaN and Inf are encoded in the high-order double value
7749 	       only.  The low-order value is not significant.  */
7750 	    arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
7751 	  }
7752       }
7753       arg = builtin_save_expr (arg);
7754       return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
7755 
7756     default:
7757       gcc_unreachable ();
7758     }
7759 }
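
/* Illustrative sketch (editorial addition, not part of GCC): the
   BUILT_IN_ISINF_SIGN case above effectively turns

     int f (double x) { return __builtin_isinf_sign (x); }

   into the equivalent of

     int f (double x)
       { return __builtin_isinf (x) ? (__builtin_signbit (x) ? -1 : 1) : 0; }

   and the BUILT_IN_ISNAN case, when NaNs are honored, becomes a quiet
   self-comparison of the (possibly saved) argument via UNORDERED_EXPR.  */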
7760 
7761 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
7762    This builtin will generate code to return the appropriate floating
7763    point classification depending on the value of the floating point
7764    number passed in.  The possible return values must be supplied as
7765    int arguments to the call in the following order: FP_NAN, FP_INFINITE,
7766    FP_NORMAL, FP_SUBNORMAL and FP_ZERO.  The ellipsis is for exactly
7767    one floating point argument which is "type generic".  */
7768 
7769 static tree
7770 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
7771 {
7772   tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
7773     arg, type, res, tmp;
7774   machine_mode mode;
7775   REAL_VALUE_TYPE r;
7776   char buf[128];
7777 
7778   /* Verify the required arguments in the original call.  */
7779   if (nargs != 6
7780       || !validate_arg (args[0], INTEGER_TYPE)
7781       || !validate_arg (args[1], INTEGER_TYPE)
7782       || !validate_arg (args[2], INTEGER_TYPE)
7783       || !validate_arg (args[3], INTEGER_TYPE)
7784       || !validate_arg (args[4], INTEGER_TYPE)
7785       || !validate_arg (args[5], REAL_TYPE))
7786     return NULL_TREE;
7787 
7788   fp_nan = args[0];
7789   fp_infinite = args[1];
7790   fp_normal = args[2];
7791   fp_subnormal = args[3];
7792   fp_zero = args[4];
7793   arg = args[5];
7794   type = TREE_TYPE (arg);
7795   mode = TYPE_MODE (type);
7796   arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
7797 
7798   /* fpclassify(x) ->
7799        isnan(x) ? FP_NAN :
7800          (fabs(x) == Inf ? FP_INFINITE :
7801 	   (fabs(x) >= DBL_MIN ? FP_NORMAL :
7802 	     (x == 0 ? FP_ZERO : FP_SUBNORMAL))).  */
7803 
7804   tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
7805 		     build_real (type, dconst0));
7806   res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
7807 		     tmp, fp_zero, fp_subnormal);
7808 
7809   sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
7810   real_from_string (&r, buf);
7811   tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
7812 		     arg, build_real (type, r));
7813   res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
7814 
7815   if (HONOR_INFINITIES (mode))
7816     {
7817       real_inf (&r);
7818       tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
7819 			 build_real (type, r));
7820       res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
7821 			 fp_infinite, res);
7822     }
7823 
7824   if (HONOR_NANS (mode))
7825     {
7826       tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
7827       res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
7828     }
7829 
7830   return res;
7831 }
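
/* Illustrative sketch (editorial addition, not part of GCC): the type-generic
   fpclassify macro expands to this builtin, so, assuming NaNs and infinities
   are honored for the mode, a call such as

     int classify (double x) { return fpclassify (x); }

   is folded into a chain of conditionals equivalent to

     int classify (double x)
     {
       double a = __builtin_fabs (x);
       return __builtin_isunordered (x, x) ? FP_NAN
	      : a == __builtin_inf () ? FP_INFINITE
	      : a >= DBL_MIN ? FP_NORMAL
	      : a == 0.0 ? FP_ZERO : FP_SUBNORMAL;
     }

   where the FP_* values are whatever integers were passed to the call.  */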
7832 
7833 /* Fold a call to an unordered comparison function such as
7834    __builtin_isgreater().  FNDECL is the FUNCTION_DECL for the function
7835    being called and ARG0 and ARG1 are the arguments for the call.
7836    UNORDERED_CODE and ORDERED_CODE are comparison codes that give
7837    the opposite of the desired result.  UNORDERED_CODE is used
7838    for modes that can hold NaNs and ORDERED_CODE is used for
7839    the rest.  */
7840 
7841 static tree
7842 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
7843 			    enum tree_code unordered_code,
7844 			    enum tree_code ordered_code)
7845 {
7846   tree type = TREE_TYPE (TREE_TYPE (fndecl));
7847   enum tree_code code;
7848   tree type0, type1;
7849   enum tree_code code0, code1;
7850   tree cmp_type = NULL_TREE;
7851 
7852   type0 = TREE_TYPE (arg0);
7853   type1 = TREE_TYPE (arg1);
7854 
7855   code0 = TREE_CODE (type0);
7856   code1 = TREE_CODE (type1);
7857 
7858   if (code0 == REAL_TYPE && code1 == REAL_TYPE)
7859     /* Choose the wider of two real types.  */
7860     cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
7861       ? type0 : type1;
7862   else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
7863     cmp_type = type0;
7864   else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
7865     cmp_type = type1;
7866 
7867   arg0 = fold_convert_loc (loc, cmp_type, arg0);
7868   arg1 = fold_convert_loc (loc, cmp_type, arg1);
7869 
7870   if (unordered_code == UNORDERED_EXPR)
7871     {
7872       if (!HONOR_NANS (arg0))
7873 	return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
7874       return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
7875     }
7876 
7877   code = HONOR_NANS (arg0) ? unordered_code : ordered_code;
7878   return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
7879 		      fold_build2_loc (loc, code, type, arg0, arg1));
7880 }
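
/* Illustrative sketch (editorial addition, not part of GCC): the caller
   passes the comparison codes for the opposite of the desired result, so

     int f (double a, double b) { return __builtin_isgreater (a, b); }

   is folded into the negation of that opposite comparison: with NaNs
   honored it uses the quiet UNLE_EXPR, otherwise the ordinary LE_EXPR,
   i.e. roughly

     int f (double a, double b) { return !(a <= b); }

   with the UNLE form never raising an invalid-operation exception on
   quiet NaN operands.  */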
7881 
7882 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
7883    arithmetic if it can never overflow, or into internal functions that
7884    return both the result of the arithmetic and an overflow boolean flag in
7885    a complex integer result, or some other check for overflow.  */
7886 
7887 static tree
7888 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
7889 			     tree arg0, tree arg1, tree arg2)
7890 {
7891   enum internal_fn ifn = IFN_LAST;
7892   tree type = TREE_TYPE (TREE_TYPE (arg2));
7893   tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
7894   switch (fcode)
7895     {
7896     case BUILT_IN_ADD_OVERFLOW:
7897     case BUILT_IN_SADD_OVERFLOW:
7898     case BUILT_IN_SADDL_OVERFLOW:
7899     case BUILT_IN_SADDLL_OVERFLOW:
7900     case BUILT_IN_UADD_OVERFLOW:
7901     case BUILT_IN_UADDL_OVERFLOW:
7902     case BUILT_IN_UADDLL_OVERFLOW:
7903       ifn = IFN_ADD_OVERFLOW;
7904       break;
7905     case BUILT_IN_SUB_OVERFLOW:
7906     case BUILT_IN_SSUB_OVERFLOW:
7907     case BUILT_IN_SSUBL_OVERFLOW:
7908     case BUILT_IN_SSUBLL_OVERFLOW:
7909     case BUILT_IN_USUB_OVERFLOW:
7910     case BUILT_IN_USUBL_OVERFLOW:
7911     case BUILT_IN_USUBLL_OVERFLOW:
7912       ifn = IFN_SUB_OVERFLOW;
7913       break;
7914     case BUILT_IN_MUL_OVERFLOW:
7915     case BUILT_IN_SMUL_OVERFLOW:
7916     case BUILT_IN_SMULL_OVERFLOW:
7917     case BUILT_IN_SMULLL_OVERFLOW:
7918     case BUILT_IN_UMUL_OVERFLOW:
7919     case BUILT_IN_UMULL_OVERFLOW:
7920     case BUILT_IN_UMULLL_OVERFLOW:
7921       ifn = IFN_MUL_OVERFLOW;
7922       break;
7923     default:
7924       gcc_unreachable ();
7925     }
7926   tree ctype = build_complex_type (type);
7927   tree call = build_call_expr_internal_loc (loc, ifn, ctype,
7928 					    2, arg0, arg1);
7929   tree tgt = save_expr (call);
7930   tree intres = build1_loc (loc, REALPART_EXPR, type, tgt);
7931   tree ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
7932   ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
7933   tree store
7934     = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
7935   return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
7936 }
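
/* Illustrative sketch (editorial addition, not part of GCC): the folding
   above turns

     _Bool f (int a, int b, int *res)
       { return __builtin_add_overflow (a, b, res); }

   into roughly the following sequence, written here as pseudo-GIMPLE:

     _Complex int tmp = .ADD_OVERFLOW (a, b);
     *res = REALPART_EXPR (tmp);              the wrapped result
     return (_Bool) IMAGPART_EXPR (tmp);      the overflow flag

   i.e. a COMPOUND_EXPR of the store and the flag, which later passes can
   expand to an add-with-overflow instruction pattern when available.  */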
7937 
7938 /* Fold a call to built-in function FNDECL with 0 arguments.
7939    This function returns NULL_TREE if no simplification was possible.  */
7940 
7941 static tree
7942 fold_builtin_0 (location_t loc, tree fndecl)
7943 {
7944   tree type = TREE_TYPE (TREE_TYPE (fndecl));
7945   enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7946   switch (fcode)
7947     {
7948     CASE_FLT_FN (BUILT_IN_INF):
7949     case BUILT_IN_INFD32:
7950     case BUILT_IN_INFD64:
7951     case BUILT_IN_INFD128:
7952       return fold_builtin_inf (loc, type, true);
7953 
7954     CASE_FLT_FN (BUILT_IN_HUGE_VAL):
7955       return fold_builtin_inf (loc, type, false);
7956 
7957     case BUILT_IN_CLASSIFY_TYPE:
7958       return fold_builtin_classify_type (NULL_TREE);
7959 
7960     default:
7961       break;
7962     }
7963   return NULL_TREE;
7964 }
7965 
7966 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
7967    This function returns NULL_TREE if no simplification was possible.  */
7968 
7969 static tree
7970 fold_builtin_1 (location_t loc, tree fndecl, tree arg0)
7971 {
7972   tree type = TREE_TYPE (TREE_TYPE (fndecl));
7973   enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7974 
7975   if (TREE_CODE (arg0) == ERROR_MARK)
7976     return NULL_TREE;
7977 
7978   if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
7979     return ret;
7980 
7981   switch (fcode)
7982     {
7983     case BUILT_IN_CONSTANT_P:
7984       {
7985 	tree val = fold_builtin_constant_p (arg0);
7986 
7987 	/* Gimplification will pull the CALL_EXPR for the builtin out of
7988 	   an if condition.  When not optimizing, we'll not CSE it back.
7989 	   To avoid link error types of regressions, return false now.  */
7990 	if (!val && !optimize)
7991 	  val = integer_zero_node;
7992 
7993 	return val;
7994       }
7995 
7996     case BUILT_IN_CLASSIFY_TYPE:
7997       return fold_builtin_classify_type (arg0);
7998 
7999     case BUILT_IN_STRLEN:
8000       return fold_builtin_strlen (loc, type, arg0);
8001 
8002     CASE_FLT_FN (BUILT_IN_FABS):
8003     case BUILT_IN_FABSD32:
8004     case BUILT_IN_FABSD64:
8005     case BUILT_IN_FABSD128:
8006       return fold_builtin_fabs (loc, arg0, type);
8007 
8008     case BUILT_IN_ABS:
8009     case BUILT_IN_LABS:
8010     case BUILT_IN_LLABS:
8011     case BUILT_IN_IMAXABS:
8012       return fold_builtin_abs (loc, arg0, type);
8013 
8014     CASE_FLT_FN (BUILT_IN_CONJ):
8015       if (validate_arg (arg0, COMPLEX_TYPE)
8016 	&& TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8017 	return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
8018     break;
8019 
8020     CASE_FLT_FN (BUILT_IN_CREAL):
8021       if (validate_arg (arg0, COMPLEX_TYPE)
8022 	&& TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8023 	return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
8024     break;
8025 
8026     CASE_FLT_FN (BUILT_IN_CIMAG):
8027       if (validate_arg (arg0, COMPLEX_TYPE)
8028 	  && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
8029 	return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
8030     break;
8031 
8032     CASE_FLT_FN (BUILT_IN_CARG):
8033       return fold_builtin_carg (loc, arg0, type);
8034 
8035     case BUILT_IN_ISASCII:
8036       return fold_builtin_isascii (loc, arg0);
8037 
8038     case BUILT_IN_TOASCII:
8039       return fold_builtin_toascii (loc, arg0);
8040 
8041     case BUILT_IN_ISDIGIT:
8042       return fold_builtin_isdigit (loc, arg0);
8043 
8044     CASE_FLT_FN (BUILT_IN_FINITE):
8045     case BUILT_IN_FINITED32:
8046     case BUILT_IN_FINITED64:
8047     case BUILT_IN_FINITED128:
8048     case BUILT_IN_ISFINITE:
8049       {
8050 	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
8051 	if (ret)
8052 	  return ret;
8053 	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8054       }
8055 
8056     CASE_FLT_FN (BUILT_IN_ISINF):
8057     case BUILT_IN_ISINFD32:
8058     case BUILT_IN_ISINFD64:
8059     case BUILT_IN_ISINFD128:
8060       {
8061 	tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
8062 	if (ret)
8063 	  return ret;
8064 	return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8065       }
8066 
8067     case BUILT_IN_ISNORMAL:
8068       return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
8069 
8070     case BUILT_IN_ISINF_SIGN:
8071       return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
8072 
8073     CASE_FLT_FN (BUILT_IN_ISNAN):
8074     case BUILT_IN_ISNAND32:
8075     case BUILT_IN_ISNAND64:
8076     case BUILT_IN_ISNAND128:
8077       return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
8078 
8079     case BUILT_IN_FREE:
8080       if (integer_zerop (arg0))
8081 	return build_empty_stmt (loc);
8082       break;
8083 
8084     default:
8085       break;
8086     }
8087 
8088   return NULL_TREE;
8089 
8090 }
8091 
8092 /* Fold a call to built-in function FNDECL with 2 arguments, ARG0 and ARG1.
8093    This function returns NULL_TREE if no simplification was possible.  */
8094 
8095 static tree
8096 fold_builtin_2 (location_t loc, tree fndecl, tree arg0, tree arg1)
8097 {
8098   tree type = TREE_TYPE (TREE_TYPE (fndecl));
8099   enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8100 
8101   if (TREE_CODE (arg0) == ERROR_MARK
8102       || TREE_CODE (arg1) == ERROR_MARK)
8103     return NULL_TREE;
8104 
8105   if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
8106     return ret;
8107 
8108   switch (fcode)
8109     {
8110     CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
8111     CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
8112       if (validate_arg (arg0, REAL_TYPE)
8113 	  && validate_arg (arg1, POINTER_TYPE))
8114 	return do_mpfr_lgamma_r (arg0, arg1, type);
8115     break;
8116 
8117     CASE_FLT_FN (BUILT_IN_FREXP):
8118       return fold_builtin_frexp (loc, arg0, arg1, type);
8119 
8120     CASE_FLT_FN (BUILT_IN_MODF):
8121       return fold_builtin_modf (loc, arg0, arg1, type);
8122 
8123     case BUILT_IN_STRSTR:
8124       return fold_builtin_strstr (loc, arg0, arg1, type);
8125 
8126     case BUILT_IN_STRSPN:
8127       return fold_builtin_strspn (loc, arg0, arg1);
8128 
8129     case BUILT_IN_STRCSPN:
8130       return fold_builtin_strcspn (loc, arg0, arg1);
8131 
8132     case BUILT_IN_STRCHR:
8133     case BUILT_IN_INDEX:
8134       return fold_builtin_strchr (loc, arg0, arg1, type);
8135 
8136     case BUILT_IN_STRRCHR:
8137     case BUILT_IN_RINDEX:
8138       return fold_builtin_strrchr (loc, arg0, arg1, type);
8139 
8140     case BUILT_IN_STRCMP:
8141       return fold_builtin_strcmp (loc, arg0, arg1);
8142 
8143     case BUILT_IN_STRPBRK:
8144       return fold_builtin_strpbrk (loc, arg0, arg1, type);
8145 
8146     case BUILT_IN_EXPECT:
8147       return fold_builtin_expect (loc, arg0, arg1, NULL_TREE);
8148 
8149     case BUILT_IN_ISGREATER:
8150       return fold_builtin_unordered_cmp (loc, fndecl,
8151 					 arg0, arg1, UNLE_EXPR, LE_EXPR);
8152     case BUILT_IN_ISGREATEREQUAL:
8153       return fold_builtin_unordered_cmp (loc, fndecl,
8154 					 arg0, arg1, UNLT_EXPR, LT_EXPR);
8155     case BUILT_IN_ISLESS:
8156       return fold_builtin_unordered_cmp (loc, fndecl,
8157 					 arg0, arg1, UNGE_EXPR, GE_EXPR);
8158     case BUILT_IN_ISLESSEQUAL:
8159       return fold_builtin_unordered_cmp (loc, fndecl,
8160 					 arg0, arg1, UNGT_EXPR, GT_EXPR);
8161     case BUILT_IN_ISLESSGREATER:
8162       return fold_builtin_unordered_cmp (loc, fndecl,
8163 					 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
8164     case BUILT_IN_ISUNORDERED:
8165       return fold_builtin_unordered_cmp (loc, fndecl,
8166 					 arg0, arg1, UNORDERED_EXPR,
8167 					 NOP_EXPR);
8168 
8169       /* We do the folding for va_start in the expander.  */
8170     case BUILT_IN_VA_START:
8171       break;
8172 
8173     case BUILT_IN_OBJECT_SIZE:
8174       return fold_builtin_object_size (arg0, arg1);
8175 
8176     case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
8177       return fold_builtin_atomic_always_lock_free (arg0, arg1);
8178 
8179     case BUILT_IN_ATOMIC_IS_LOCK_FREE:
8180       return fold_builtin_atomic_is_lock_free (arg0, arg1);
8181 
8182     default:
8183       break;
8184     }
8185   return NULL_TREE;
8186 }
8187 
8188 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
8189    and ARG2.
8190    This function returns NULL_TREE if no simplification was possible.  */
8191 
8192 static tree
8193 fold_builtin_3 (location_t loc, tree fndecl,
8194 		tree arg0, tree arg1, tree arg2)
8195 {
8196   tree type = TREE_TYPE (TREE_TYPE (fndecl));
8197   enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8198 
8199   if (TREE_CODE (arg0) == ERROR_MARK
8200       || TREE_CODE (arg1) == ERROR_MARK
8201       || TREE_CODE (arg2) == ERROR_MARK)
8202     return NULL_TREE;
8203 
8204   if (tree ret = fold_const_call (as_combined_fn (fcode), type,
8205 				  arg0, arg1, arg2))
8206     return ret;
8207 
8208   switch (fcode)
8209     {
8210 
8211     CASE_FLT_FN (BUILT_IN_SINCOS):
8212       return fold_builtin_sincos (loc, arg0, arg1, arg2);
8213 
8214     CASE_FLT_FN (BUILT_IN_FMA):
8215       return fold_builtin_fma (loc, arg0, arg1, arg2, type);
8216 
8217     CASE_FLT_FN (BUILT_IN_REMQUO):
8218       if (validate_arg (arg0, REAL_TYPE)
8219 	  && validate_arg (arg1, REAL_TYPE)
8220 	  && validate_arg (arg2, POINTER_TYPE))
8221 	return do_mpfr_remquo (arg0, arg1, arg2);
8222     break;
8223 
8224     case BUILT_IN_STRNCMP:
8225       return fold_builtin_strncmp (loc, arg0, arg1, arg2);
8226 
8227     case BUILT_IN_MEMCHR:
8228       return fold_builtin_memchr (loc, arg0, arg1, arg2, type);
8229 
8230     case BUILT_IN_BCMP:
8231     case BUILT_IN_MEMCMP:
8232       return fold_builtin_memcmp (loc, arg0, arg1, arg2);
8233 
8234     case BUILT_IN_EXPECT:
8235       return fold_builtin_expect (loc, arg0, arg1, arg2);
8236 
8237     case BUILT_IN_ADD_OVERFLOW:
8238     case BUILT_IN_SUB_OVERFLOW:
8239     case BUILT_IN_MUL_OVERFLOW:
8240     case BUILT_IN_SADD_OVERFLOW:
8241     case BUILT_IN_SADDL_OVERFLOW:
8242     case BUILT_IN_SADDLL_OVERFLOW:
8243     case BUILT_IN_SSUB_OVERFLOW:
8244     case BUILT_IN_SSUBL_OVERFLOW:
8245     case BUILT_IN_SSUBLL_OVERFLOW:
8246     case BUILT_IN_SMUL_OVERFLOW:
8247     case BUILT_IN_SMULL_OVERFLOW:
8248     case BUILT_IN_SMULLL_OVERFLOW:
8249     case BUILT_IN_UADD_OVERFLOW:
8250     case BUILT_IN_UADDL_OVERFLOW:
8251     case BUILT_IN_UADDLL_OVERFLOW:
8252     case BUILT_IN_USUB_OVERFLOW:
8253     case BUILT_IN_USUBL_OVERFLOW:
8254     case BUILT_IN_USUBLL_OVERFLOW:
8255     case BUILT_IN_UMUL_OVERFLOW:
8256     case BUILT_IN_UMULL_OVERFLOW:
8257     case BUILT_IN_UMULLL_OVERFLOW:
8258       return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
8259 
8260     default:
8261       break;
8262     }
8263   return NULL_TREE;
8264 }
8265 
8266 /* Fold a call to built-in function FNDECL.  ARGS is an array of NARGS
8267    arguments.  IGNORE is true if the result of the
8268    function call is ignored.  This function returns NULL_TREE if no
8269    simplification was possible.  */
8270 
8271 tree
8272 fold_builtin_n (location_t loc, tree fndecl, tree *args, int nargs, bool)
8273 {
8274   tree ret = NULL_TREE;
8275 
8276   switch (nargs)
8277     {
8278     case 0:
8279       ret = fold_builtin_0 (loc, fndecl);
8280       break;
8281     case 1:
8282       ret = fold_builtin_1 (loc, fndecl, args[0]);
8283       break;
8284     case 2:
8285       ret = fold_builtin_2 (loc, fndecl, args[0], args[1]);
8286       break;
8287     case 3:
8288       ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
8289       break;
8290     default:
8291       ret = fold_builtin_varargs (loc, fndecl, args, nargs);
8292       break;
8293     }
8294   if (ret)
8295     {
8296       ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
8297       SET_EXPR_LOCATION (ret, loc);
8298       TREE_NO_WARNING (ret) = 1;
8299       return ret;
8300     }
8301   return NULL_TREE;
8302 }
8303 
8304 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
8305    list ARGS along with N new arguments in NEWARGS.  SKIP is the number
8306    of arguments in ARGS to be omitted.  OLDNARGS is the number of
8307    elements in ARGS.  */
8308 
8309 static tree
8310 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
8311 			  int skip, tree fndecl, int n, va_list newargs)
8312 {
8313   int nargs = oldnargs - skip + n;
8314   tree *buffer;
8315 
8316   if (n > 0)
8317     {
8318       int i, j;
8319 
8320       buffer = XALLOCAVEC (tree, nargs);
8321       for (i = 0; i < n; i++)
8322 	buffer[i] = va_arg (newargs, tree);
8323       for (j = skip; j < oldnargs; j++, i++)
8324 	buffer[i] = args[j];
8325     }
8326   else
8327     buffer = args + skip;
8328 
8329   return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
8330 }
8331 
8332 /* Return true if FNDECL shouldn't be folded right now.
8333    If a built-in function has an inline attribute always_inline
8334    wrapper, defer folding it until after always_inline functions have
8335    been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
8336    might not be performed.  */
8337 
8338 bool
8339 avoid_folding_inline_builtin (tree fndecl)
8340 {
8341   return (DECL_DECLARED_INLINE_P (fndecl)
8342 	  && DECL_DISREGARD_INLINE_LIMITS (fndecl)
8343 	  && cfun
8344 	  && !cfun->always_inline_functions_inlined
8345 	  && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
8346 }
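
/* Illustrative sketch (editorial addition, not part of GCC): the deferral
   matters for fortified headers, which typically wrap a builtin in an
   always_inline function along these lines:

     extern __inline __attribute__ ((__always_inline__)) void *
     memcpy (void *dest, const void *src, size_t n)
     {
       return __builtin___memcpy_chk (dest, src, n,
				      __builtin_object_size (dest, 0));
     }

   Folding the memcpy call before such wrappers are inlined would bypass
   the -D_FORTIFY_SOURCE object-size check, so folding is postponed until
   always_inline functions have been inlined.  */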
8347 
8348 /* A wrapper function for builtin folding that prevents warnings for
8349    "statement without effect" and the like, caused by removing the
8350    call node earlier than the warning is generated.  */
8351 
8352 tree
8353 fold_call_expr (location_t loc, tree exp, bool ignore)
8354 {
8355   tree ret = NULL_TREE;
8356   tree fndecl = get_callee_fndecl (exp);
8357   if (fndecl
8358       && TREE_CODE (fndecl) == FUNCTION_DECL
8359       && DECL_BUILT_IN (fndecl)
8360       /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
8361 	 yet.  Defer folding until we see all the arguments
8362 	 (after inlining).  */
8363       && !CALL_EXPR_VA_ARG_PACK (exp))
8364     {
8365       int nargs = call_expr_nargs (exp);
8366 
8367       /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
8368 	 instead last argument is __builtin_va_arg_pack ().  Defer folding
8369 	 even in that case, until arguments are finalized.  */
8370       if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
8371 	{
8372 	  tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
8373 	  if (fndecl2
8374 	      && TREE_CODE (fndecl2) == FUNCTION_DECL
8375 	      && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
8376 	      && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
8377 	    return NULL_TREE;
8378 	}
8379 
8380       if (avoid_folding_inline_builtin (fndecl))
8381 	return NULL_TREE;
8382 
8383       if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
8384         return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
8385 				     CALL_EXPR_ARGP (exp), ignore);
8386       else
8387 	{
8388 	  tree *args = CALL_EXPR_ARGP (exp);
8389 	  ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
8390 	  if (ret)
8391 	    return ret;
8392 	}
8393     }
8394   return NULL_TREE;
8395 }
8396 
8397 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
8398    N arguments are passed in the array ARGARRAY.  Return a folded
8399    expression or NULL_TREE if no simplification was possible.  */
8400 
8401 tree
8402 fold_builtin_call_array (location_t loc, tree,
8403 			 tree fn,
8404 			 int n,
8405 			 tree *argarray)
8406 {
8407   if (TREE_CODE (fn) != ADDR_EXPR)
8408     return NULL_TREE;
8409 
8410   tree fndecl = TREE_OPERAND (fn, 0);
8411   if (TREE_CODE (fndecl) == FUNCTION_DECL
8412       && DECL_BUILT_IN (fndecl))
8413     {
8414       /* If last argument is __builtin_va_arg_pack (), arguments to this
8415 	 function are not finalized yet.  Defer folding until they are.  */
8416       if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
8417 	{
8418 	  tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
8419 	  if (fndecl2
8420 	      && TREE_CODE (fndecl2) == FUNCTION_DECL
8421 	      && DECL_BUILT_IN_CLASS (fndecl2) == BUILT_IN_NORMAL
8422 	      && DECL_FUNCTION_CODE (fndecl2) == BUILT_IN_VA_ARG_PACK)
8423 	    return NULL_TREE;
8424 	}
8425       if (avoid_folding_inline_builtin (fndecl))
8426 	return NULL_TREE;
8427       if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
8428 	return targetm.fold_builtin (fndecl, n, argarray, false);
8429       else
8430 	return fold_builtin_n (loc, fndecl, argarray, n, false);
8431     }
8432 
8433   return NULL_TREE;
8434 }
8435 
8436 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
8437    along with N new arguments specified as the "..." parameters.  SKIP
8438    is the number of arguments in EXP to be omitted.  This function is used
8439    to do varargs-to-varargs transformations.  */
8440 
8441 static tree
8442 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
8443 {
8444   va_list ap;
8445   tree t;
8446 
8447   va_start (ap, n);
8448   t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
8449 				CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
8450   va_end (ap);
8451 
8452   return t;
8453 }
8454 
8455 /* Validate a single argument ARG against a tree code CODE representing
8456    a type.  */
8457 
8458 static bool
8459 validate_arg (const_tree arg, enum tree_code code)
8460 {
8461   if (!arg)
8462     return false;
8463   else if (code == POINTER_TYPE)
8464     return POINTER_TYPE_P (TREE_TYPE (arg));
8465   else if (code == INTEGER_TYPE)
8466     return INTEGRAL_TYPE_P (TREE_TYPE (arg));
8467   return code == TREE_CODE (TREE_TYPE (arg));
8468 }
8469 
8470 /* This function validates the types of a function call argument list
8471    against a specified list of tree_codes.  If the last specifier is a 0,
8472    that represents an ellipsis, otherwise the last specifier must be a
8473    VOID_TYPE.
8474 
8475    This is the GIMPLE version of validate_arglist.  Eventually we want to
8476    completely convert builtins.c to work from GIMPLEs and the tree based
8477    validate_arglist will then be removed.  */
8478 
8479 bool
8480 validate_gimple_arglist (const gcall *call, ...)
8481 {
8482   enum tree_code code;
8483   bool res = false;
8484   va_list ap;
8485   const_tree arg;
8486   size_t i;
8487 
8488   va_start (ap, call);
8489   i = 0;
8490 
8491   do
8492     {
8493       code = (enum tree_code) va_arg (ap, int);
8494       switch (code)
8495 	{
8496 	case 0:
8497 	  /* This signifies an ellipsis; any further arguments are all ok.  */
8498 	  res = true;
8499 	  goto end;
8500 	case VOID_TYPE:
8501 	  /* This signifies an endlink, if no arguments remain, return
8502 	     true, otherwise return false.  */
8503 	  res = (i == gimple_call_num_args (call));
8504 	  goto end;
8505 	default:
8506 	  /* If no parameters remain or the parameter's code does not
8507 	     match the specified code, return false.  Otherwise continue
8508 	     checking any remaining arguments.  */
8509 	  arg = gimple_call_arg (call, i++);
8510 	  if (!validate_arg (arg, code))
8511 	    goto end;
8512 	  break;
8513 	}
8514     }
8515   while (1);
8516 
8517   /* We need gotos here since we can only have one VA_CLOSE in a
8518      function.  */
8519  end: ;
8520   va_end (ap);
8521 
8522   return res;
8523 }
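
/* Illustrative sketch (editorial addition, not part of GCC): callers check
   a fixed signature terminated by VOID_TYPE, or an open-ended one
   terminated by 0, for example

     if (!validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE,
				   VOID_TYPE))
       return false;    exactly one pointer and one integer argument

     if (!validate_gimple_arglist (call, REAL_TYPE, 0))
       return false;    a real argument followed by anything
*/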
8524 
8525 /* Default target-specific builtin expander that does nothing.  */
8526 
8527 rtx
8528 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
8529 			rtx target ATTRIBUTE_UNUSED,
8530 			rtx subtarget ATTRIBUTE_UNUSED,
8531 			machine_mode mode ATTRIBUTE_UNUSED,
8532 			int ignore ATTRIBUTE_UNUSED)
8533 {
8534   return NULL_RTX;
8535 }
8536 
8537 /* Returns true if EXP represents data that would potentially reside
8538    in a readonly section.  */
8539 
8540 bool
8541 readonly_data_expr (tree exp)
8542 {
8543   STRIP_NOPS (exp);
8544 
8545   if (TREE_CODE (exp) != ADDR_EXPR)
8546     return false;
8547 
8548   exp = get_base_address (TREE_OPERAND (exp, 0));
8549   if (!exp)
8550     return false;
8551 
8552   /* Make sure we call decl_readonly_section only for trees it
8553      can handle (since it returns true for everything it doesn't
8554      understand).  */
8555   if (TREE_CODE (exp) == STRING_CST
8556       || TREE_CODE (exp) == CONSTRUCTOR
8557       || (TREE_CODE (exp) == VAR_DECL && TREE_STATIC (exp)))
8558     return decl_readonly_section (exp, 0);
8559   else
8560     return false;
8561 }
8562 
8563 /* Simplify a call to the strstr builtin.  S1 and S2 are the arguments
8564    to the call, and TYPE is its return type.
8565 
8566    Return NULL_TREE if no simplification was possible, otherwise return the
8567    simplified form of the call as a tree.
8568 
8569    The simplified form may be a constant or other expression which
8570    computes the same value, but in a more efficient manner (including
8571    calls to other builtin functions).
8572 
8573    The call may contain arguments which need to be evaluated, but
8574    which are not useful to determine the result of the call.  In
8575    this case we return a chain of COMPOUND_EXPRs.  The LHS of each
8576    COMPOUND_EXPR will be an argument which must be evaluated.
8577    COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
8578    COMPOUND_EXPR in the chain will contain the tree for the simplified
8579    form of the builtin function call.  */
8580 
8581 static tree
8582 fold_builtin_strstr (location_t loc, tree s1, tree s2, tree type)
8583 {
8584   if (!validate_arg (s1, POINTER_TYPE)
8585       || !validate_arg (s2, POINTER_TYPE))
8586     return NULL_TREE;
8587   else
8588     {
8589       tree fn;
8590       const char *p1, *p2;
8591 
8592       p2 = c_getstr (s2);
8593       if (p2 == NULL)
8594 	return NULL_TREE;
8595 
8596       p1 = c_getstr (s1);
8597       if (p1 != NULL)
8598 	{
8599 	  const char *r = strstr (p1, p2);
8600 	  tree tem;
8601 
8602 	  if (r == NULL)
8603 	    return build_int_cst (TREE_TYPE (s1), 0);
8604 
8605 	  /* Return an offset into the constant string argument.  */
8606 	  tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
8607 	  return fold_convert_loc (loc, type, tem);
8608 	}
8609 
8610       /* The argument is const char *, and the result is char *, so we need
8611 	 a type conversion here to avoid a warning.  */
8612       if (p2[0] == '\0')
8613 	return fold_convert_loc (loc, type, s1);
8614 
8615       if (p2[1] != '\0')
8616 	return NULL_TREE;
8617 
8618       fn = builtin_decl_implicit (BUILT_IN_STRCHR);
8619       if (!fn)
8620 	return NULL_TREE;
8621 
8622       /* New argument list transforming strstr(s1, s2) to
8623 	 strchr(s1, s2[0]).  */
8624       return build_call_expr_loc (loc, fn, 2, s1,
8625 				  build_int_cst (integer_type_node, p2[0]));
8626     }
8627 }
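
/* Illustrative sketch (editorial addition, not part of GCC): the cases
   handled above amount to

     strstr ("abcdef", "cd")  ->  "abcdef" + 2     (both strings constant)
     strstr (s, "")           ->  (char *) s
     strstr (s, "c")          ->  strchr (s, 'c')

   and everything else is left to the library implementation.  */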
8628 
8629 /* Simplify a call to the strchr builtin.  S1 and S2 are the arguments to
8630    the call, and TYPE is its return type.
8631 
8632    Return NULL_TREE if no simplification was possible, otherwise return the
8633    simplified form of the call as a tree.
8634 
8635    The simplified form may be a constant or other expression which
8636    computes the same value, but in a more efficient manner (including
8637    calls to other builtin functions).
8638 
8639    The call may contain arguments which need to be evaluated, but
8640    which are not useful to determine the result of the call.  In
8641    this case we return a chain of COMPOUND_EXPRs.  The LHS of each
8642    COMPOUND_EXPR will be an argument which must be evaluated.
8643    COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
8644    COMPOUND_EXPR in the chain will contain the tree for the simplified
8645    form of the builtin function call.  */
8646 
8647 static tree
8648 fold_builtin_strchr (location_t loc, tree s1, tree s2, tree type)
8649 {
8650   if (!validate_arg (s1, POINTER_TYPE)
8651       || !validate_arg (s2, INTEGER_TYPE))
8652     return NULL_TREE;
8653   else
8654     {
8655       const char *p1;
8656 
8657       if (TREE_CODE (s2) != INTEGER_CST)
8658 	return NULL_TREE;
8659 
8660       p1 = c_getstr (s1);
8661       if (p1 != NULL)
8662 	{
8663 	  char c;
8664 	  const char *r;
8665 	  tree tem;
8666 
8667 	  if (target_char_cast (s2, &c))
8668 	    return NULL_TREE;
8669 
8670 	  r = strchr (p1, c);
8671 
8672 	  if (r == NULL)
8673 	    return build_int_cst (TREE_TYPE (s1), 0);
8674 
8675 	  /* Return an offset into the constant string argument.  */
8676 	  tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
8677 	  return fold_convert_loc (loc, type, tem);
8678 	}
8679       return NULL_TREE;
8680     }
8681 }
8682 
8683 /* Simplify a call to the strrchr builtin.  S1 and S2 are the arguments to
8684    the call, and TYPE is its return type.
8685 
8686    Return NULL_TREE if no simplification was possible, otherwise return the
8687    simplified form of the call as a tree.
8688 
8689    The simplified form may be a constant or other expression which
8690    computes the same value, but in a more efficient manner (including
8691    calls to other builtin functions).
8692 
8693    The call may contain arguments which need to be evaluated, but
8694    which are not useful to determine the result of the call.  In
8695    this case we return a chain of COMPOUND_EXPRs.  The LHS of each
8696    COMPOUND_EXPR will be an argument which must be evaluated.
8697    COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
8698    COMPOUND_EXPR in the chain will contain the tree for the simplified
8699    form of the builtin function call.  */
8700 
8701 static tree
8702 fold_builtin_strrchr (location_t loc, tree s1, tree s2, tree type)
8703 {
8704   if (!validate_arg (s1, POINTER_TYPE)
8705       || !validate_arg (s2, INTEGER_TYPE))
8706     return NULL_TREE;
8707   else
8708     {
8709       tree fn;
8710       const char *p1;
8711 
8712       if (TREE_CODE (s2) != INTEGER_CST)
8713 	return NULL_TREE;
8714 
8715       p1 = c_getstr (s1);
8716       if (p1 != NULL)
8717 	{
8718 	  char c;
8719 	  const char *r;
8720 	  tree tem;
8721 
8722 	  if (target_char_cast (s2, &c))
8723 	    return NULL_TREE;
8724 
8725 	  r = strrchr (p1, c);
8726 
8727 	  if (r == NULL)
8728 	    return build_int_cst (TREE_TYPE (s1), 0);
8729 
8730 	  /* Return an offset into the constant string argument.  */
8731 	  tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
8732 	  return fold_convert_loc (loc, type, tem);
8733 	}
8734 
8735       if (! integer_zerop (s2))
8736 	return NULL_TREE;
8737 
8738       fn = builtin_decl_implicit (BUILT_IN_STRCHR);
8739       if (!fn)
8740 	return NULL_TREE;
8741 
8742       /* Transform strrchr(s1, '\0') to strchr(s1, '\0').  */
8743       return build_call_expr_loc (loc, fn, 2, s1, s2);
8744     }
8745 }
8746 
8747 /* Simplify a call to the strpbrk builtin.  S1 and S2 are the arguments
8748    to the call, and TYPE is its return type.
8749 
8750    Return NULL_TREE if no simplification was possible, otherwise return the
8751    simplified form of the call as a tree.
8752 
8753    The simplified form may be a constant or other expression which
8754    computes the same value, but in a more efficient manner (including
8755    calls to other builtin functions).
8756 
8757    The call may contain arguments which need to be evaluated, but
8758    which are not useful to determine the result of the call.  In
8759    this case we return a chain of COMPOUND_EXPRs.  The LHS of each
8760    COMPOUND_EXPR will be an argument which must be evaluated.
8761    COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
8762    COMPOUND_EXPR in the chain will contain the tree for the simplified
8763    form of the builtin function call.  */
8764 
8765 static tree
8766 fold_builtin_strpbrk (location_t loc, tree s1, tree s2, tree type)
8767 {
8768   if (!validate_arg (s1, POINTER_TYPE)
8769       || !validate_arg (s2, POINTER_TYPE))
8770     return NULL_TREE;
8771   else
8772     {
8773       tree fn;
8774       const char *p1, *p2;
8775 
8776       p2 = c_getstr (s2);
8777       if (p2 == NULL)
8778 	return NULL_TREE;
8779 
8780       p1 = c_getstr (s1);
8781       if (p1 != NULL)
8782 	{
8783 	  const char *r = strpbrk (p1, p2);
8784 	  tree tem;
8785 
8786 	  if (r == NULL)
8787 	    return build_int_cst (TREE_TYPE (s1), 0);
8788 
8789 	  /* Return an offset into the constant string argument.  */
8790 	  tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
8791 	  return fold_convert_loc (loc, type, tem);
8792 	}
8793 
8794       if (p2[0] == '\0')
8795 	/* strpbrk(x, "") == NULL.
8796 	   Evaluate and ignore s1 in case it had side-effects.  */
8797 	return omit_one_operand_loc (loc, TREE_TYPE (s1), integer_zero_node, s1);
8798 
8799       if (p2[1] != '\0')
8800 	return NULL_TREE;  /* Really call strpbrk.  */
8801 
8802       fn = builtin_decl_implicit (BUILT_IN_STRCHR);
8803       if (!fn)
8804 	return NULL_TREE;
8805 
8806       /* New argument list transforming strpbrk(s1, s2) to
8807 	 strchr(s1, s2[0]).  */
8808       return build_call_expr_loc (loc, fn, 2, s1,
8809 				  build_int_cst (integer_type_node, p2[0]));
8810     }
8811 }
8812 
8813 /* Simplify a call to the strspn builtin.  S1 and S2 are the arguments
8814    to the call.
8815 
8816    Return NULL_TREE if no simplification was possible, otherwise return the
8817    simplified form of the call as a tree.
8818 
8819    The simplified form may be a constant or other expression which
8820    computes the same value, but in a more efficient manner (including
8821    calls to other builtin functions).
8822 
8823    The call may contain arguments which need to be evaluated, but
8824    which are not useful to determine the result of the call.  In
8825    this case we return a chain of COMPOUND_EXPRs.  The LHS of each
8826    COMPOUND_EXPR will be an argument which must be evaluated.
8827    COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
8828    COMPOUND_EXPR in the chain will contain the tree for the simplified
8829    form of the builtin function call.  */
8830 
8831 static tree
8832 fold_builtin_strspn (location_t loc, tree s1, tree s2)
8833 {
8834   if (!validate_arg (s1, POINTER_TYPE)
8835       || !validate_arg (s2, POINTER_TYPE))
8836     return NULL_TREE;
8837   else
8838     {
8839       const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
8840 
8841       /* If either argument is "", the result is 0.  */
8842       if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
8843 	/* Evaluate and ignore both arguments in case either one has
8844 	   side-effects.  */
8845 	return omit_two_operands_loc (loc, size_type_node, size_zero_node,
8846 				  s1, s2);
8847       return NULL_TREE;
8848     }
8849 }
8850 
8851 /* Simplify a call to the strcspn builtin.  S1 and S2 are the arguments
8852    to the call.
8853 
8854    Return NULL_TREE if no simplification was possible, otherwise return the
8855    simplified form of the call as a tree.
8856 
8857    The simplified form may be a constant or other expression which
8858    computes the same value, but in a more efficient manner (including
8859    calls to other builtin functions).
8860 
8861    The call may contain arguments which need to be evaluated, but
8862    which are not useful to determine the result of the call.  In
8863    this case we return a chain of COMPOUND_EXPRs.  The LHS of each
8864    COMPOUND_EXPR will be an argument which must be evaluated.
8865    COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
8866    COMPOUND_EXPR in the chain will contain the tree for the simplified
8867    form of the builtin function call.  */
8868 
8869 static tree
8870 fold_builtin_strcspn (location_t loc, tree s1, tree s2)
8871 {
8872   if (!validate_arg (s1, POINTER_TYPE)
8873       || !validate_arg (s2, POINTER_TYPE))
8874     return NULL_TREE;
8875   else
8876     {
8877       /* If the first argument is "", the result is 0.  */
8878       const char *p1 = c_getstr (s1);
8879       if (p1 && *p1 == '\0')
8880 	{
8881 	  /* Evaluate and ignore argument s2 in case it has
8882 	     side-effects.  */
8883 	  return omit_one_operand_loc (loc, size_type_node,
8884 				   size_zero_node, s2);
8885 	}
8886 
8887       /* If the second argument is "", return __builtin_strlen(s1).  */
8888       const char *p2 = c_getstr (s2);
8889       if (p2 && *p2 == '\0')
8890 	{
8891 	  tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
8892 
8893 	  /* If the replacement _DECL isn't initialized, don't do the
8894 	     transformation.  */
8895 	  if (!fn)
8896 	    return NULL_TREE;
8897 
8898 	  return build_call_expr_loc (loc, fn, 1, s1);
8899 	}
8900       return NULL_TREE;
8901     }
8902 }
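
/* Illustrative sketch (editorial addition, not part of GCC): the two
   folders above only handle the degenerate constant-string cases,

     strspn (s, "")   ->  0
     strspn ("", s)   ->  0
     strcspn ("", s)  ->  0
     strcspn (s, "")  ->  strlen (s)

   while still evaluating the other argument for its side effects.  */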
8903 
8904 /* Fold the next_arg or va_start call EXP.  Returns true if an error was
8905    produced, false otherwise.  This is done so that we don't output the
8906    error or warning two or three times.  */
8907 
8908 bool
8909 fold_builtin_next_arg (tree exp, bool va_start_p)
8910 {
8911   tree fntype = TREE_TYPE (current_function_decl);
8912   int nargs = call_expr_nargs (exp);
8913   tree arg;
8914   /* There is a good chance the current input_location points inside the
8915      definition of the va_start macro (perhaps on the token for the
8916      builtin) in a system header, so warnings will not be emitted.
8917      Use the location in real source code.  */
8918   source_location current_location =
8919     linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
8920 					      NULL);
8921 
8922   if (!stdarg_p (fntype))
8923     {
8924       error ("%<va_start%> used in function with fixed args");
8925       return true;
8926     }
8927 
8928   if (va_start_p)
8929     {
8930       if (nargs != 2)
8931 	{
8932 	  error ("wrong number of arguments to function %<va_start%>");
8933 	  return true;
8934 	}
8935       arg = CALL_EXPR_ARG (exp, 1);
8936     }
8937   /* We use __builtin_va_start (ap, 0, 0) or __builtin_next_arg (0, 0)
8938      when we checked the arguments and if needed issued a warning.  */
8939   else
8940     {
8941       if (nargs == 0)
8942 	{
8943 	  /* Evidently an out of date version of <stdarg.h>; can't validate
8944 	     va_start's second argument, but can still work as intended.  */
8945 	  warning_at (current_location,
8946 		      OPT_Wvarargs,
8947 		   "%<__builtin_next_arg%> called without an argument");
8948 	  return true;
8949 	}
8950       else if (nargs > 1)
8951 	{
8952 	  error ("wrong number of arguments to function %<__builtin_next_arg%>");
8953 	  return true;
8954 	}
8955       arg = CALL_EXPR_ARG (exp, 0);
8956     }
8957 
8958   if (TREE_CODE (arg) == SSA_NAME)
8959     arg = SSA_NAME_VAR (arg);
8960 
8961   /* We destructively modify the call to be __builtin_va_start (ap, 0)
8962      or __builtin_next_arg (0) the first time we see it, after checking
8963      the arguments and if needed issuing a warning.  */
8964   if (!integer_zerop (arg))
8965     {
8966       tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8967 
8968       /* Strip off all nops for the sake of the comparison.  This
8969 	 is not quite the same as STRIP_NOPS.  It does more.
8970 	 We must also strip off INDIRECT_EXPR for C++ reference
8971 	 parameters.  */
8972       while (CONVERT_EXPR_P (arg)
8973 	     || TREE_CODE (arg) == INDIRECT_REF)
8974 	arg = TREE_OPERAND (arg, 0);
8975       if (arg != last_parm)
8976 	{
8977 	  /* FIXME: Sometimes with the tree optimizers we can get
8978 	     something other than the last argument even though the user
8979 	     used the last argument.  We just warn and set the arg to be
8980 	     the last argument so that we will get wrong-code because of
8981 	     it.  */
8982 	  warning_at (current_location,
8983 		      OPT_Wvarargs,
8984 		      "second parameter of %<va_start%> not last named argument");
8985 	}
8986 
8987       /* Undefined by C99 7.15.1.4p4 (va_start):
8988          "If the parameter parmN is declared with the register storage
8989          class, with a function or array type, or with a type that is
8990          not compatible with the type that results after application of
8991          the default argument promotions, the behavior is undefined."
8992       */
8993       else if (DECL_REGISTER (arg))
8994 	{
8995 	  warning_at (current_location,
8996 		      OPT_Wvarargs,
8997 		      "undefined behavior when second parameter of "
8998 		      "%<va_start%> is declared with %<register%> storage");
8999 	}
9000 
9001       /* We want to verify the second parameter just once before the tree
9002 	 optimizers are run and then avoid keeping it in the tree,
9003 	 as otherwise we could warn even for correct code like:
9004 	 void foo (int i, ...)
9005 	 { va_list ap; i++; va_start (ap, i); va_end (ap); }  */
9006       if (va_start_p)
9007 	CALL_EXPR_ARG (exp, 1) = integer_zero_node;
9008       else
9009 	CALL_EXPR_ARG (exp, 0) = integer_zero_node;
9010     }
9011   return false;
9012 }
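
/* Illustrative sketch (editorial addition, not part of GCC): the checks
   above diagnose misuse of va_start, e.g.

     void f (int last, ...)
     {
       va_list ap;
       va_start (ap, last);   OK; the second argument is rewritten to 0
       va_end (ap);
     }

     void g (int a, int b, ...)
     {
       va_list ap;
       va_start (ap, a);      warns: not the last named argument
       va_end (ap);
     }
*/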
9013 
9014 
9015 /* Expand a call EXP to __builtin_object_size.  */
9016 
9017 static rtx
9018 expand_builtin_object_size (tree exp)
9019 {
9020   tree ost;
9021   int object_size_type;
9022   tree fndecl = get_callee_fndecl (exp);
9023 
9024   if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9025     {
9026       error ("%Kfirst argument of %D must be a pointer, second integer constant",
9027 	     exp, fndecl);
9028       expand_builtin_trap ();
9029       return const0_rtx;
9030     }
9031 
9032   ost = CALL_EXPR_ARG (exp, 1);
9033   STRIP_NOPS (ost);
9034 
9035   if (TREE_CODE (ost) != INTEGER_CST
9036       || tree_int_cst_sgn (ost) < 0
9037       || compare_tree_int (ost, 3) > 0)
9038     {
9039       error ("%Klast argument of %D is not integer constant between 0 and 3",
9040 	     exp, fndecl);
9041       expand_builtin_trap ();
9042       return const0_rtx;
9043     }
9044 
9045   object_size_type = tree_to_shwi (ost);
9046 
9047   return object_size_type < 2 ? constm1_rtx : const0_rtx;
9048 }
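
/* Illustrative sketch (editorial addition, not part of GCC): by the time a
   __builtin_object_size call survives to RTL expansion nothing more can be
   learned about the object, so the unknown-size defaults are returned:

     __builtin_object_size (p, 0)  ->  (size_t) -1
     __builtin_object_size (p, 1)  ->  (size_t) -1
     __builtin_object_size (p, 2)  ->  0
     __builtin_object_size (p, 3)  ->  0
*/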
9049 
9050 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
9051    FCODE is the BUILT_IN_* to use.
9052    Return NULL_RTX if we failed; the caller should emit a normal call,
9053    otherwise try to get the result in TARGET, if convenient (and in
9054    mode MODE if that's convenient).  */
9055 
9056 static rtx
9057 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
9058 			   enum built_in_function fcode)
9059 {
9060   tree dest, src, len, size;
9061 
9062   if (!validate_arglist (exp,
9063 			 POINTER_TYPE,
9064 			 fcode == BUILT_IN_MEMSET_CHK
9065 			 ? INTEGER_TYPE : POINTER_TYPE,
9066 			 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
9067     return NULL_RTX;
9068 
9069   dest = CALL_EXPR_ARG (exp, 0);
9070   src = CALL_EXPR_ARG (exp, 1);
9071   len = CALL_EXPR_ARG (exp, 2);
9072   size = CALL_EXPR_ARG (exp, 3);
9073 
9074   if (! tree_fits_uhwi_p (size))
9075     return NULL_RTX;
9076 
9077   if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
9078     {
9079       tree fn;
9080 
9081       if (! integer_all_onesp (size) && tree_int_cst_lt (size, len))
9082 	{
9083 	  warning_at (tree_nonartificial_location (exp),
9084 		      0, "%Kcall to %D will always overflow destination buffer",
9085 		      exp, get_callee_fndecl (exp));
9086 	  return NULL_RTX;
9087 	}
9088 
9089       fn = NULL_TREE;
9090       /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
9091 	 mem{cpy,pcpy,move,set} is available.  */
9092       switch (fcode)
9093 	{
9094 	case BUILT_IN_MEMCPY_CHK:
9095 	  fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
9096 	  break;
9097 	case BUILT_IN_MEMPCPY_CHK:
9098 	  fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
9099 	  break;
9100 	case BUILT_IN_MEMMOVE_CHK:
9101 	  fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
9102 	  break;
9103 	case BUILT_IN_MEMSET_CHK:
9104 	  fn = builtin_decl_explicit (BUILT_IN_MEMSET);
9105 	  break;
9106 	default:
9107 	  break;
9108 	}
9109 
9110       if (! fn)
9111 	return NULL_RTX;
9112 
9113       fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
9114       gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9115       CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9116       return expand_expr (fn, target, mode, EXPAND_NORMAL);
9117     }
9118   else if (fcode == BUILT_IN_MEMSET_CHK)
9119     return NULL_RTX;
9120   else
9121     {
9122       unsigned int dest_align = get_pointer_alignment (dest);
9123 
9124       /* If DEST is not a pointer type, call the normal function.  */
9125       if (dest_align == 0)
9126 	return NULL_RTX;
9127 
9128       /* If SRC and DEST are the same (and not volatile), do nothing.  */
9129       if (operand_equal_p (src, dest, 0))
9130 	{
9131 	  tree expr;
9132 
9133 	  if (fcode != BUILT_IN_MEMPCPY_CHK)
9134 	    {
9135 	      /* Evaluate and ignore LEN in case it has side-effects.  */
9136 	      expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
9137 	      return expand_expr (dest, target, mode, EXPAND_NORMAL);
9138 	    }
9139 
9140 	  expr = fold_build_pointer_plus (dest, len);
9141 	  return expand_expr (expr, target, mode, EXPAND_NORMAL);
9142 	}
9143 
9144       /* __memmove_chk special case.  */
9145       if (fcode == BUILT_IN_MEMMOVE_CHK)
9146 	{
9147 	  unsigned int src_align = get_pointer_alignment (src);
9148 
9149 	  if (src_align == 0)
9150 	    return NULL_RTX;
9151 
9152 	  /* If src is categorized for a readonly section we can use
9153 	     normal __memcpy_chk.  */
9154 	  if (readonly_data_expr (src))
9155 	    {
9156 	      tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
9157 	      if (!fn)
9158 		return NULL_RTX;
9159 	      fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
9160 					  dest, src, len, size);
9161 	      gcc_assert (TREE_CODE (fn) == CALL_EXPR);
9162 	      CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
9163 	      return expand_expr (fn, target, mode, EXPAND_NORMAL);
9164 	    }
9165 	}
9166       return NULL_RTX;
9167     }
9168 }
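
/* Illustrative sketch (editorial addition, not part of GCC): when the copy
   length is a known constant, a fortified call such as

     __builtin___memcpy_chk (dst, src, 16, __builtin_object_size (dst, 0))

   is expanded as a plain memcpy (dst, src, 16) whenever the object size is
   unknown (all ones) or at least 16; if the size is known to be smaller,
   the "will always overflow destination buffer" warning is emitted and the
   checking library call is kept.  */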
9169 
9170 /* Emit warning if a buffer overflow is detected at compile time.  */
9171 
9172 static void
9173 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
9174 {
9175   int is_strlen = 0;
9176   tree len, size;
9177   location_t loc = tree_nonartificial_location (exp);
9178 
9179   switch (fcode)
9180     {
9181     case BUILT_IN_STRCPY_CHK:
9182     case BUILT_IN_STPCPY_CHK:
9183     /* For __strcat_chk the warning will be emitted only if overflowing
9184        by at least strlen (dest) + 1 bytes.  */
9185     case BUILT_IN_STRCAT_CHK:
9186       len = CALL_EXPR_ARG (exp, 1);
9187       size = CALL_EXPR_ARG (exp, 2);
9188       is_strlen = 1;
9189       break;
9190     case BUILT_IN_STRNCAT_CHK:
9191     case BUILT_IN_STRNCPY_CHK:
9192     case BUILT_IN_STPNCPY_CHK:
9193       len = CALL_EXPR_ARG (exp, 2);
9194       size = CALL_EXPR_ARG (exp, 3);
9195       break;
9196     case BUILT_IN_SNPRINTF_CHK:
9197     case BUILT_IN_VSNPRINTF_CHK:
9198       len = CALL_EXPR_ARG (exp, 1);
9199       size = CALL_EXPR_ARG (exp, 3);
9200       break;
9201     default:
9202       gcc_unreachable ();
9203     }
9204 
9205   if (!len || !size)
9206     return;
9207 
9208   if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
9209     return;
9210 
9211   if (is_strlen)
9212     {
9213       len = c_strlen (len, 1);
9214       if (! len || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
9215 	return;
9216     }
9217   else if (fcode == BUILT_IN_STRNCAT_CHK)
9218     {
9219       tree src = CALL_EXPR_ARG (exp, 1);
9220       if (! src || ! tree_fits_uhwi_p (len) || tree_int_cst_lt (len, size))
9221 	return;
9222       src = c_strlen (src, 1);
9223       if (! src || ! tree_fits_uhwi_p (src))
9224 	{
9225 	  warning_at (loc, 0, "%Kcall to %D might overflow destination buffer",
9226 		      exp, get_callee_fndecl (exp));
9227 	  return;
9228 	}
9229       else if (tree_int_cst_lt (src, size))
9230 	return;
9231     }
9232   else if (! tree_fits_uhwi_p (len) || ! tree_int_cst_lt (size, len))
9233     return;
9234 
9235   warning_at (loc, 0, "%Kcall to %D will always overflow destination buffer",
9236 	      exp, get_callee_fndecl (exp));
9237 }
9238 
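/* As an example of the strlen-based check above (hypothetical user code,
   not part of this file):

     char buf[5];
     __builtin___strcpy_chk (buf, "hello", 5);

   c_strlen of the source gives 5, which is not smaller than the object
   size 5, so the copy plus its terminating NUL cannot fit and the
   "will always overflow destination buffer" warning is emitted.  */
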
/* Emit warning if a buffer overflow is detected at compile time
   in __sprintf_chk/__vsprintf_chk calls.  */

static void
maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
{
  tree size, len, fmt;
  const char *fmt_str;
  int nargs = call_expr_nargs (exp);

  /* Verify the required arguments in the original call.  */

  if (nargs < 4)
    return;
  size = CALL_EXPR_ARG (exp, 2);
  fmt = CALL_EXPR_ARG (exp, 3);

  if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
    return;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return;

  if (!init_target_chars ())
    return;

  /* If the format doesn't contain % args or %%, we know its size.  */
  if (strchr (fmt_str, target_percent) == 0)
    len = build_int_cstu (size_type_node, strlen (fmt_str));
  /* If the format is "%s" and the first ... argument is a string literal,
     we know it too.  */
  else if (fcode == BUILT_IN_SPRINTF_CHK
           && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree arg;

      if (nargs < 5)
        return;
      arg = CALL_EXPR_ARG (exp, 4);
      if (! POINTER_TYPE_P (TREE_TYPE (arg)))
        return;

      len = c_strlen (arg, 1);
      if (!len || ! tree_fits_uhwi_p (len))
        return;
    }
  else
    return;

  if (! tree_int_cst_lt (len, size))
    warning_at (tree_nonartificial_location (exp),
                0, "%Kcall to %D will always overflow destination buffer",
                exp, get_callee_fndecl (exp));
}

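/* For instance (hypothetical user code, not part of this file):

     char buf[5];
     __builtin___sprintf_chk (buf, 0, 5, "hello");

   The format contains no % directives, so LEN is 5; since 5 is not less
   than the object size 5, the formatted string plus its terminating NUL
   cannot fit and the warning above is emitted.  */
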
/* Emit a warning if free is called with the address of a variable.  */

static void
maybe_emit_free_warning (tree exp)
{
  tree arg = CALL_EXPR_ARG (exp, 0);

  STRIP_NOPS (arg);
  if (TREE_CODE (arg) != ADDR_EXPR)
    return;

  arg = get_base_address (TREE_OPERAND (arg, 0));
  if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
    return;

  if (SSA_VAR_P (arg))
    warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
                "%Kattempt to free a non-heap object %qD", exp, arg);
  else
    warning_at (tree_nonartificial_location (exp), OPT_Wfree_nonheap_object,
                "%Kattempt to free a non-heap object", exp);
}

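/* For example (hypothetical user code, not part of this file):

     char c;
     free (&c);

   The argument is the address of an automatic variable, so the
   -Wfree-nonheap-object warning above names 'c' explicitly.  */
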
/* Fold a call to __builtin_object_size with arguments PTR and OST,
   if possible.  */

static tree
fold_builtin_object_size (tree ptr, tree ost)
{
  unsigned HOST_WIDE_INT bytes;
  int object_size_type;

  if (!validate_arg (ptr, POINTER_TYPE)
      || !validate_arg (ost, INTEGER_TYPE))
    return NULL_TREE;

  STRIP_NOPS (ost);

  if (TREE_CODE (ost) != INTEGER_CST
      || tree_int_cst_sgn (ost) < 0
      || compare_tree_int (ost, 3) > 0)
    return NULL_TREE;

  object_size_type = tree_to_shwi (ost);

  /* __builtin_object_size doesn't evaluate side-effects in its arguments;
     if there are any side-effects, it returns (size_t) -1 for types 0 and 1
     and (size_t) 0 for types 2 and 3.  */
  if (TREE_SIDE_EFFECTS (ptr))
    return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      bytes = compute_builtin_object_size (ptr, object_size_type);
      if (wi::fits_to_tree_p (bytes, size_type_node))
        return build_int_cstu (size_type_node, bytes);
    }
  else if (TREE_CODE (ptr) == SSA_NAME)
    {
      /* If the object size is not known yet, delay folding until
         later.  Maybe subsequent passes will help determine it.  */
      bytes = compute_builtin_object_size (ptr, object_size_type);
      if (bytes != (unsigned HOST_WIDE_INT) (object_size_type < 2 ? -1 : 0)
          && wi::fits_to_tree_p (bytes, size_type_node))
        return build_int_cstu (size_type_node, bytes);
    }

  return NULL_TREE;
}

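/* As an illustration (hypothetical user code, not part of this file), for

     char buf[16];
     ... __builtin_object_size (&buf[4], 0) ...

   the ADDR_EXPR case above would typically fold the call to 12, the number
   of bytes remaining in BUF, while a pointer whose object cannot be
   determined ultimately yields (size_t) -1 for types 0 and 1 and 0 for
   types 2 and 3.  */
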
/* Builtins with folding operations that operate on "..." arguments
   need special handling; we need to store the arguments in a convenient
   data structure before attempting any folding.  Fortunately there are
   only a few builtins that fall into this category.  FNDECL is the
   function, ARGS is the array of NARGS call arguments.  */

static tree
fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
{
  enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
  tree ret = NULL_TREE;

  switch (fcode)
    {
    case BUILT_IN_FPCLASSIFY:
      ret = fold_builtin_fpclassify (loc, args, nargs);
      break;

    default:
      break;
    }
  if (ret)
    {
      ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
      SET_EXPR_LOCATION (ret, loc);
      TREE_NO_WARNING (ret) = 1;
      return ret;
    }
  return NULL_TREE;
}

/* Initialize format string characters in the target charset.  */

bool
init_target_chars (void)
{
  static bool init;
  if (!init)
    {
      target_newline = lang_hooks.to_target_charset ('\n');
      target_percent = lang_hooks.to_target_charset ('%');
      target_c = lang_hooks.to_target_charset ('c');
      target_s = lang_hooks.to_target_charset ('s');
      if (target_newline == 0 || target_percent == 0 || target_c == 0
          || target_s == 0)
        return false;

      target_percent_c[0] = target_percent;
      target_percent_c[1] = target_c;
      target_percent_c[2] = '\0';

      target_percent_s[0] = target_percent;
      target_percent_s[1] = target_s;
      target_percent_s[2] = '\0';

      target_percent_s_newline[0] = target_percent;
      target_percent_s_newline[1] = target_s;
      target_percent_s_newline[2] = target_newline;
      target_percent_s_newline[3] = '\0';

      init = true;
    }
  return true;
}

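/* Callers use the strings initialized above to match format strings in the
   target character set; for example, maybe_emit_sprintf_chk_warning checks

     strcmp (fmt_str, target_percent_s) == 0

   to recognize a plain "%s" format after a successful init_target_chars ().  */
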
/* Helper function for do_mpfr_arg*().  Ensure M is a normal number
   and no overflow/underflow occurred.  INEXACT is true if M was not
   exactly calculated.  TYPE is the tree type for the result.  This
   function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail.  */

static tree
do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
      && (!flag_rounding_math || !inexact))
    {
      REAL_VALUE_TYPE rr;

      real_from_mpfr (&rr, m, type, GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
         check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
         but the mpfr_t is not, then we underflowed in the
         conversion.  */
      if (real_isfinite (&rr)
          && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
        {
          REAL_VALUE_TYPE rmode;

          real_convert (&rmode, TYPE_MODE (type), &rr);
          /* Proceed iff the specified mode can hold the value.  */
          if (real_identical (&rmode, &rr))
            return build_real (type, rmode);
        }
    }
  return NULL_TREE;
}

/* Helper function for do_mpc_arg*().  Ensure M is a normal complex
   number and no overflow/underflow occurred.  INEXACT is true if M
   was not exactly calculated.  TYPE is the tree type for the result.
   This function assumes that you cleared the MPFR flags and then
   calculated M to see if anything subsequently set a flag prior to
   entering this function.  Return NULL_TREE if any checks fail; if
   FORCE_CONVERT is true, bypass the checks.  */

static tree
do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
{
  /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
     overflow/underflow occurred.  If -frounding-math, proceed iff the
     result of calling FUNC was exact.  */
  if (force_convert
      || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
          && !mpfr_overflow_p () && !mpfr_underflow_p ()
          && (!flag_rounding_math || !inexact)))
    {
      REAL_VALUE_TYPE re, im;

      real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), GMP_RNDN);
      real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), GMP_RNDN);
      /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
         check for overflow/underflow.  If the REAL_VALUE_TYPE is zero
         but the mpfr_t is not, then we underflowed in the
         conversion.  */
      if (force_convert
          || (real_isfinite (&re) && real_isfinite (&im)
              && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
              && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
        {
          REAL_VALUE_TYPE re_mode, im_mode;

          real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
          real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
          /* Proceed iff the specified mode can hold the value.  */
          if (force_convert
              || (real_identical (&re_mode, &re)
                  && real_identical (&im_mode, &im)))
            return build_complex (type, build_real (TREE_TYPE (type), re_mode),
                                  build_real (TREE_TYPE (type), im_mode));
        }
    }
  return NULL_TREE;
}

/* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
   *ARG_QUO and return the remainder as the result.  The type is taken
   from the type of ARG0 and is used for setting the precision of the
   calculation and results.  */

static tree
do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
{
  tree const type = TREE_TYPE (arg0);
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
    {
      const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
      const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);

      if (real_isfinite (ra0) && real_isfinite (ra1))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          tree result_rem;
          long integer_quo;
          mpfr_t m0, m1;

          mpfr_inits2 (prec, m0, m1, NULL);
          mpfr_from_real (m0, ra0, GMP_RNDN);
          mpfr_from_real (m1, ra1, GMP_RNDN);
          mpfr_clear_flags ();
          mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
          /* Remquo is independent of the rounding mode, so pass
             inexact=0 to do_mpfr_ckconv().  */
          result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
          mpfr_clears (m0, m1, NULL);
          if (result_rem)
            {
              /* MPFR calculates quo in the host's long so it may
                 return more bits in quo than the target int can hold
                 if sizeof(host long) > sizeof(target int).  This can
                 happen even for native compilers in LP64 mode.  In
                 these cases, modulo the quo value with the largest
                 number that the target int can hold while leaving one
                 bit for the sign.  */
              if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
                integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));

              /* Dereference the quo pointer argument.  */
              arg_quo = build_fold_indirect_ref (arg_quo);
              /* Proceed iff a valid pointer type was passed in.  */
              if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
                {
                  /* Set the value.  */
                  tree result_quo
                    = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
                                   build_int_cst (TREE_TYPE (arg_quo),
                                                  integer_quo));
                  TREE_SIDE_EFFECTS (result_quo) = 1;
                  /* Combine the quo assignment with the rem.  */
                  result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                    result_quo, result_rem));
                }
            }
        }
    }
  return result;
}

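/* As a worked example (not code in this file), folding remquo (7.0, 3.0, &q)
   with the helper above produces a COMPOUND_EXPR whose value is the
   remainder 1.0 and whose first operand stores the quotient 2 into *q,
   since 7/3 rounds to the nearest integer 2 and 7 - 2*3 is 1.  */
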
/* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
   resulting value as a tree with type TYPE.  The mpfr precision is
   set to the precision of TYPE.  We assume that this mpfr function
   returns zero if the result could be calculated exactly within the
   requested precision.  In addition, the integer pointer represented
   by ARG_SG will be dereferenced and set to the appropriate signgam
   (-1,1) value.  */

static tree
do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  Also
     verify ARG is a constant and that ARG_SG is an int pointer.  */
  if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
      && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
      && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
      && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
    {
      const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);

      /* In addition to NaN and Inf, the argument cannot be zero or a
         negative integer.  */
      if (real_isfinite (ra)
          && ra->cl != rvc_zero
          && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
        {
          const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          int inexact, sg;
          mpfr_t m;
          tree result_lg;

          mpfr_init2 (m, prec);
          mpfr_from_real (m, ra, GMP_RNDN);
          mpfr_clear_flags ();
          inexact = mpfr_lgamma (m, &sg, m, rnd);
          result_lg = do_mpfr_ckconv (m, type, inexact);
          mpfr_clear (m);
          if (result_lg)
            {
              tree result_sg;

              /* Dereference the arg_sg pointer argument.  */
              arg_sg = build_fold_indirect_ref (arg_sg);
              /* Assign the signgam value into *arg_sg.  */
              result_sg = fold_build2 (MODIFY_EXPR,
                                       TREE_TYPE (arg_sg), arg_sg,
                                       build_int_cst (TREE_TYPE (arg_sg), sg));
              TREE_SIDE_EFFECTS (result_sg) = 1;
              /* Combine the signgam assignment with the lgamma result.  */
              result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
                                                result_sg, result_lg));
            }
        }
    }

  return result;
}

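/* As an illustration (not code from this file), folding lgamma_r (1.0, &sg)
   this way yields the value 0.0 with 1 stored in *sg, since gamma (1.0) is
   exactly 1 and therefore positive.  */
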
/* If arguments ARG0 and ARG1 are COMPLEX_CSTs, call the two-argument
   mpc function FUNC on them and return the resulting value as a tree
   with type TYPE.  The mpfr precision is set to the precision of
   TYPE.  We assume that function FUNC returns zero if the result
   could be calculated exactly within the requested precision.  If
   DO_NONFINITE is true, then fold expressions containing Inf or NaN
   in the arguments and/or results.  */

tree
do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
             int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
{
  tree result = NULL_TREE;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* To proceed, MPFR must exactly represent the target floating point
     format, which only happens when the target base equals two.  */
  if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
      && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
      && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
      && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
    {
      const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
      const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
      const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
      const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));

      if (do_nonfinite
          || (real_isfinite (re0) && real_isfinite (im0)
              && real_isfinite (re1) && real_isfinite (im1)))
        {
          const struct real_format *const fmt =
            REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
          const int prec = fmt->p;
          const mp_rnd_t rnd = fmt->round_towards_zero ? GMP_RNDZ : GMP_RNDN;
          const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
          int inexact;
          mpc_t m0, m1;

          mpc_init2 (m0, prec);
          mpc_init2 (m1, prec);
          mpfr_from_real (mpc_realref (m0), re0, rnd);
          mpfr_from_real (mpc_imagref (m0), im0, rnd);
          mpfr_from_real (mpc_realref (m1), re1, rnd);
          mpfr_from_real (mpc_imagref (m1), im1, rnd);
          mpfr_clear_flags ();
          inexact = func (m0, m0, m1, crnd);
          result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
          mpc_clear (m0);
          mpc_clear (m1);
        }
    }

  return result;
}

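/* For example, a caller can pass mpc_pow as FUNC to fold a call such as
   cpow with two constant complex arguments down to a COMPLEX_CST (an
   illustration of the intended use; no such caller is defined here).  */
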
/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node before the warning is generated.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_BUILT_IN (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
                    ? gimple_call_arg_ptr (stmt, 0)
                    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
        return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
        {
          return targetm.fold_builtin (fndecl, nargs, args, ignore);
        }
      else
        {
          ret = fold_builtin_n (loc, fndecl, args, nargs, ignore);
          if (ret)
            {
              /* Propagate location information from original call to
                 expansion of builtin.  Otherwise things like
                 maybe_emit_chk_warning, that operate on the expansion
                 of a builtin, will use the wrong location information.  */
              if (gimple_has_location (stmt))
                {
                  tree realret = ret;
                  if (TREE_CODE (ret) == NOP_EXPR)
                    realret = TREE_OPERAND (ret, 0);
                  if (CAN_HAVE_LOCATION_P (realret)
                      && !EXPR_HAS_LOCATION (realret))
                    SET_EXPR_LOCATION (realret, loc);
                  return realret;
                }
              return ret;
            }
        }
    }
  return NULL_TREE;
}

/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  tree builtin;
  gcc_assert (TREE_CODE (decl) == FUNCTION_DECL
              && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
              && asmspec != 0);

  builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);
  switch (DECL_FUNCTION_CODE (decl))
    {
    case BUILT_IN_MEMCPY:
      init_block_move_fn (asmspec);
      memcpy_libfunc = set_user_assembler_libfunc ("memcpy", asmspec);
      break;
    case BUILT_IN_MEMSET:
      init_block_clear_fn (asmspec);
      memset_libfunc = set_user_assembler_libfunc ("memset", asmspec);
      break;
    case BUILT_IN_MEMMOVE:
      memmove_libfunc = set_user_assembler_libfunc ("memmove", asmspec);
      break;
    case BUILT_IN_MEMCMP:
      memcmp_libfunc = set_user_assembler_libfunc ("memcmp", asmspec);
      break;
    case BUILT_IN_ABORT:
      abort_libfunc = set_user_assembler_libfunc ("abort", asmspec);
      break;
    case BUILT_IN_FFS:
      if (INT_TYPE_SIZE < BITS_PER_WORD)
        {
          set_user_assembler_libfunc ("ffs", asmspec);
          set_optab_libfunc (ffs_optab, mode_for_size (INT_TYPE_SIZE,
                                                       MODE_INT, 0), "ffs");
        }
      break;
    default:
      break;
    }
}

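/* For example (a sketch of the user-visible effect, not code from this
   file), a source-level renaming such as

     extern void *memcpy (void *, const void *, __SIZE_TYPE__)
       __asm__ ("my_memcpy");

   makes the BUILT_IN_MEMCPY case above redirect both explicit memcpy
   calls and compiler-generated block moves to the my_memcpy symbol.  */
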
/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
        /* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
        /* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
        /* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
        return true;

      default:
        return false;
      }

  return false;
}

/* Return true if DECL is a builtin that is not expensive, i.e., one that
   will most probably be expanded inline into reasonably simple code.
   This is a superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
        return true;

      default:
        return is_simple_builtin (decl);
      }

  return false;
}