1 /* Expand builtin functions.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "machmode.h"
25 #include "real.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "flags.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "except.h"
32 #include "function.h"
33 #include "insn-config.h"
34 #include "expr.h"
35 #include "optabs.h"
36 #include "libfuncs.h"
37 #include "recog.h"
38 #include "output.h"
39 #include "typeclass.h"
40 #include "toplev.h"
41 #include "predict.h"
42 #include "tm_p.h"
43 #include "target.h"
44 #include "langhooks.h"
45
46 #define CALLED_AS_BUILT_IN(NODE) \
47 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
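
/* For example, CALLED_AS_BUILT_IN matches a FUNCTION_DECL named
   "__builtin_memcpy" or "__builtin_strlen", but not a plain "memcpy"
   declaration.  */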
48
49 /* Register mappings for target machines without register windows. */
50 #ifndef INCOMING_REGNO
51 #define INCOMING_REGNO(OUT) (OUT)
52 #endif
53 #ifndef OUTGOING_REGNO
54 #define OUTGOING_REGNO(IN) (IN)
55 #endif
56
57 #ifndef PAD_VARARGS_DOWN
58 #define PAD_VARARGS_DOWN BYTES_BIG_ENDIAN
59 #endif
60
61 /* Define the names of the builtin function types and codes. */
62 const char *const built_in_class_names[4]
63 = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};
64
65 #define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT) STRINGX(X),
66 const char *const built_in_names[(int) END_BUILTINS] =
67 {
68 #include "builtins.def"
69 };
70 #undef DEF_BUILTIN
71
72 /* Set up an array of _DECL trees, making sure each element is
73 initialized to NULL_TREE. */
74 tree built_in_decls[(int) END_BUILTINS];
75
76 static int get_pointer_alignment PARAMS ((tree, unsigned int));
77 static tree c_strlen PARAMS ((tree));
78 static const char *c_getstr PARAMS ((tree));
79 static rtx c_readstr PARAMS ((const char *,
80 enum machine_mode));
81 static int target_char_cast PARAMS ((tree, char *));
82 static rtx get_memory_rtx PARAMS ((tree));
83 static int apply_args_size PARAMS ((void));
84 static int apply_result_size PARAMS ((void));
85 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
86 static rtx result_vector PARAMS ((int, rtx));
87 #endif
88 static rtx expand_builtin_setjmp PARAMS ((tree, rtx));
89 static void expand_builtin_prefetch PARAMS ((tree));
90 static rtx expand_builtin_apply_args PARAMS ((void));
91 static rtx expand_builtin_apply_args_1 PARAMS ((void));
92 static rtx expand_builtin_apply PARAMS ((rtx, rtx, rtx));
93 static void expand_builtin_return PARAMS ((rtx));
94 static enum type_class type_to_class PARAMS ((tree));
95 static rtx expand_builtin_classify_type PARAMS ((tree));
96 static rtx expand_builtin_mathfn PARAMS ((tree, rtx, rtx));
97 static rtx expand_builtin_constant_p PARAMS ((tree));
98 static rtx expand_builtin_args_info PARAMS ((tree));
99 static rtx expand_builtin_next_arg PARAMS ((tree));
100 static rtx expand_builtin_va_start PARAMS ((tree));
101 static rtx expand_builtin_va_end PARAMS ((tree));
102 static rtx expand_builtin_va_copy PARAMS ((tree));
103 static rtx expand_builtin_memcmp PARAMS ((tree, tree, rtx,
104 enum machine_mode));
105 static rtx expand_builtin_strcmp PARAMS ((tree, rtx,
106 enum machine_mode));
107 static rtx expand_builtin_strncmp PARAMS ((tree, rtx,
108 enum machine_mode));
109 static rtx builtin_memcpy_read_str PARAMS ((PTR, HOST_WIDE_INT,
110 enum machine_mode));
111 static rtx expand_builtin_strcat PARAMS ((tree, rtx,
112 enum machine_mode));
113 static rtx expand_builtin_strncat PARAMS ((tree, rtx,
114 enum machine_mode));
115 static rtx expand_builtin_strspn PARAMS ((tree, rtx,
116 enum machine_mode));
117 static rtx expand_builtin_strcspn PARAMS ((tree, rtx,
118 enum machine_mode));
119 static rtx expand_builtin_memcpy PARAMS ((tree, rtx,
120 enum machine_mode));
121 static rtx expand_builtin_strcpy PARAMS ((tree, rtx,
122 enum machine_mode));
123 static rtx builtin_strncpy_read_str PARAMS ((PTR, HOST_WIDE_INT,
124 enum machine_mode));
125 static rtx expand_builtin_strncpy PARAMS ((tree, rtx,
126 enum machine_mode));
127 static rtx builtin_memset_read_str PARAMS ((PTR, HOST_WIDE_INT,
128 enum machine_mode));
129 static rtx builtin_memset_gen_str PARAMS ((PTR, HOST_WIDE_INT,
130 enum machine_mode));
131 static rtx expand_builtin_memset PARAMS ((tree, rtx,
132 enum machine_mode));
133 static rtx expand_builtin_bzero PARAMS ((tree));
134 static rtx expand_builtin_strlen PARAMS ((tree, rtx));
135 static rtx expand_builtin_strstr PARAMS ((tree, rtx,
136 enum machine_mode));
137 static rtx expand_builtin_strpbrk PARAMS ((tree, rtx,
138 enum machine_mode));
139 static rtx expand_builtin_strchr PARAMS ((tree, rtx,
140 enum machine_mode));
141 static rtx expand_builtin_strrchr PARAMS ((tree, rtx,
142 enum machine_mode));
143 static rtx expand_builtin_alloca PARAMS ((tree, rtx));
144 static rtx expand_builtin_ffs PARAMS ((tree, rtx, rtx));
145 static rtx expand_builtin_frame_address PARAMS ((tree));
146 static rtx expand_builtin_fputs PARAMS ((tree, int, int));
147 static tree stabilize_va_list PARAMS ((tree, int));
148 static rtx expand_builtin_expect PARAMS ((tree, rtx));
149 static tree fold_builtin_constant_p PARAMS ((tree));
150 static tree fold_builtin_classify_type PARAMS ((tree));
151 static tree fold_builtin_inf PARAMS ((tree, int));
152 static tree fold_builtin_nan PARAMS ((tree, tree, int));
153 static tree build_function_call_expr PARAMS ((tree, tree));
154 static int validate_arglist PARAMS ((tree, ...));
155
156 /* Return the alignment in bits of EXP, a pointer-valued expression.
157 But don't return more than MAX_ALIGN no matter what.
158 The alignment returned is, by default, the alignment of the thing that
159 EXP points to. If it is not a POINTER_TYPE, 0 is returned.
160
161 Otherwise, look at the expression to see if we can do better, i.e., if the
162 expression is actually pointing at an object whose alignment is tighter. */
163
164 static int
165 get_pointer_alignment (exp, max_align)
166 tree exp;
167 unsigned int max_align;
168 {
169 unsigned int align, inner;
170
171 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
172 return 0;
173
174 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
175 align = MIN (align, max_align);
176
177 while (1)
178 {
179 switch (TREE_CODE (exp))
180 {
181 case NOP_EXPR:
182 case CONVERT_EXPR:
183 case NON_LVALUE_EXPR:
184 exp = TREE_OPERAND (exp, 0);
185 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
186 return align;
187
188 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
189 align = MIN (inner, max_align);
190 break;
191
192 case PLUS_EXPR:
193 /* If sum of pointer + int, restrict our maximum alignment to that
194 imposed by the integer. If not, we can't do any better than
195 ALIGN. */
196 if (! host_integerp (TREE_OPERAND (exp, 1), 1))
197 return align;
198
199 while (((tree_low_cst (TREE_OPERAND (exp, 1), 1))
200 & (max_align / BITS_PER_UNIT - 1))
201 != 0)
202 max_align >>= 1;
203
204 exp = TREE_OPERAND (exp, 0);
205 break;
206
207 case ADDR_EXPR:
208 /* See what we are pointing at and look at its alignment. */
209 exp = TREE_OPERAND (exp, 0);
210 if (TREE_CODE (exp) == FUNCTION_DECL)
211 align = FUNCTION_BOUNDARY;
212 else if (DECL_P (exp))
213 align = DECL_ALIGN (exp);
214 #ifdef CONSTANT_ALIGNMENT
215 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
216 align = CONSTANT_ALIGNMENT (exp, align);
217 #endif
218 return MIN (align, max_align);
219
220 default:
221 return align;
222 }
223 }
224 }
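
/* As an illustrative sketch (exact figures depend on the target): given

       static char buf[64] __attribute__ ((aligned (16)));

   get_pointer_alignment on the tree for `(char *) buf' can report 128 bits,
   whereas `buf + 4' is capped by the PLUS_EXPR case above to 32 bits, since
   the constant offset 4 only guarantees 4-byte alignment.  */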
225
226 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
227 way, because the string could contain a zero byte in the middle.
228 TREE_STRING_LENGTH is the size of the character array, not the string.
229
230 The value returned is of type `ssizetype'.
231
232 Unfortunately, string_constant can't access the values of const char
233 arrays with initializers, so neither can we do so here. */
234
235 static tree
236 c_strlen (src)
237 tree src;
238 {
239 tree offset_node;
240 HOST_WIDE_INT offset;
241 int max;
242 const char *ptr;
243
244 src = string_constant (src, &offset_node);
245 if (src == 0)
246 return 0;
247
248 max = TREE_STRING_LENGTH (src) - 1;
249 ptr = TREE_STRING_POINTER (src);
250
251 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
252 {
253 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
254 compute the offset to the following null if we don't know where to
255 start searching for it. */
256 int i;
257
258 for (i = 0; i < max; i++)
259 if (ptr[i] == 0)
260 return 0;
261
262 /* We don't know the starting offset, but we do know that the string
263 has no internal zero bytes. We can assume that the offset falls
264 within the bounds of the string; otherwise, the programmer deserves
265 what he gets. Subtract the offset from the length of the string,
266 and return that. This would perhaps not be valid if we were dealing
267 with named arrays in addition to literal string constants. */
268
269 return size_diffop (size_int (max), offset_node);
270 }
271
272 /* We have a known offset into the string. Start searching there for
273 a null character if we can represent it as a single HOST_WIDE_INT. */
274 if (offset_node == 0)
275 offset = 0;
276 else if (! host_integerp (offset_node, 0))
277 offset = -1;
278 else
279 offset = tree_low_cst (offset_node, 0);
280
281 /* If the offset is known to be out of bounds, warn, and call strlen at
282 runtime. */
283 if (offset < 0 || offset > max)
284 {
285 warning ("offset outside bounds of constant string");
286 return 0;
287 }
288
289 /* Use strlen to search for the first zero byte. Since any strings
290 constructed with build_string will have nulls appended, we win even
291 if we get handed something like (char[4])"abcd".
292
293 Since OFFSET is our starting index into the string, no further
294 calculation is needed. */
295 return ssize_int (strlen (ptr + offset));
296 }
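
/* Illustrative cases: for the constant "hello" with no offset, c_strlen
   returns ssize_int (5); for "foo\0bar" with a variable offset it returns 0,
   because the embedded zero byte makes the length depend on where the offset
   lands; and a known out-of-range offset draws a warning and returns 0 so
   that strlen is called at run time.  */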
297
298 /* Return a char pointer for a C string if it is a string constant
299 or sum of string constant and integer constant. */
300
301 static const char *
302 c_getstr (src)
303 tree src;
304 {
305 tree offset_node;
306
307 src = string_constant (src, &offset_node);
308 if (src == 0)
309 return 0;
310
311 if (offset_node == 0)
312 return TREE_STRING_POINTER (src);
313 else if (!host_integerp (offset_node, 1)
314 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
315 return 0;
316
317 return TREE_STRING_POINTER (src) + tree_low_cst (offset_node, 1);
318 }
319
320 /* Return a CONST_INT or CONST_DOUBLE corresponding to target reading
321 GET_MODE_BITSIZE (MODE) bits from string constant STR. */
322
323 static rtx
324 c_readstr (str, mode)
325 const char *str;
326 enum machine_mode mode;
327 {
328 HOST_WIDE_INT c[2];
329 HOST_WIDE_INT ch;
330 unsigned int i, j;
331
332 if (GET_MODE_CLASS (mode) != MODE_INT)
333 abort ();
334 c[0] = 0;
335 c[1] = 0;
336 ch = 1;
337 for (i = 0; i < GET_MODE_SIZE (mode); i++)
338 {
339 j = i;
340 if (WORDS_BIG_ENDIAN)
341 j = GET_MODE_SIZE (mode) - i - 1;
342 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
343 && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
344 j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
345 j *= BITS_PER_UNIT;
346 if (j > 2 * HOST_BITS_PER_WIDE_INT)
347 abort ();
348 if (ch)
349 ch = (unsigned char) str[i];
350 c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
351 }
352 return immed_double_const (c[0], c[1], mode);
353 }
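
/* A worked example (assuming 8-bit units and a HOST_WIDE_INT wide enough to
   hold SImode): c_readstr ("abc", SImode) packs the bytes into 0x00636261 on
   a little-endian target and 0x61626300 on a big-endian one.  Bytes past the
   terminating NUL come out as zero because CH sticks at zero once a NUL has
   been read.  */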
354
355 /* Cast a target constant CST to target CHAR and, if that value fits into
356 the host char type, return zero and put that value into the variable
357 pointed to by P. */
358
359 static int
360 target_char_cast (cst, p)
361 tree cst;
362 char *p;
363 {
364 unsigned HOST_WIDE_INT val, hostval;
365
366 if (!host_integerp (cst, 1)
367 || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
368 return 1;
369
370 val = tree_low_cst (cst, 1);
371 if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
372 val &= (((unsigned HOST_WIDE_INT) 1) << CHAR_TYPE_SIZE) - 1;
373
374 hostval = val;
375 if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
376 hostval &= (((unsigned HOST_WIDE_INT) 1) << HOST_BITS_PER_CHAR) - 1;
377
378 if (val != hostval)
379 return 1;
380
381 *p = hostval;
382 return 0;
383 }
384
385 /* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
386 times to get the address of either a higher stack frame, or a return
387 address located within it (depending on FNDECL_CODE). */
388
389 rtx
390 expand_builtin_return_addr (fndecl_code, count, tem)
391 enum built_in_function fndecl_code;
392 int count;
393 rtx tem;
394 {
395 int i;
396
397 /* Some machines need special handling before we can access
398 arbitrary frames. For example, on the sparc, we must first flush
399 all register windows to the stack. */
400 #ifdef SETUP_FRAME_ADDRESSES
401 if (count > 0)
402 SETUP_FRAME_ADDRESSES ();
403 #endif
404
405 /* On the sparc, the return address is not in the frame, it is in a
406 register. There is no way to access it off of the current frame
407 pointer, but it can be accessed off the previous frame pointer by
408 reading the value from the register window save area. */
409 #ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
410 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
411 count--;
412 #endif
413
414 /* Scan back COUNT frames to the specified frame. */
415 for (i = 0; i < count; i++)
416 {
417 /* Assume the dynamic chain pointer is in the word that the
418 frame address points to, unless otherwise specified. */
419 #ifdef DYNAMIC_CHAIN_ADDRESS
420 tem = DYNAMIC_CHAIN_ADDRESS (tem);
421 #endif
422 tem = memory_address (Pmode, tem);
423 tem = gen_rtx_MEM (Pmode, tem);
424 set_mem_alias_set (tem, get_frame_alias_set ());
425 tem = copy_to_reg (tem);
426 }
427
428 /* For __builtin_frame_address, return what we've got. */
429 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
430 return tem;
431
432 /* For __builtin_return_address, get the return address from that
433 frame. */
434 #ifdef RETURN_ADDR_RTX
435 tem = RETURN_ADDR_RTX (count, tem);
436 #else
437 tem = memory_address (Pmode,
438 plus_constant (tem, GET_MODE_SIZE (Pmode)));
439 tem = gen_rtx_MEM (Pmode, tem);
440 set_mem_alias_set (tem, get_frame_alias_set ());
441 #endif
442 return tem;
443 }
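
/* For reference, this expansion backs source-level calls such as

       void *ra = __builtin_return_address (0);
       void *fp = __builtin_frame_address (1);

   where a nonzero COUNT walks up the dynamic chain; results for nonzero
   counts are only as reliable as the target's frame layout allows.  */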
444
445 /* Alias set used for setjmp buffer. */
446 static HOST_WIDE_INT setjmp_alias_set = -1;
447
448 /* Construct the leading half of a __builtin_setjmp call. Control will
449 return to RECEIVER_LABEL. This is used directly by sjlj exception
450 handling code. */
451
452 void
453 expand_builtin_setjmp_setup (buf_addr, receiver_label)
454 rtx buf_addr;
455 rtx receiver_label;
456 {
457 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
458 rtx stack_save;
459 rtx mem;
460
461 if (setjmp_alias_set == -1)
462 setjmp_alias_set = new_alias_set ();
463
464 #ifdef POINTERS_EXTEND_UNSIGNED
465 if (GET_MODE (buf_addr) != Pmode)
466 buf_addr = convert_memory_address (Pmode, buf_addr);
467 #endif
468
469 buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));
470
471 emit_queue ();
472
473 /* We store the frame pointer and the address of receiver_label in
474 the buffer and use the rest of it for the stack save area, which
475 is machine-dependent. */
476
477 #ifndef BUILTIN_SETJMP_FRAME_VALUE
478 #define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx
479 #endif
480
481 mem = gen_rtx_MEM (Pmode, buf_addr);
482 set_mem_alias_set (mem, setjmp_alias_set);
483 emit_move_insn (mem, BUILTIN_SETJMP_FRAME_VALUE);
484
485 mem = gen_rtx_MEM (Pmode, plus_constant (buf_addr, GET_MODE_SIZE (Pmode))),
486 set_mem_alias_set (mem, setjmp_alias_set);
487
488 emit_move_insn (validize_mem (mem),
489 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));
490
491 stack_save = gen_rtx_MEM (sa_mode,
492 plus_constant (buf_addr,
493 2 * GET_MODE_SIZE (Pmode)));
494 set_mem_alias_set (stack_save, setjmp_alias_set);
495 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
496
497 /* If there is further processing to do, do it. */
498 #ifdef HAVE_builtin_setjmp_setup
499 if (HAVE_builtin_setjmp_setup)
500 emit_insn (gen_builtin_setjmp_setup (buf_addr));
501 #endif
502
503 /* Tell optimize_save_area_alloca that extra work will need to be
504 done during alloca. */
505 current_function_calls_setjmp = 1;
506
507 /* Set this so all the registers get saved in our frame; we need to be
508 able to copy the saved values for any registers from frames we unwind. */
509 current_function_has_nonlocal_label = 1;
510 }
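
/* A sketch of the resulting buffer layout on a typical target where Pmode is
   the word mode (the details are machine-dependent):

       word 0:   frame pointer (BUILTIN_SETJMP_FRAME_VALUE)
       word 1:   address of RECEIVER_LABEL
       word 2+:  stack save area in SA_MODE

   expand_builtin_longjmp below reads these slots back in the same order.  */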
511
512 /* Construct the trailing part of a __builtin_setjmp call.
513 This is used directly by sjlj exception handling code. */
514
515 void
516 expand_builtin_setjmp_receiver (receiver_label)
517 rtx receiver_label ATTRIBUTE_UNUSED;
518 {
519 /* Clobber the FP when we get here, so we have to make sure it's
520 marked as used by this function. */
521 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
522
523 /* Mark the static chain as clobbered here so life information
524 doesn't get messed up for it. */
525 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
526
527 /* Now put in the code to restore the frame pointer, and argument
528 pointer, if needed. The code below is from expand_end_bindings
529 in stmt.c; see detailed documentation there. */
530 #ifdef HAVE_nonlocal_goto
531 if (! HAVE_nonlocal_goto)
532 #endif
533 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
534
535 #if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
536 if (fixed_regs[ARG_POINTER_REGNUM])
537 {
538 #ifdef ELIMINABLE_REGS
539 size_t i;
540 static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;
541
542 for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
543 if (elim_regs[i].from == ARG_POINTER_REGNUM
544 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
545 break;
546
547 if (i == ARRAY_SIZE (elim_regs))
548 #endif
549 {
550 /* Now restore our arg pointer from the address at which it
551 was saved in our stack frame. */
552 emit_move_insn (virtual_incoming_args_rtx,
553 copy_to_reg (get_arg_pointer_save_area (cfun)));
554 }
555 }
556 #endif
557
558 #ifdef HAVE_builtin_setjmp_receiver
559 if (HAVE_builtin_setjmp_receiver)
560 emit_insn (gen_builtin_setjmp_receiver (receiver_label));
561 else
562 #endif
563 #ifdef HAVE_nonlocal_goto_receiver
564 if (HAVE_nonlocal_goto_receiver)
565 emit_insn (gen_nonlocal_goto_receiver ());
566 else
567 #endif
568 { /* Nothing */ }
569
570 /* @@@ This is a kludge. Not all machine descriptions define a blockage
571 insn, but we must not allow the code we just generated to be reordered
572 by scheduling. Specifically, the update of the frame pointer must
573 happen immediately, not later. So emit an ASM_INPUT to act as blockage
574 insn. */
575 emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
576 }
577
578 /* __builtin_setjmp is passed a pointer to an array of five words (not
579 all will be used on all machines). It operates similarly to the C
580 library function of the same name, but is more efficient. Much of
581 the code below (and for longjmp) is copied from the handling of
582 non-local gotos.
583
584 NOTE: This is intended for use by GNAT and the exception handling
585 scheme in the compiler and will only work in the method used by
586 them. */
587
588 static rtx
589 expand_builtin_setjmp (arglist, target)
590 tree arglist;
591 rtx target;
592 {
593 rtx buf_addr, next_lab, cont_lab;
594
595 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
596 return NULL_RTX;
597
598 if (target == 0 || GET_CODE (target) != REG
599 || REGNO (target) < FIRST_PSEUDO_REGISTER)
600 target = gen_reg_rtx (TYPE_MODE (integer_type_node));
601
602 buf_addr = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
603
604 next_lab = gen_label_rtx ();
605 cont_lab = gen_label_rtx ();
606
607 expand_builtin_setjmp_setup (buf_addr, next_lab);
608
609 /* Set TARGET to zero and branch to the continue label. */
610 emit_move_insn (target, const0_rtx);
611 emit_jump_insn (gen_jump (cont_lab));
612 emit_barrier ();
613 emit_label (next_lab);
614
615 expand_builtin_setjmp_receiver (next_lab);
616
617 /* Set TARGET to one. */
618 emit_move_insn (target, const1_rtx);
619 emit_label (cont_lab);
620
621 /* Tell flow about the strange goings on. Putting `next_lab' on
622 `nonlocal_goto_handler_labels' indicates that function
623 calls may traverse the arc back to this label. */
624
625 current_function_has_nonlocal_label = 1;
626 nonlocal_goto_handler_labels
627 = gen_rtx_EXPR_LIST (VOIDmode, next_lab, nonlocal_goto_handler_labels);
628
629 return target;
630 }
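
/* An illustrative use of the pair, valid only under the restrictions noted
   above (do_work and handle_unwind are placeholder functions):

       void *buf[5];
       if (__builtin_setjmp (buf) == 0)
         do_work ();
       else
         handle_unwind ();

   where do_work may transfer control back via __builtin_longjmp (buf, 1);
   the second argument must be the constant 1, matching the value stored into
   TARGET on the receiver path above.  */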
631
632 /* __builtin_longjmp is passed a pointer to an array of five words (not
633 all will be used on all machines). It operates similarly to the C
634 library function of the same name, but is more efficient. Much of
635 the code below is copied from the handling of non-local gotos.
636
637 NOTE: This is intended for use by GNAT and the exception handling
638 scheme in the compiler and will only work in the method used by
639 them. */
640
641 void
642 expand_builtin_longjmp (buf_addr, value)
643 rtx buf_addr, value;
644 {
645 rtx fp, lab, stack, insn, last;
646 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
647
648 if (setjmp_alias_set == -1)
649 setjmp_alias_set = new_alias_set ();
650
651 #ifdef POINTERS_EXTEND_UNSIGNED
652 if (GET_MODE (buf_addr) != Pmode)
653 buf_addr = convert_memory_address (Pmode, buf_addr);
654 #endif
655
656 buf_addr = force_reg (Pmode, buf_addr);
657
658 /* We used to store value in static_chain_rtx, but that fails if pointers
659 are smaller than integers. We instead require that the user must pass
660 a second argument of 1, because that is what builtin_setjmp will
661 return. This also makes EH slightly more efficient, since we are no
662 longer copying around a value that we don't care about. */
663 if (value != const1_rtx)
664 abort ();
665
666 current_function_calls_longjmp = 1;
667
668 last = get_last_insn ();
669 #ifdef HAVE_builtin_longjmp
670 if (HAVE_builtin_longjmp)
671 emit_insn (gen_builtin_longjmp (buf_addr));
672 else
673 #endif
674 {
675 fp = gen_rtx_MEM (Pmode, buf_addr);
676 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
677 GET_MODE_SIZE (Pmode)));
678
679 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
680 2 * GET_MODE_SIZE (Pmode)));
681 set_mem_alias_set (fp, setjmp_alias_set);
682 set_mem_alias_set (lab, setjmp_alias_set);
683 set_mem_alias_set (stack, setjmp_alias_set);
684
685 /* Pick up FP, label, and SP from the block and jump. This code is
686 from expand_goto in stmt.c; see there for detailed comments. */
687 #if HAVE_nonlocal_goto
688 if (HAVE_nonlocal_goto)
689 /* We have to pass a value to the nonlocal_goto pattern that will
690 get copied into the static_chain pointer, but it does not matter
691 what that value is, because builtin_setjmp does not use it. */
692 emit_insn (gen_nonlocal_goto (value, lab, stack, fp));
693 else
694 #endif
695 {
696 lab = copy_to_reg (lab);
697
698 emit_move_insn (hard_frame_pointer_rtx, fp);
699 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
700
701 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
702 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
703 emit_indirect_jump (lab);
704 }
705 }
706
707 /* Search backwards and mark the jump insn as a non-local goto.
708 Note that this precludes the use of __builtin_longjmp to a
709 __builtin_setjmp target in the same function. However, we've
710 already cautioned the user that these functions are for
711 internal exception handling use only. */
712 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
713 {
714 if (insn == last)
715 abort ();
716 if (GET_CODE (insn) == JUMP_INSN)
717 {
718 REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
719 REG_NOTES (insn));
720 break;
721 }
722 else if (GET_CODE (insn) == CALL_INSN)
723 break;
724 }
725 }
726
727 /* Expand a call to __builtin_prefetch. For a target that does not support
728 data prefetch, evaluate the memory address argument in case it has side
729 effects. */
730
731 static void
732 expand_builtin_prefetch (arglist)
733 tree arglist;
734 {
735 tree arg0, arg1, arg2;
736 rtx op0, op1, op2;
737
738 if (!validate_arglist (arglist, POINTER_TYPE, 0))
739 return;
740
741 arg0 = TREE_VALUE (arglist);
742 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
743 zero (read) and argument 2 (locality) defaults to 3 (high degree of
744 locality). */
745 if (TREE_CHAIN (arglist))
746 {
747 arg1 = TREE_VALUE (TREE_CHAIN (arglist));
748 if (TREE_CHAIN (TREE_CHAIN (arglist)))
749 arg2 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
750 else
751 arg2 = build_int_2 (3, 0);
752 }
753 else
754 {
755 arg1 = integer_zero_node;
756 arg2 = build_int_2 (3, 0);
757 }
758
759 /* Argument 0 is an address. */
760 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
761
762 /* Argument 1 (read/write flag) must be a compile-time constant int. */
763 if (TREE_CODE (arg1) != INTEGER_CST)
764 {
765 error ("second arg to `__builtin_prefetch' must be a constant");
766 arg1 = integer_zero_node;
767 }
768 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
769 /* Argument 1 must be either zero or one. */
770 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
771 {
772 warning ("invalid second arg to __builtin_prefetch; using zero");
773 op1 = const0_rtx;
774 }
775
776 /* Argument 2 (locality) must be a compile-time constant int. */
777 if (TREE_CODE (arg2) != INTEGER_CST)
778 {
779 error ("third arg to `__builtin_prefetch' must be a constant");
780 arg2 = integer_zero_node;
781 }
782 op2 = expand_expr (arg2, NULL_RTX, VOIDmode, 0);
783 /* Argument 2 must be 0, 1, 2, or 3. */
784 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
785 {
786 warning ("invalid third arg to __builtin_prefetch; using zero");
787 op2 = const0_rtx;
788 }
789
790 #ifdef HAVE_prefetch
791 if (HAVE_prefetch)
792 {
793 if ((! (*insn_data[(int) CODE_FOR_prefetch].operand[0].predicate)
794 (op0,
795 insn_data[(int) CODE_FOR_prefetch].operand[0].mode))
796 || (GET_MODE(op0) != Pmode))
797 {
798 #ifdef POINTERS_EXTEND_UNSIGNED
799 if (GET_MODE(op0) != Pmode)
800 op0 = convert_memory_address (Pmode, op0);
801 #endif
802 op0 = force_reg (Pmode, op0);
803 }
804 emit_insn (gen_prefetch (op0, op1, op2));
805 }
806 else
807 #endif
808 op0 = protect_from_queue (op0, 0);
809 /* Don't do anything with direct references to volatile memory, but
810 generate code to handle other side effects. */
811 if (GET_CODE (op0) != MEM && side_effects_p (op0))
812 emit_insn (op0);
813 }
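
/* Typical source-level uses that reach this expander:

       __builtin_prefetch (p);           prefetch for read, locality 3
       __builtin_prefetch (p, 1, 0);     prefetch for write, no temporal locality

   On targets without a prefetch insn, only the side effects of the address
   expression are kept, as handled above.  */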
814
815 /* Get a MEM rtx for expression EXP which is the address of an operand
816 to be used in a string instruction (cmpstrsi, movstrsi, ...). */
817
818 static rtx
819 get_memory_rtx (exp)
820 tree exp;
821 {
822 rtx addr = expand_expr (exp, NULL_RTX, ptr_mode, EXPAND_SUM);
823 rtx mem;
824
825 #ifdef POINTERS_EXTEND_UNSIGNED
826 if (GET_MODE (addr) != Pmode)
827 addr = convert_memory_address (Pmode, addr);
828 #endif
829
830 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
831
832 /* Get an expression we can use to find the attributes to assign to MEM.
833 If it is an ADDR_EXPR, use the operand. Otherwise, dereference it if
834 we can. First remove any nops. */
835 while ((TREE_CODE (exp) == NOP_EXPR || TREE_CODE (exp) == CONVERT_EXPR
836 || TREE_CODE (exp) == NON_LVALUE_EXPR)
837 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
838 exp = TREE_OPERAND (exp, 0);
839
840 if (TREE_CODE (exp) == ADDR_EXPR)
841 {
842 exp = TREE_OPERAND (exp, 0);
843 set_mem_attributes (mem, exp, 0);
844 }
845 else if (POINTER_TYPE_P (TREE_TYPE (exp)))
846 {
847 exp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
848 /* memcpy, memset and other builtin stringops can alias with anything. */
849 set_mem_alias_set (mem, 0);
850 }
851
852 return mem;
853 }
854
855 /* Built-in functions to perform an untyped call and return. */
856
857 /* For each register that may be used for calling a function, this
858 gives a mode used to copy the register's value. VOIDmode indicates
859 the register is not used for calling a function. If the machine
860 has register windows, this gives only the outbound registers.
861 INCOMING_REGNO gives the corresponding inbound register. */
862 static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
863
864 /* For each register that may be used for returning values, this gives
865 a mode used to copy the register's value. VOIDmode indicates the
866 register is not used for returning values. If the machine has
867 register windows, this gives only the outbound registers.
868 INCOMING_REGNO gives the corresponding inbound register. */
869 static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
870
871 /* For each register that may be used for calling a function, this
872 gives the offset of that register into the block returned by
873 __builtin_apply_args. 0 indicates that the register is not
874 used for calling a function. */
875 static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
876
877 /* Return the offset of register REGNO into the block returned by
878 __builtin_apply_args. This is not declared static, since it is
879 needed in objc-act.c. */
880
881 int
882 apply_args_register_offset (regno)
883 int regno;
884 {
885 apply_args_size ();
886
887 /* Arguments are always put in outgoing registers (in the argument
888 block) when that makes sense. */
889 #ifdef OUTGOING_REGNO
890 regno = OUTGOING_REGNO (regno);
891 #endif
892 return apply_args_reg_offset[regno];
893 }
894
895 /* Return the size required for the block returned by __builtin_apply_args,
896 and initialize apply_args_mode. */
897
898 static int
899 apply_args_size ()
900 {
901 static int size = -1;
902 int align;
903 unsigned int regno;
904 enum machine_mode mode;
905
906 /* The values computed by this function never change. */
907 if (size < 0)
908 {
909 /* The first value is the incoming arg-pointer. */
910 size = GET_MODE_SIZE (Pmode);
911
912 /* The second value is the structure value address unless this is
913 passed as an "invisible" first argument. */
914 if (struct_value_rtx)
915 size += GET_MODE_SIZE (Pmode);
916
917 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
918 if (FUNCTION_ARG_REGNO_P (regno))
919 {
920 /* Search for the proper mode for copying this register's
921 value. I'm not sure this is right, but it works so far. */
922 enum machine_mode best_mode = VOIDmode;
923
924 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
925 mode != VOIDmode;
926 mode = GET_MODE_WIDER_MODE (mode))
927 if (HARD_REGNO_MODE_OK (regno, mode)
928 && HARD_REGNO_NREGS (regno, mode) == 1)
929 best_mode = mode;
930
931 if (best_mode == VOIDmode)
932 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
933 mode != VOIDmode;
934 mode = GET_MODE_WIDER_MODE (mode))
935 if (HARD_REGNO_MODE_OK (regno, mode)
936 && have_insn_for (SET, mode))
937 best_mode = mode;
938
939 if (best_mode == VOIDmode)
940 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
941 mode != VOIDmode;
942 mode = GET_MODE_WIDER_MODE (mode))
943 if (HARD_REGNO_MODE_OK (regno, mode)
944 && have_insn_for (SET, mode))
945 best_mode = mode;
946
947 if (best_mode == VOIDmode)
948 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
949 mode != VOIDmode;
950 mode = GET_MODE_WIDER_MODE (mode))
951 if (HARD_REGNO_MODE_OK (regno, mode)
952 && have_insn_for (SET, mode))
953 best_mode = mode;
954
955 mode = best_mode;
956 if (mode == VOIDmode)
957 abort ();
958
959 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
960 if (size % align != 0)
961 size = CEIL (size, align) * align;
962 apply_args_reg_offset[regno] = size;
963 size += GET_MODE_SIZE (mode);
964 apply_args_mode[regno] = mode;
965 }
966 else
967 {
968 apply_args_mode[regno] = VOIDmode;
969 apply_args_reg_offset[regno] = 0;
970 }
971 }
972 return size;
973 }
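
/* As a concrete sketch (the layout is entirely target-specific): on a 32-bit
   target the block begins with the 4-byte incoming arg pointer, optionally
   followed by the structure value address, then one suitably aligned slot per
   argument register (e.g. 4 bytes for each word-mode integer register and 8
   or more for a wider FP register).  apply_args_reg_offset records where each
   register's slot starts.  */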
974
975 /* Return the size required for the block returned by __builtin_apply,
976 and initialize apply_result_mode. */
977
978 static int
979 apply_result_size ()
980 {
981 static int size = -1;
982 int align, regno;
983 enum machine_mode mode;
984
985 /* The values computed by this function never change. */
986 if (size < 0)
987 {
988 size = 0;
989
990 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
991 if (FUNCTION_VALUE_REGNO_P (regno))
992 {
993 /* Search for the proper mode for copying this register's
994 value. I'm not sure this is right, but it works so far. */
995 enum machine_mode best_mode = VOIDmode;
996
997 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
998 mode != TImode;
999 mode = GET_MODE_WIDER_MODE (mode))
1000 if (HARD_REGNO_MODE_OK (regno, mode))
1001 best_mode = mode;
1002
1003 if (best_mode == VOIDmode)
1004 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
1005 mode != VOIDmode;
1006 mode = GET_MODE_WIDER_MODE (mode))
1007 if (HARD_REGNO_MODE_OK (regno, mode)
1008 && have_insn_for (SET, mode))
1009 best_mode = mode;
1010
1011 if (best_mode == VOIDmode)
1012 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
1013 mode != VOIDmode;
1014 mode = GET_MODE_WIDER_MODE (mode))
1015 if (HARD_REGNO_MODE_OK (regno, mode)
1016 && have_insn_for (SET, mode))
1017 best_mode = mode;
1018
1019 if (best_mode == VOIDmode)
1020 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
1021 mode != VOIDmode;
1022 mode = GET_MODE_WIDER_MODE (mode))
1023 if (HARD_REGNO_MODE_OK (regno, mode)
1024 && have_insn_for (SET, mode))
1025 best_mode = mode;
1026
1027 mode = best_mode;
1028 if (mode == VOIDmode)
1029 abort ();
1030
1031 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1032 if (size % align != 0)
1033 size = CEIL (size, align) * align;
1034 size += GET_MODE_SIZE (mode);
1035 apply_result_mode[regno] = mode;
1036 }
1037 else
1038 apply_result_mode[regno] = VOIDmode;
1039
1040 /* Allow targets that use untyped_call and untyped_return to override
1041 the size so that machine-specific information can be stored here. */
1042 #ifdef APPLY_RESULT_SIZE
1043 size = APPLY_RESULT_SIZE;
1044 #endif
1045 }
1046 return size;
1047 }
1048
1049 #if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
1050 /* Create a vector describing the result block RESULT. If SAVEP is true,
1051 the result block is used to save the values; otherwise it is used to
1052 restore the values. */
1053
1054 static rtx
1055 result_vector (savep, result)
1056 int savep;
1057 rtx result;
1058 {
1059 int regno, size, align, nelts;
1060 enum machine_mode mode;
1061 rtx reg, mem;
1062 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
1063
1064 size = nelts = 0;
1065 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1066 if ((mode = apply_result_mode[regno]) != VOIDmode)
1067 {
1068 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1069 if (size % align != 0)
1070 size = CEIL (size, align) * align;
1071 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
1072 mem = adjust_address (result, mode, size);
1073 savevec[nelts++] = (savep
1074 ? gen_rtx_SET (VOIDmode, mem, reg)
1075 : gen_rtx_SET (VOIDmode, reg, mem));
1076 size += GET_MODE_SIZE (mode);
1077 }
1078 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
1079 }
1080 #endif /* HAVE_untyped_call or HAVE_untyped_return */
1081
1082 /* Save the state required to perform an untyped call with the same
1083 arguments as were passed to the current function. */
1084
1085 static rtx
1086 expand_builtin_apply_args_1 ()
1087 {
1088 rtx registers;
1089 int size, align, regno;
1090 enum machine_mode mode;
1091
1092 /* Create a block where the arg-pointer, structure value address,
1093 and argument registers can be saved. */
1094 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
1095
1096 /* Walk past the arg-pointer and structure value address. */
1097 size = GET_MODE_SIZE (Pmode);
1098 if (struct_value_rtx)
1099 size += GET_MODE_SIZE (Pmode);
1100
1101 /* Save each register used in calling a function to the block. */
1102 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1103 if ((mode = apply_args_mode[regno]) != VOIDmode)
1104 {
1105 rtx tem;
1106
1107 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1108 if (size % align != 0)
1109 size = CEIL (size, align) * align;
1110
1111 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1112
1113 emit_move_insn (adjust_address (registers, mode, size), tem);
1114 size += GET_MODE_SIZE (mode);
1115 }
1116
1117 /* Save the arg pointer to the block. */
1118 emit_move_insn (adjust_address (registers, Pmode, 0),
1119 copy_to_reg (virtual_incoming_args_rtx));
1120 size = GET_MODE_SIZE (Pmode);
1121
1122 /* Save the structure value address unless this is passed as an
1123 "invisible" first argument. */
1124 if (struct_value_incoming_rtx)
1125 {
1126 emit_move_insn (adjust_address (registers, Pmode, size),
1127 copy_to_reg (struct_value_incoming_rtx));
1128 size += GET_MODE_SIZE (Pmode);
1129 }
1130
1131 /* Return the address of the block. */
1132 return copy_addr_to_reg (XEXP (registers, 0));
1133 }
1134
1135 /* __builtin_apply_args returns a block of memory allocated on
1136 the stack into which are stored the arg pointer, structure
1137 value address, static chain, and all the registers that might
1138 possibly be used in performing a function call. The code is
1139 moved to the start of the function so the incoming values are
1140 saved. */
1141
1142 static rtx
1143 expand_builtin_apply_args ()
1144 {
1145 /* Don't do __builtin_apply_args more than once in a function.
1146 Save the result of the first call and reuse it. */
1147 if (apply_args_value != 0)
1148 return apply_args_value;
1149 {
1150 /* When this function is called, it means that registers must be
1151 saved on entry to this function. So we migrate the
1152 call to the first insn of this function. */
1153 rtx temp;
1154 rtx seq;
1155
1156 start_sequence ();
1157 temp = expand_builtin_apply_args_1 ();
1158 seq = get_insns ();
1159 end_sequence ();
1160
1161 apply_args_value = temp;
1162
1163 /* Put the insns after the NOTE that starts the function.
1164 If this is inside a start_sequence, make the outer-level insn
1165 chain current, so the code is placed at the start of the
1166 function. */
1167 push_topmost_sequence ();
1168 emit_insn_before (seq, NEXT_INSN (get_insns ()));
1169 pop_topmost_sequence ();
1170 return temp;
1171 }
1172 }
1173
1174 /* Perform an untyped call and save the state required to perform an
1175 untyped return of whatever value was returned by the given function. */
1176
1177 static rtx
1178 expand_builtin_apply (function, arguments, argsize)
1179 rtx function, arguments, argsize;
1180 {
1181 int size, align, regno;
1182 enum machine_mode mode;
1183 rtx incoming_args, result, reg, dest, src, call_insn;
1184 rtx old_stack_level = 0;
1185 rtx call_fusage = 0;
1186
1187 #ifdef POINTERS_EXTEND_UNSIGNED
1188 if (GET_MODE (arguments) != Pmode)
1189 arguments = convert_memory_address (Pmode, arguments);
1190 #endif
1191
1192 /* Create a block where the return registers can be saved. */
1193 result = assign_stack_local (BLKmode, apply_result_size (), -1);
1194
1195 /* Fetch the arg pointer from the ARGUMENTS block. */
1196 incoming_args = gen_reg_rtx (Pmode);
1197 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
1198 #ifndef STACK_GROWS_DOWNWARD
1199 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
1200 incoming_args, 0, OPTAB_LIB_WIDEN);
1201 #endif
1202
1203 /* Perform postincrements before actually calling the function. */
1204 emit_queue ();
1205
1206 /* Push a new argument block and copy the arguments. Do not allow
1207 the (potential) memcpy call below to interfere with our stack
1208 manipulations. */
1209 do_pending_stack_adjust ();
1210 NO_DEFER_POP;
1211
1212 /* Save the stack with nonlocal if available */
1213 #ifdef HAVE_save_stack_nonlocal
1214 if (HAVE_save_stack_nonlocal)
1215 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
1216 else
1217 #endif
1218 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1219
1220 /* Push a block of memory onto the stack to store the memory arguments.
1221 Save the address in a register, and copy the memory arguments. ??? I
1222 haven't figured out how the calling convention macros affect this,
1223 but it's likely that the source and/or destination addresses in
1224 the block copy will need updating in machine specific ways. */
1225 dest = allocate_dynamic_stack_space (argsize, 0, BITS_PER_UNIT);
1226 dest = gen_rtx_MEM (BLKmode, dest);
1227 set_mem_align (dest, PARM_BOUNDARY);
1228 src = gen_rtx_MEM (BLKmode, incoming_args);
1229 set_mem_align (src, PARM_BOUNDARY);
1230 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
1231
1232 /* Refer to the argument block. */
1233 apply_args_size ();
1234 arguments = gen_rtx_MEM (BLKmode, arguments);
1235 set_mem_align (arguments, PARM_BOUNDARY);
1236
1237 /* Walk past the arg-pointer and structure value address. */
1238 size = GET_MODE_SIZE (Pmode);
1239 if (struct_value_rtx)
1240 size += GET_MODE_SIZE (Pmode);
1241
1242 /* Restore each of the registers previously saved. Make USE insns
1243 for each of these registers for use in making the call. */
1244 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1245 if ((mode = apply_args_mode[regno]) != VOIDmode)
1246 {
1247 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1248 if (size % align != 0)
1249 size = CEIL (size, align) * align;
1250 reg = gen_rtx_REG (mode, regno);
1251 emit_move_insn (reg, adjust_address (arguments, mode, size));
1252 use_reg (&call_fusage, reg);
1253 size += GET_MODE_SIZE (mode);
1254 }
1255
1256 /* Restore the structure value address unless this is passed as an
1257 "invisible" first argument. */
1258 size = GET_MODE_SIZE (Pmode);
1259 if (struct_value_rtx)
1260 {
1261 rtx value = gen_reg_rtx (Pmode);
1262 emit_move_insn (value, adjust_address (arguments, Pmode, size));
1263 emit_move_insn (struct_value_rtx, value);
1264 if (GET_CODE (struct_value_rtx) == REG)
1265 use_reg (&call_fusage, struct_value_rtx);
1266 size += GET_MODE_SIZE (Pmode);
1267 }
1268
1269 /* All arguments and registers used for the call are set up by now! */
1270 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0, 0);
1271
1272 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
1273 and we don't want to load it into a register as an optimization,
1274 because prepare_call_address already did it if it should be done. */
1275 if (GET_CODE (function) != SYMBOL_REF)
1276 function = memory_address (FUNCTION_MODE, function);
1277
1278 /* Generate the actual call instruction and save the return value. */
1279 #ifdef HAVE_untyped_call
1280 if (HAVE_untyped_call)
1281 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
1282 result, result_vector (1, result)));
1283 else
1284 #endif
1285 #ifdef HAVE_call_value
1286 if (HAVE_call_value)
1287 {
1288 rtx valreg = 0;
1289
1290 /* Locate the unique return register. It is not possible to
1291 express a call that sets more than one return register using
1292 call_value; use untyped_call for that. In fact, untyped_call
1293 only needs to save the return registers in the given block. */
1294 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1295 if ((mode = apply_result_mode[regno]) != VOIDmode)
1296 {
1297 if (valreg)
1298 abort (); /* HAVE_untyped_call required. */
1299 valreg = gen_rtx_REG (mode, regno);
1300 }
1301
1302 emit_call_insn (GEN_CALL_VALUE (valreg,
1303 gen_rtx_MEM (FUNCTION_MODE, function),
1304 const0_rtx, NULL_RTX, const0_rtx));
1305
1306 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
1307 }
1308 else
1309 #endif
1310 abort ();
1311
1312 /* Find the CALL insn we just emitted. */
1313 for (call_insn = get_last_insn ();
1314 call_insn && GET_CODE (call_insn) != CALL_INSN;
1315 call_insn = PREV_INSN (call_insn))
1316 ;
1317
1318 if (! call_insn)
1319 abort ();
1320
1321 /* Put the register usage information on the CALL. If there is already
1322 some usage information, put ours at the end. */
1323 if (CALL_INSN_FUNCTION_USAGE (call_insn))
1324 {
1325 rtx link;
1326
1327 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
1328 link = XEXP (link, 1))
1329 ;
1330
1331 XEXP (link, 1) = call_fusage;
1332 }
1333 else
1334 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
1335
1336 /* Restore the stack. */
1337 #ifdef HAVE_save_stack_nonlocal
1338 if (HAVE_save_stack_nonlocal)
1339 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
1340 else
1341 #endif
1342 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1343
1344 OK_DEFER_POP;
1345
1346 /* Return the address of the result block. */
1347 result = copy_addr_to_reg (XEXP (result, 0));
1348 #ifdef POINTERS_EXTEND_UNSIGNED
1349 if (GET_MODE (result) != ptr_mode)
1350 result = convert_memory_address (ptr_mode, result);
1351 #endif
1352 return result;
1353 }
1354
1355 /* Perform an untyped return. */
1356
1357 static void
1358 expand_builtin_return (result)
1359 rtx result;
1360 {
1361 int size, align, regno;
1362 enum machine_mode mode;
1363 rtx reg;
1364 rtx call_fusage = 0;
1365
1366 #ifdef POINTERS_EXTEND_UNSIGNED
1367 if (GET_MODE (result) != Pmode)
1368 result = convert_memory_address (Pmode, result);
1369 #endif
1370
1371 apply_result_size ();
1372 result = gen_rtx_MEM (BLKmode, result);
1373
1374 #ifdef HAVE_untyped_return
1375 if (HAVE_untyped_return)
1376 {
1377 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
1378 emit_barrier ();
1379 return;
1380 }
1381 #endif
1382
1383 /* Restore the return value and note that each value is used. */
1384 size = 0;
1385 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
1386 if ((mode = apply_result_mode[regno]) != VOIDmode)
1387 {
1388 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
1389 if (size % align != 0)
1390 size = CEIL (size, align) * align;
1391 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
1392 emit_move_insn (reg, adjust_address (result, mode, size));
1393
1394 push_to_sequence (call_fusage);
1395 emit_insn (gen_rtx_USE (VOIDmode, reg));
1396 call_fusage = get_insns ();
1397 end_sequence ();
1398 size += GET_MODE_SIZE (mode);
1399 }
1400
1401 /* Put the USE insns before the return. */
1402 emit_insn (call_fusage);
1403
1404 /* Return whatever values were restored by jumping directly to the end
1405 of the function. */
1406 expand_null_return ();
1407 }
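
/* Taken together, these builtins support the classic argument-forwarding
   idiom (a sketch; target_fn and SIZE are placeholders, and SIZE must bound
   the callee's stack argument block):

       void *args = __builtin_apply_args ();
       void *result = __builtin_apply ((void (*) ()) target_fn, args, SIZE);
       __builtin_return (result);

   expand_builtin_apply above performs the call and saves the return
   registers; expand_builtin_return restores them and returns.  */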
1408
1409 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
1410
1411 static enum type_class
1412 type_to_class (type)
1413 tree type;
1414 {
1415 switch (TREE_CODE (type))
1416 {
1417 case VOID_TYPE: return void_type_class;
1418 case INTEGER_TYPE: return integer_type_class;
1419 case CHAR_TYPE: return char_type_class;
1420 case ENUMERAL_TYPE: return enumeral_type_class;
1421 case BOOLEAN_TYPE: return boolean_type_class;
1422 case POINTER_TYPE: return pointer_type_class;
1423 case REFERENCE_TYPE: return reference_type_class;
1424 case OFFSET_TYPE: return offset_type_class;
1425 case REAL_TYPE: return real_type_class;
1426 case COMPLEX_TYPE: return complex_type_class;
1427 case FUNCTION_TYPE: return function_type_class;
1428 case METHOD_TYPE: return method_type_class;
1429 case RECORD_TYPE: return record_type_class;
1430 case UNION_TYPE:
1431 case QUAL_UNION_TYPE: return union_type_class;
1432 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
1433 ? string_type_class : array_type_class);
1434 case SET_TYPE: return set_type_class;
1435 case FILE_TYPE: return file_type_class;
1436 case LANG_TYPE: return lang_type_class;
1437 default: return no_type_class;
1438 }
1439 }
1440
1441 /* Expand a call to __builtin_classify_type with arguments found in
1442 ARGLIST. */
1443
1444 static rtx
1445 expand_builtin_classify_type (arglist)
1446 tree arglist;
1447 {
1448 if (arglist != 0)
1449 return GEN_INT (type_to_class (TREE_TYPE (TREE_VALUE (arglist))));
1450 return GEN_INT (no_type_class);
1451 }
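
/* For instance, __builtin_classify_type (1) expands to integer_type_class,
   __builtin_classify_type (1.0) to real_type_class, and a call with no
   argument yields no_type_class.  */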
1452
1453 /* Expand expression EXP, which is a call to __builtin_constant_p. */
1454
1455 static rtx
1456 expand_builtin_constant_p (exp)
1457 tree exp;
1458 {
1459 tree arglist = TREE_OPERAND (exp, 1);
1460 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
1461 rtx tmp;
1462
1463 if (arglist == 0)
1464 return const0_rtx;
1465 arglist = TREE_VALUE (arglist);
1466
1467 /* We have taken care of the easy cases during constant folding. This
1468 case is not obvious, so emit (constant_p_rtx (ARGLIST)) and let CSE get a
1469 chance to see if it can deduce whether ARGLIST is constant. */
1470
1471 tmp = expand_expr (arglist, NULL_RTX, VOIDmode, 0);
1472 tmp = gen_rtx_CONSTANT_P_RTX (value_mode, tmp);
1473 return tmp;
1474 }
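
/* For example, __builtin_constant_p (3 * 7) is folded to 1 well before this
   point, while __builtin_constant_p (x) for an ordinary variable reaches this
   function and becomes a CONSTANT_P_RTX, letting later CSE decide whether X
   has in fact become constant.  */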
1475
1476 /* Expand a call to one of the builtin math functions (sin, cos, or sqrt).
1477 Return 0 if a normal call should be emitted rather than expanding the
1478 function in-line. EXP is the expression that is a call to the builtin
1479 function; if convenient, the result should be placed in TARGET.
1480 SUBTARGET may be used as the target for computing one of EXP's operands. */
1481
1482 static rtx
1483 expand_builtin_mathfn (exp, target, subtarget)
1484 tree exp;
1485 rtx target, subtarget;
1486 {
1487 optab builtin_optab;
1488 rtx op0, insns;
1489 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
1490 tree arglist = TREE_OPERAND (exp, 1);
1491 enum machine_mode argmode;
1492
1493 if (!validate_arglist (arglist, REAL_TYPE, VOID_TYPE))
1494 return 0;
1495
1496 /* Stabilize and compute the argument. */
1497 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
1498 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
1499 {
1500 exp = copy_node (exp);
1501 TREE_OPERAND (exp, 1) = arglist;
1502 /* Wrap the computation of the argument in a SAVE_EXPR. That
1503 way, if we need to expand the argument again (as in the
1504 flag_errno_math case below where we cannot directly set
1505 errno), we will not perform side-effects more than once.
1506 Note that here we're mutating the original EXP as well as the
1507 copy; that's the right thing to do in case the original EXP
1508 is expanded later. */
1509 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
1510 arglist = copy_node (arglist);
1511 }
1512 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
1513
1514 /* Make a suitable register to place result in. */
1515 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
1516
1517 emit_queue ();
1518 start_sequence ();
1519
1520 switch (DECL_FUNCTION_CODE (fndecl))
1521 {
1522 case BUILT_IN_SIN:
1523 case BUILT_IN_SINF:
1524 case BUILT_IN_SINL:
1525 builtin_optab = sin_optab; break;
1526 case BUILT_IN_COS:
1527 case BUILT_IN_COSF:
1528 case BUILT_IN_COSL:
1529 builtin_optab = cos_optab; break;
1530 case BUILT_IN_SQRT:
1531 case BUILT_IN_SQRTF:
1532 case BUILT_IN_SQRTL:
1533 builtin_optab = sqrt_optab; break;
1534 case BUILT_IN_EXP:
1535 case BUILT_IN_EXPF:
1536 case BUILT_IN_EXPL:
1537 builtin_optab = exp_optab; break;
1538 case BUILT_IN_LOG:
1539 case BUILT_IN_LOGF:
1540 case BUILT_IN_LOGL:
1541 builtin_optab = log_optab; break;
1542 default:
1543 abort ();
1544 }
1545
1546 /* Compute into TARGET.
1547 Set TARGET to wherever the result comes back. */
1548 argmode = TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist)));
1549 target = expand_unop (argmode, builtin_optab, op0, target, 0);
1550
1551 /* If we were unable to expand via the builtin, stop the
1552 sequence (without outputting the insns) and return 0, causing
1553 a call to the library function. */
1554 if (target == 0)
1555 {
1556 end_sequence ();
1557 return 0;
1558 }
1559
1560 /* If errno must be maintained, we must set it to EDOM for NaN results. */
1561
1562 if (flag_errno_math && HONOR_NANS (argmode))
1563 {
1564 rtx lab1;
1565
1566 lab1 = gen_label_rtx ();
1567
1568 /* Test the result; if it is NaN, set errno=EDOM because
1569 the argument was not in the domain. */
1570 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
1571 0, lab1);
1572
1573 #ifdef TARGET_EDOM
1574 {
1575 #ifdef GEN_ERRNO_RTX
1576 rtx errno_rtx = GEN_ERRNO_RTX;
1577 #else
1578 rtx errno_rtx
1579 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
1580 #endif
1581
1582 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
1583 }
1584 #else
1585 /* We can't set errno=EDOM directly; let the library call do it.
1586 Pop the arguments right away in case the call gets deleted. */
1587 NO_DEFER_POP;
1588 expand_call (exp, target, 0);
1589 OK_DEFER_POP;
1590 #endif
1591
1592 emit_label (lab1);
1593 }
1594
1595 /* Output the entire sequence. */
1596 insns = get_insns ();
1597 end_sequence ();
1598 emit_insn (insns);
1599
1600 return target;
1601 }
1602
1603 /* Expand expression EXP, which is a call to the strlen builtin. Return 0
1604 if we failed; the caller should then emit a normal call. Otherwise
1605 try to get the result in TARGET, if convenient. */
1606
1607 static rtx
1608 expand_builtin_strlen (exp, target)
1609 tree exp;
1610 rtx target;
1611 {
1612 tree arglist = TREE_OPERAND (exp, 1);
1613 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
1614
1615 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
1616 return 0;
1617 else
1618 {
1619 rtx pat;
1620 tree src = TREE_VALUE (arglist);
1621
1622 int align
1623 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
1624
1625 rtx result, src_reg, char_rtx, before_strlen;
1626 enum machine_mode insn_mode = value_mode, char_mode;
1627 enum insn_code icode = CODE_FOR_nothing;
1628
1629 /* If SRC is not a pointer type, don't do this operation inline. */
1630 if (align == 0)
1631 return 0;
1632
1633 /* Bail out if we can't compute strlen in the right mode. */
1634 while (insn_mode != VOIDmode)
1635 {
1636 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
1637 if (icode != CODE_FOR_nothing)
1638 break;
1639
1640 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
1641 }
1642 if (insn_mode == VOIDmode)
1643 return 0;
1644
1645 /* Make a place to write the result of the instruction. */
1646 result = target;
1647 if (! (result != 0
1648 && GET_CODE (result) == REG
1649 && GET_MODE (result) == insn_mode
1650 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
1651 result = gen_reg_rtx (insn_mode);
1652
1653 /* Make a place to hold the source address. We will not expand
1654 the actual source until we are sure that the expansion will
1655 not fail -- there are trees that cannot be expanded twice. */
1656 src_reg = gen_reg_rtx (Pmode);
1657
1658 /* Mark the beginning of the strlen sequence so we can emit the
1659 source operand later. */
1660 before_strlen = get_last_insn ();
1661
1662 char_rtx = const0_rtx;
1663 char_mode = insn_data[(int) icode].operand[2].mode;
1664 if (! (*insn_data[(int) icode].operand[2].predicate) (char_rtx,
1665 char_mode))
1666 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
1667
1668 pat = GEN_FCN (icode) (result, gen_rtx_MEM (BLKmode, src_reg),
1669 char_rtx, GEN_INT (align));
1670 if (! pat)
1671 return 0;
1672 emit_insn (pat);
1673
1674 /* Now that we are assured of success, expand the source. */
1675 start_sequence ();
1676 pat = memory_address (BLKmode,
1677 expand_expr (src, src_reg, ptr_mode, EXPAND_SUM));
1678 if (pat != src_reg)
1679 emit_move_insn (src_reg, pat);
1680 pat = get_insns ();
1681 end_sequence ();
1682
1683 if (before_strlen)
1684 emit_insn_after (pat, before_strlen);
1685 else
1686 emit_insn_before (pat, get_insns ());
1687
1688 /* Return the value in the proper mode for this function. */
1689 if (GET_MODE (result) == value_mode)
1690 target = result;
1691 else if (target != 0)
1692 convert_move (target, result, 0);
1693 else
1694 target = convert_to_mode (value_mode, result, 0);
1695
1696 return target;
1697 }
1698 }
1699
/* Expand a call to the strstr builtin.  Return 0 if we failed; the
   caller should emit a normal call.  Otherwise try to get the result
   in TARGET, if convenient (and in mode MODE if that's convenient).  */
1703
1704 static rtx
expand_builtin_strstr (arglist, target, mode)
1706 tree arglist;
1707 rtx target;
1708 enum machine_mode mode;
1709 {
1710 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1711 return 0;
1712 else
1713 {
1714 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
1715 tree fn;
1716 const char *p1, *p2;
1717
1718 p2 = c_getstr (s2);
1719 if (p2 == NULL)
1720 return 0;
1721
1722 p1 = c_getstr (s1);
1723 if (p1 != NULL)
1724 {
1725 const char *r = strstr (p1, p2);
1726
1727 if (r == NULL)
1728 return const0_rtx;
1729
1730 /* Return an offset into the constant string argument. */
1731 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
1732 s1, ssize_int (r - p1))),
1733 target, mode, EXPAND_NORMAL);
1734 }
1735
1736 if (p2[0] == '\0')
1737 return expand_expr (s1, target, mode, EXPAND_NORMAL);
1738
1739 if (p2[1] != '\0')
1740 return 0;
1741
1742 fn = built_in_decls[BUILT_IN_STRCHR];
1743 if (!fn)
1744 return 0;
1745
1746 /* New argument list transforming strstr(s1, s2) to
1747 strchr(s1, s2[0]). */
1748 arglist =
1749 build_tree_list (NULL_TREE, build_int_2 (p2[0], 0));
1750 arglist = tree_cons (NULL_TREE, s1, arglist);
1751 return expand_expr (build_function_call_expr (fn, arglist),
1752 target, mode, EXPAND_NORMAL);
1753 }
1754 }
1755
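/* Editorial illustration, not part of the original sources: a sketch of
   what the strstr expansion above does at the source level when the
   second argument is a string literal.  The function and variable names
   are hypothetical, and the block is kept under #if 0 so it never
   affects the build.  */
#if 0
void
strstr_expansion_examples (const char *s)
{
  /* Both operands constant: folded at compile time to an offset into
     the first literal ("hello world" + 4 here).  */
  const char *a = strstr ("hello world", "o w");

  /* Empty needle: strstr (s, "") is just S, so no call is emitted.  */
  const char *b = strstr (s, "");

  /* One-character needle: rewritten as strchr (s, 'x').  */
  const char *c = strstr (s, "x");
}
#endif
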
/* Expand a call to the strchr builtin.  Return 0 if we failed; the
   caller should emit a normal call.  Otherwise try to get the result
   in TARGET, if convenient (and in mode MODE if that's convenient).  */
1759
1760 static rtx
expand_builtin_strchr (arglist, target, mode)
1762 tree arglist;
1763 rtx target;
1764 enum machine_mode mode;
1765 {
1766 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
1767 return 0;
1768 else
1769 {
1770 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
1771 const char *p1;
1772
1773 if (TREE_CODE (s2) != INTEGER_CST)
1774 return 0;
1775
1776 p1 = c_getstr (s1);
1777 if (p1 != NULL)
1778 {
1779 char c;
1780 const char *r;
1781
1782 if (target_char_cast (s2, &c))
1783 return 0;
1784
1785 r = strchr (p1, c);
1786
1787 if (r == NULL)
1788 return const0_rtx;
1789
1790 /* Return an offset into the constant string argument. */
1791 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
1792 s1, ssize_int (r - p1))),
1793 target, mode, EXPAND_NORMAL);
1794 }
1795
/* FIXME: Should use the strchrM optab here so that ports can
   optimize this.  */
1798 return 0;
1799 }
1800 }
1801
/* Expand a call to the strrchr builtin.  Return 0 if we failed; the
   caller should emit a normal call.  Otherwise try to get the result
   in TARGET, if convenient (and in mode MODE if that's convenient).  */
1805
1806 static rtx
expand_builtin_strrchr (arglist, target, mode)
1808 tree arglist;
1809 rtx target;
1810 enum machine_mode mode;
1811 {
1812 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
1813 return 0;
1814 else
1815 {
1816 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
1817 tree fn;
1818 const char *p1;
1819
1820 if (TREE_CODE (s2) != INTEGER_CST)
1821 return 0;
1822
1823 p1 = c_getstr (s1);
1824 if (p1 != NULL)
1825 {
1826 char c;
1827 const char *r;
1828
1829 if (target_char_cast (s2, &c))
1830 return 0;
1831
1832 r = strrchr (p1, c);
1833
1834 if (r == NULL)
1835 return const0_rtx;
1836
1837 /* Return an offset into the constant string argument. */
1838 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
1839 s1, ssize_int (r - p1))),
1840 target, mode, EXPAND_NORMAL);
1841 }
1842
1843 if (! integer_zerop (s2))
1844 return 0;
1845
1846 fn = built_in_decls[BUILT_IN_STRCHR];
1847 if (!fn)
1848 return 0;
1849
1850 /* Transform strrchr(s1, '\0') to strchr(s1, '\0'). */
1851 return expand_expr (build_function_call_expr (fn, arglist),
1852 target, mode, EXPAND_NORMAL);
1853 }
1854 }
1855
/* Expand a call to the strpbrk builtin.  Return 0 if we failed; the
   caller should emit a normal call.  Otherwise try to get the result
   in TARGET, if convenient (and in mode MODE if that's convenient).  */
1859
1860 static rtx
expand_builtin_strpbrk (arglist, target, mode)
1862 tree arglist;
1863 rtx target;
1864 enum machine_mode mode;
1865 {
1866 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1867 return 0;
1868 else
1869 {
1870 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
1871 tree fn;
1872 const char *p1, *p2;
1873
1874 p2 = c_getstr (s2);
1875 if (p2 == NULL)
1876 return 0;
1877
1878 p1 = c_getstr (s1);
1879 if (p1 != NULL)
1880 {
1881 const char *r = strpbrk (p1, p2);
1882
1883 if (r == NULL)
1884 return const0_rtx;
1885
1886 /* Return an offset into the constant string argument. */
1887 return expand_expr (fold (build (PLUS_EXPR, TREE_TYPE (s1),
1888 s1, ssize_int (r - p1))),
1889 target, mode, EXPAND_NORMAL);
1890 }
1891
1892 if (p2[0] == '\0')
1893 {
1894 /* strpbrk(x, "") == NULL.
1895 Evaluate and ignore the arguments in case they had
1896 side-effects. */
1897 expand_expr (s1, const0_rtx, VOIDmode, EXPAND_NORMAL);
1898 return const0_rtx;
1899 }
1900
1901 if (p2[1] != '\0')
1902 return 0; /* Really call strpbrk. */
1903
1904 fn = built_in_decls[BUILT_IN_STRCHR];
1905 if (!fn)
1906 return 0;
1907
1908 /* New argument list transforming strpbrk(s1, s2) to
1909 strchr(s1, s2[0]). */
1910 arglist =
1911 build_tree_list (NULL_TREE, build_int_2 (p2[0], 0));
1912 arglist = tree_cons (NULL_TREE, s1, arglist);
1913 return expand_expr (build_function_call_expr (fn, arglist),
1914 target, mode, EXPAND_NORMAL);
1915 }
1916 }
1917
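/* Editorial illustration, not part of the original sources: the analogous
   source-level effect of the strpbrk expansion above.  Names are
   hypothetical; the block is kept under #if 0.  */
#if 0
void
strpbrk_expansion_examples (const char *s)
{
  /* Both operands constant: folded to an offset into the first literal
     ("2003-10-25" + 4 here) or to NULL.  */
  const char *a = strpbrk ("2003-10-25", "-/");

  /* Empty accept set: always NULL; S is still evaluated for its
     side effects.  */
  const char *b = strpbrk (s, "");

  /* One-character accept set: rewritten as strchr (s, ':').  */
  const char *c = strpbrk (s, ":");
}
#endif
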
/* Callback routine for store_by_pieces.  Read GET_MODE_SIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as a target
   constant.  */
1921
1922 static rtx
builtin_memcpy_read_str (data, offset, mode)
1924 PTR data;
1925 HOST_WIDE_INT offset;
1926 enum machine_mode mode;
1927 {
1928 const char *str = (const char *) data;
1929
1930 if (offset < 0
1931 || ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
1932 > strlen (str) + 1))
1933 abort (); /* Attempt to read past the end of constant string. */
1934
1935 return c_readstr (str + offset, mode);
1936 }
1937
/* Expand a call to the memcpy builtin, with arguments in ARGLIST.
   Return 0 if we failed; the caller should emit a normal call.  Otherwise
   try to get the result in TARGET, if convenient (and in mode MODE if
   that's convenient).  */
1942
1943 static rtx
expand_builtin_memcpy (arglist, target, mode)
1945 tree arglist;
1946 rtx target;
1947 enum machine_mode mode;
1948 {
1949 if (!validate_arglist (arglist,
1950 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
1951 return 0;
1952 else
1953 {
1954 tree dest = TREE_VALUE (arglist);
1955 tree src = TREE_VALUE (TREE_CHAIN (arglist));
1956 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
1957 const char *src_str;
1958
1959 unsigned int src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
1960 unsigned int dest_align
1961 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
1962 rtx dest_mem, src_mem, dest_addr, len_rtx;
1963
1964 /* If DEST is not a pointer type, call the normal function. */
1965 if (dest_align == 0)
1966 return 0;
1967
1968 /* If the LEN parameter is zero, return DEST. */
1969 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
1970 {
1971 /* Evaluate and ignore SRC in case it has side-effects. */
1972 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
1973 return expand_expr (dest, target, mode, EXPAND_NORMAL);
1974 }
1975
/* If SRC is not a pointer type, don't do this
   operation in-line.  */
1978 if (src_align == 0)
1979 return 0;
1980
1981 dest_mem = get_memory_rtx (dest);
1982 set_mem_align (dest_mem, dest_align);
1983 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
1984 src_str = c_getstr (src);
1985
/* If SRC is a string constant and the block move would be done
   by pieces, we can avoid loading the string from memory
   and only store the computed constants.  */
1989 if (src_str
1990 && GET_CODE (len_rtx) == CONST_INT
1991 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= strlen (src_str) + 1
1992 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
1993 (PTR) src_str, dest_align))
1994 {
1995 store_by_pieces (dest_mem, INTVAL (len_rtx),
1996 builtin_memcpy_read_str,
1997 (PTR) src_str, dest_align);
1998 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
1999 #ifdef POINTERS_EXTEND_UNSIGNED
2000 if (GET_MODE (dest_mem) != ptr_mode)
2001 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2002 #endif
2003 return dest_mem;
2004 }
2005
2006 src_mem = get_memory_rtx (src);
2007 set_mem_align (src_mem, src_align);
2008
2009 /* Copy word part most expediently. */
2010 dest_addr = emit_block_move (dest_mem, src_mem, len_rtx,
2011 BLOCK_OP_NORMAL);
2012
2013 if (dest_addr == 0)
2014 {
2015 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2016 #ifdef POINTERS_EXTEND_UNSIGNED
2017 if (GET_MODE (dest_addr) != ptr_mode)
2018 dest_addr = convert_memory_address (ptr_mode, dest_addr);
2019 #endif
2020 }
2021
2022 return dest_addr;
2023 }
2024 }
2025
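/* Editorial illustration, not part of the original sources: when the
   source of a memcpy is a string literal and the length is a small
   constant, the expansion above goes through store_by_pieces and the
   bytes are emitted as immediate stores rather than a block copy.
   Hypothetical example, kept under #if 0:  */
#if 0
void
memcpy_expansion_examples (char *dst)
{
  /* Likely becomes a few constant word/byte stores into DST, with no
     reference to a string object in memory.  */
  memcpy (dst, "abcdefg", 8);

  /* Zero length: DST is the result and SRC is only evaluated for its
     side effects.  */
  memcpy (dst, "ignored", 0);
}
#endif
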
/* Expand expression EXP, which is a call to the strcpy builtin.  Return 0
   if we failed; the caller should emit a normal call.  Otherwise try to get
   the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  */
2030
2031 static rtx
expand_builtin_strcpy (exp, target, mode)
2033 tree exp;
2034 rtx target;
2035 enum machine_mode mode;
2036 {
2037 tree arglist = TREE_OPERAND (exp, 1);
2038 tree fn, len, src, dst;
2039
2040 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2041 return 0;
2042
2043 fn = built_in_decls[BUILT_IN_MEMCPY];
2044 if (!fn)
2045 return 0;
2046
2047 src = TREE_VALUE (TREE_CHAIN (arglist));
2048 len = c_strlen (src);
2049 if (len == 0 || TREE_SIDE_EFFECTS (len))
2050 return 0;
2051
2052 dst = TREE_VALUE (arglist);
2053 len = size_binop (PLUS_EXPR, len, ssize_int (1));
2054 arglist = build_tree_list (NULL_TREE, len);
2055 arglist = tree_cons (NULL_TREE, src, arglist);
2056 arglist = tree_cons (NULL_TREE, dst, arglist);
2057 return expand_expr (build_function_call_expr (fn, arglist),
2058 target, mode, EXPAND_NORMAL);
2059 }
2060
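/* Editorial illustration, not part of the original sources: the strcpy
   expansion above rewrites the call as a memcpy whose length is the
   compile-time strlen of the source plus one, so the memcpy machinery
   (including store_by_pieces) is reused.  Hypothetical example:  */
#if 0
void
strcpy_expansion_example (char *dst)
{
  /* Expanded as if it had been written memcpy (dst, "abc", 4).  */
  strcpy (dst, "abc");
}
#endif
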
/* Callback routine for store_by_pieces.  Read GET_MODE_SIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as a target
   constant.  */
2064
2065 static rtx
builtin_strncpy_read_str (data, offset, mode)
2067 PTR data;
2068 HOST_WIDE_INT offset;
2069 enum machine_mode mode;
2070 {
2071 const char *str = (const char *) data;
2072
2073 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
2074 return const0_rtx;
2075
2076 return c_readstr (str + offset, mode);
2077 }
2078
/* Expand expression EXP, which is a call to the strncpy builtin.  Return 0
   if we failed; the caller should emit a normal call.  */
2081
2082 static rtx
expand_builtin_strncpy (arglist, target, mode)
2084 tree arglist;
2085 rtx target;
2086 enum machine_mode mode;
2087 {
2088 if (!validate_arglist (arglist,
2089 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2090 return 0;
2091 else
2092 {
2093 tree slen = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
2094 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2095 tree fn;
2096
2097 /* We must be passed a constant len parameter. */
2098 if (TREE_CODE (len) != INTEGER_CST)
2099 return 0;
2100
2101 /* If the len parameter is zero, return the dst parameter. */
2102 if (integer_zerop (len))
2103 {
2104 /* Evaluate and ignore the src argument in case it has
2105 side-effects. */
2106 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)), const0_rtx,
2107 VOIDmode, EXPAND_NORMAL);
2108 /* Return the dst parameter. */
2109 return expand_expr (TREE_VALUE (arglist), target, mode,
2110 EXPAND_NORMAL);
2111 }
2112
2113 /* Now, we must be passed a constant src ptr parameter. */
2114 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
2115 return 0;
2116
2117 slen = size_binop (PLUS_EXPR, slen, ssize_int (1));
2118
/* We're required to pad with trailing zeros if the requested
   len is greater than strlen(s2)+1.  In that case try to
   use store_by_pieces; if that fails, punt.  */
2122 if (tree_int_cst_lt (slen, len))
2123 {
2124 tree dest = TREE_VALUE (arglist);
2125 unsigned int dest_align
2126 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2127 const char *p = c_getstr (TREE_VALUE (TREE_CHAIN (arglist)));
2128 rtx dest_mem;
2129
2130 if (!p || dest_align == 0 || !host_integerp (len, 1)
2131 || !can_store_by_pieces (tree_low_cst (len, 1),
2132 builtin_strncpy_read_str,
2133 (PTR) p, dest_align))
2134 return 0;
2135
2136 dest_mem = get_memory_rtx (dest);
2137 store_by_pieces (dest_mem, tree_low_cst (len, 1),
2138 builtin_strncpy_read_str,
2139 (PTR) p, dest_align);
2140 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2141 #ifdef POINTERS_EXTEND_UNSIGNED
2142 if (GET_MODE (dest_mem) != ptr_mode)
2143 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2144 #endif
2145 return dest_mem;
2146 }
2147
/* OK, transform into a call to the builtin memcpy.  */
2149 fn = built_in_decls[BUILT_IN_MEMCPY];
2150 if (!fn)
2151 return 0;
2152 return expand_expr (build_function_call_expr (fn, arglist),
2153 target, mode, EXPAND_NORMAL);
2154 }
2155 }
2156
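/* Editorial illustration, not part of the original sources: the two
   inline cases handled by the strncpy expansion above, with hypothetical
   names and kept under #if 0.  */
#if 0
void
strncpy_expansion_examples (char *dst)
{
  /* len > strlen (src) + 1: trailing NULs are required, so all eight
     bytes are written with store_by_pieces; builtin_strncpy_read_str
     supplies zero words past the end of the literal.  */
  strncpy (dst, "ab", 8);

  /* len <= strlen (src) + 1: no padding is needed, so the call is
     simply rewritten as memcpy (dst, "abcdef", 4).  */
  strncpy (dst, "abcdef", 4);
}
#endif
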
/* Callback routine for store_by_pieces.  Read GET_MODE_SIZE (MODE)
   bytes from constant string DATA + OFFSET and return it as a target
   constant.  */
2160
2161 static rtx
builtin_memset_read_str (data, offset, mode)
2163 PTR data;
2164 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2165 enum machine_mode mode;
2166 {
2167 const char *c = (const char *) data;
2168 char *p = alloca (GET_MODE_SIZE (mode));
2169
2170 memset (p, *c, GET_MODE_SIZE (mode));
2171
2172 return c_readstr (p, mode);
2173 }
2174
2175 /* Callback routine for store_by_pieces. Return the RTL of a register
2176 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
2177 char value given in the RTL register data. For example, if mode is
2178 4 bytes wide, return the RTL for 0x01010101*data. */
2179
2180 static rtx
builtin_memset_gen_str (data, offset, mode)
2182 PTR data;
2183 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2184 enum machine_mode mode;
2185 {
2186 rtx target, coeff;
2187 size_t size;
2188 char *p;
2189
2190 size = GET_MODE_SIZE (mode);
2191 if (size == 1)
2192 return (rtx) data;
2193
2194 p = alloca (size);
2195 memset (p, 1, size);
2196 coeff = c_readstr (p, mode);
2197
2198 target = convert_to_mode (mode, (rtx) data, 1);
2199 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
2200 return force_reg (mode, target);
2201 }
2202
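/* Editorial illustration, not part of the original sources: the
   coefficient trick used by builtin_memset_gen_str above.  Multiplying
   the zero-extended fill byte by a word made of 0x01 bytes replicates
   it into every byte position.  A plain C rendition for a 32-bit word,
   with a hypothetical name:  */
#if 0
unsigned int
replicate_byte_32 (unsigned char c)
{
  /* For example, 0x01010101 * 0xab == 0xabababab.  */
  return 0x01010101u * c;
}
#endif
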
/* Expand expression EXP, which is a call to the memset builtin.  Return 0
   if we failed; the caller should emit a normal call.  Otherwise try to get
   the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  */
2207
2208 static rtx
expand_builtin_memset (exp, target, mode)
2210 tree exp;
2211 rtx target;
2212 enum machine_mode mode;
2213 {
2214 tree arglist = TREE_OPERAND (exp, 1);
2215
2216 if (!validate_arglist (arglist,
2217 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
2218 return 0;
2219 else
2220 {
2221 tree dest = TREE_VALUE (arglist);
2222 tree val = TREE_VALUE (TREE_CHAIN (arglist));
2223 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2224 char c;
2225
2226 unsigned int dest_align
2227 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT);
2228 rtx dest_mem, dest_addr, len_rtx;
2229
2230 /* If DEST is not a pointer type, don't do this
2231 operation in-line. */
2232 if (dest_align == 0)
2233 return 0;
2234
2235 /* If the LEN parameter is zero, return DEST. */
2236 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
2237 {
2238 /* Evaluate and ignore VAL in case it has side-effects. */
2239 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
2240 return expand_expr (dest, target, mode, EXPAND_NORMAL);
2241 }
2242
2243 if (TREE_CODE (val) != INTEGER_CST)
2244 {
2245 rtx val_rtx;
2246
2247 if (!host_integerp (len, 1))
2248 return 0;
2249
2250 if (optimize_size && tree_low_cst (len, 1) > 1)
2251 return 0;
2252
/* Assume that we can memset by pieces if we can store
   the coefficients by pieces (in the required modes).
   We can't pass builtin_memset_gen_str as that emits RTL.  */
2256 c = 1;
2257 if (!can_store_by_pieces (tree_low_cst (len, 1),
2258 builtin_memset_read_str,
2259 (PTR) &c, dest_align))
2260 return 0;
2261
2262 val = fold (build1 (CONVERT_EXPR, unsigned_char_type_node, val));
2263 val_rtx = expand_expr (val, NULL_RTX, VOIDmode, 0);
2264 val_rtx = force_reg (TYPE_MODE (unsigned_char_type_node),
2265 val_rtx);
2266 dest_mem = get_memory_rtx (dest);
2267 store_by_pieces (dest_mem, tree_low_cst (len, 1),
2268 builtin_memset_gen_str,
2269 (PTR) val_rtx, dest_align);
2270 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2271 #ifdef POINTERS_EXTEND_UNSIGNED
2272 if (GET_MODE (dest_mem) != ptr_mode)
2273 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2274 #endif
2275 return dest_mem;
2276 }
2277
2278 if (target_char_cast (val, &c))
2279 return 0;
2280
2281 if (c)
2282 {
2283 if (!host_integerp (len, 1))
2284 return 0;
2285 if (!can_store_by_pieces (tree_low_cst (len, 1),
2286 builtin_memset_read_str, (PTR) &c,
2287 dest_align))
2288 return 0;
2289
2290 dest_mem = get_memory_rtx (dest);
2291 store_by_pieces (dest_mem, tree_low_cst (len, 1),
2292 builtin_memset_read_str,
2293 (PTR) &c, dest_align);
2294 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2295 #ifdef POINTERS_EXTEND_UNSIGNED
2296 if (GET_MODE (dest_mem) != ptr_mode)
2297 dest_mem = convert_memory_address (ptr_mode, dest_mem);
2298 #endif
2299 return dest_mem;
2300 }
2301
2302 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
2303
2304 dest_mem = get_memory_rtx (dest);
2305 set_mem_align (dest_mem, dest_align);
2306 dest_addr = clear_storage (dest_mem, len_rtx);
2307
2308 if (dest_addr == 0)
2309 {
2310 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
2311 #ifdef POINTERS_EXTEND_UNSIGNED
2312 if (GET_MODE (dest_addr) != ptr_mode)
2313 dest_addr = convert_memory_address (ptr_mode, dest_addr);
2314 #endif
2315 }
2316
2317 return dest_addr;
2318 }
2319 }
2320
/* Expand expression EXP, which is a call to the bzero builtin.  Return 0
   if we failed; the caller should emit a normal call.  */
2323
2324 static rtx
expand_builtin_bzero (exp)
2326 tree exp;
2327 {
2328 tree arglist = TREE_OPERAND (exp, 1);
2329 tree dest, size, newarglist;
2330 rtx result;
2331
2332 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2333 return NULL_RTX;
2334
2335 dest = TREE_VALUE (arglist);
2336 size = TREE_VALUE (TREE_CHAIN (arglist));
2337
/* New argument list transforming bzero(ptr x, int y) to
   memset(ptr x, int 0, size_t y).  This is done this way
   so that if it isn't expanded inline, we fall back to
   calling bzero instead of memset.  */
2342
2343 newarglist = build_tree_list (NULL_TREE, convert (sizetype, size));
2344 newarglist = tree_cons (NULL_TREE, integer_zero_node, newarglist);
2345 newarglist = tree_cons (NULL_TREE, dest, newarglist);
2346
2347 TREE_OPERAND (exp, 1) = newarglist;
2348 result = expand_builtin_memset (exp, const0_rtx, VOIDmode);
2349
2350 /* Always restore the original arguments. */
2351 TREE_OPERAND (exp, 1) = arglist;
2352
2353 return result;
2354 }
2355
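/* Editorial illustration, not part of the original sources: the bzero
   expansion above only rewrites the argument list, so the two calls
   below go through exactly the same memset expansion; if that expansion
   punts, the original bzero call (not a memset call) is emitted.
   Hypothetical example, kept under #if 0:  */
#if 0
void
bzero_expansion_example (char *p, unsigned long n)
{
  bzero (p, n);       /* handled as if it were ...  */
  memset (p, 0, n);   /* ... this, during expansion.  */
}
#endif
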
2356 /* Expand expression EXP, which is a call to the memcmp built-in function.
2357 ARGLIST is the argument list for this call. Return 0 if we failed and the
2358 caller should emit a normal call, otherwise try to get the result in
2359 TARGET, if convenient (and in mode MODE, if that's convenient). */
2360
2361 static rtx
expand_builtin_memcmp (exp, arglist, target, mode)
2363 tree exp ATTRIBUTE_UNUSED;
2364 tree arglist;
2365 rtx target;
2366 enum machine_mode mode;
2367 {
2368 tree arg1, arg2, len;
2369 const char *p1, *p2;
2370
2371 if (!validate_arglist (arglist,
2372 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2373 return 0;
2374
2375 arg1 = TREE_VALUE (arglist);
2376 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
2377 len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2378
2379 /* If the len parameter is zero, return zero. */
2380 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 0)
2381 {
2382 /* Evaluate and ignore arg1 and arg2 in case they have
2383 side-effects. */
2384 expand_expr (arg1, const0_rtx, VOIDmode, EXPAND_NORMAL);
2385 expand_expr (arg2, const0_rtx, VOIDmode, EXPAND_NORMAL);
2386 return const0_rtx;
2387 }
2388
2389 p1 = c_getstr (arg1);
2390 p2 = c_getstr (arg2);
2391
2392 /* If all arguments are constant, and the value of len is not greater
2393 than the lengths of arg1 and arg2, evaluate at compile-time. */
2394 if (host_integerp (len, 1) && p1 && p2
2395 && compare_tree_int (len, strlen (p1) + 1) <= 0
2396 && compare_tree_int (len, strlen (p2) + 1) <= 0)
2397 {
2398 const int r = memcmp (p1, p2, tree_low_cst (len, 1));
2399
2400 return (r < 0 ? constm1_rtx : (r > 0 ? const1_rtx : const0_rtx));
2401 }
2402
/* If the len parameter is one, return an expression corresponding to
   (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
2405 if (host_integerp (len, 1) && tree_low_cst (len, 1) == 1)
2406 {
2407 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2408 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
2409 tree ind1 =
2410 fold (build1 (CONVERT_EXPR, integer_type_node,
2411 build1 (INDIRECT_REF, cst_uchar_node,
2412 build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
2413 tree ind2 =
2414 fold (build1 (CONVERT_EXPR, integer_type_node,
2415 build1 (INDIRECT_REF, cst_uchar_node,
2416 build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
2417 tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
2418 return expand_expr (result, target, mode, EXPAND_NORMAL);
2419 }
2420
2421 #if defined HAVE_cmpmemsi || defined HAVE_cmpstrsi
2422 {
2423 rtx arg1_rtx, arg2_rtx, arg3_rtx;
2424 rtx result;
2425 rtx insn;
2426
2427 int arg1_align
2428 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
2429 int arg2_align
2430 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
2431 enum machine_mode insn_mode;
2432
2433 #ifdef HAVE_cmpmemsi
2434 if (HAVE_cmpmemsi)
2435 insn_mode = insn_data[(int) CODE_FOR_cmpmemsi].operand[0].mode;
2436 else
2437 #endif
2438 #ifdef HAVE_cmpstrsi
2439 if (HAVE_cmpstrsi)
2440 insn_mode = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
2441 else
2442 #endif
2443 return 0;
2444
2445 /* If we don't have POINTER_TYPE, call the function. */
2446 if (arg1_align == 0 || arg2_align == 0)
2447 return 0;
2448
2449 /* Make a place to write the result of the instruction. */
2450 result = target;
2451 if (! (result != 0
2452 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
2453 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
2454 result = gen_reg_rtx (insn_mode);
2455
2456 arg1_rtx = get_memory_rtx (arg1);
2457 arg2_rtx = get_memory_rtx (arg2);
2458 arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
2459 #ifdef HAVE_cmpmemsi
2460 if (HAVE_cmpmemsi)
2461 insn = gen_cmpmemsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
2462 GEN_INT (MIN (arg1_align, arg2_align)));
2463 else
2464 #endif
2465 #ifdef HAVE_cmpstrsi
2466 if (HAVE_cmpstrsi)
2467 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
2468 GEN_INT (MIN (arg1_align, arg2_align)));
2469 else
2470 #endif
2471 abort ();
2472
2473 if (insn)
2474 emit_insn (insn);
2475 else
2476 emit_library_call_value (memcmp_libfunc, result, LCT_PURE_MAKE_BLOCK,
2477 TYPE_MODE (integer_type_node), 3,
2478 XEXP (arg1_rtx, 0), Pmode,
2479 XEXP (arg2_rtx, 0), Pmode,
2480 convert_to_mode (TYPE_MODE (sizetype), arg3_rtx,
2481 TREE_UNSIGNED (sizetype)),
2482 TYPE_MODE (sizetype));
2483
2484 /* Return the value in the proper mode for this function. */
2485 mode = TYPE_MODE (TREE_TYPE (exp));
2486 if (GET_MODE (result) == mode)
2487 return result;
2488 else if (target != 0)
2489 {
2490 convert_move (target, result, 0);
2491 return target;
2492 }
2493 else
2494 return convert_to_mode (mode, result, 0);
2495 }
2496 #endif
2497
2498 return 0;
2499 }
2500
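/* Editorial illustration, not part of the original sources: the special
   cases folded by the memcmp expansion above, with hypothetical names
   and kept under #if 0.  */
#if 0
int
memcmp_expansion_examples (const void *a, const void *b)
{
  /* Constant operands and length: folded to -1, 0 or 1 at compile time
     (-1 here, since 'c' < 'd').  */
  int r1 = memcmp ("abc", "abd", 3);

  /* Length 1: rewritten as a subtraction of the first bytes, i.e. the
     same value as r3 below.  */
  int r2 = *(const unsigned char *) a - *(const unsigned char *) b;
  int r3 = memcmp (a, b, 1);

  return r1 + r2 + r3;
}
#endif
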
/* Expand expression EXP, which is a call to the strcmp builtin.  Return 0
   if we failed; the caller should emit a normal call.  Otherwise try to get
   the result in TARGET, if convenient.  */
2504
2505 static rtx
expand_builtin_strcmp (exp, target, mode)
2507 tree exp;
2508 rtx target;
2509 enum machine_mode mode;
2510 {
2511 tree arglist = TREE_OPERAND (exp, 1);
2512 tree arg1, arg2;
2513 const char *p1, *p2;
2514
2515 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2516 return 0;
2517
2518 arg1 = TREE_VALUE (arglist);
2519 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
2520
2521 p1 = c_getstr (arg1);
2522 p2 = c_getstr (arg2);
2523
2524 if (p1 && p2)
2525 {
2526 const int i = strcmp (p1, p2);
2527 return (i < 0 ? constm1_rtx : (i > 0 ? const1_rtx : const0_rtx));
2528 }
2529
/* If either arg is "", return an expression corresponding to
   (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
2532 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
2533 {
2534 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2535 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
2536 tree ind1 =
2537 fold (build1 (CONVERT_EXPR, integer_type_node,
2538 build1 (INDIRECT_REF, cst_uchar_node,
2539 build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
2540 tree ind2 =
2541 fold (build1 (CONVERT_EXPR, integer_type_node,
2542 build1 (INDIRECT_REF, cst_uchar_node,
2543 build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
2544 tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
2545 return expand_expr (result, target, mode, EXPAND_NORMAL);
2546 }
2547
2548 #ifdef HAVE_cmpstrsi
2549 if (HAVE_cmpstrsi)
2550 {
2551 tree len, len1, len2;
2552 rtx arg1_rtx, arg2_rtx, arg3_rtx;
2553 rtx result, insn;
2554
2555 int arg1_align
2556 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
2557 int arg2_align
2558 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
2559 enum machine_mode insn_mode
2560 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
2561
2562 len1 = c_strlen (arg1);
2563 len2 = c_strlen (arg2);
2564
2565 if (len1)
2566 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
2567 if (len2)
2568 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
2569
2570 /* If we don't have a constant length for the first, use the length
2571 of the second, if we know it. We don't require a constant for
2572 this case; some cost analysis could be done if both are available
2573 but neither is constant. For now, assume they're equally cheap
2574 unless one has side effects. If both strings have constant lengths,
2575 use the smaller. */
2576
2577 if (!len1)
2578 len = len2;
2579 else if (!len2)
2580 len = len1;
2581 else if (TREE_SIDE_EFFECTS (len1))
2582 len = len2;
2583 else if (TREE_SIDE_EFFECTS (len2))
2584 len = len1;
2585 else if (TREE_CODE (len1) != INTEGER_CST)
2586 len = len2;
2587 else if (TREE_CODE (len2) != INTEGER_CST)
2588 len = len1;
2589 else if (tree_int_cst_lt (len1, len2))
2590 len = len1;
2591 else
2592 len = len2;
2593
2594 /* If both arguments have side effects, we cannot optimize. */
2595 if (!len || TREE_SIDE_EFFECTS (len))
2596 return 0;
2597
2598 /* If we don't have POINTER_TYPE, call the function. */
2599 if (arg1_align == 0 || arg2_align == 0)
2600 return 0;
2601
2602 /* Make a place to write the result of the instruction. */
2603 result = target;
2604 if (! (result != 0
2605 && GET_CODE (result) == REG
2606 && GET_MODE (result) == insn_mode
2607 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
2608 result = gen_reg_rtx (insn_mode);
2609
2610 arg1_rtx = get_memory_rtx (arg1);
2611 arg2_rtx = get_memory_rtx (arg2);
2612 arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
2613 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
2614 GEN_INT (MIN (arg1_align, arg2_align)));
2615 if (!insn)
2616 return 0;
2617
2618 emit_insn (insn);
2619
2620 /* Return the value in the proper mode for this function. */
2621 mode = TYPE_MODE (TREE_TYPE (exp));
2622 if (GET_MODE (result) == mode)
2623 return result;
2624 if (target == 0)
2625 return convert_to_mode (mode, result, 0);
2626 convert_move (target, result, 0);
2627 return target;
2628 }
2629 #endif
2630 return 0;
2631 }
2632
/* Expand expression EXP, which is a call to the strncmp builtin.  Return 0
   if we failed; the caller should emit a normal call.  Otherwise try to get
   the result in TARGET, if convenient.  */
2636
2637 static rtx
expand_builtin_strncmp (exp, target, mode)
2639 tree exp;
2640 rtx target;
2641 enum machine_mode mode;
2642 {
2643 tree arglist = TREE_OPERAND (exp, 1);
2644 tree arg1, arg2, arg3;
2645 const char *p1, *p2;
2646
2647 if (!validate_arglist (arglist,
2648 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2649 return 0;
2650
2651 arg1 = TREE_VALUE (arglist);
2652 arg2 = TREE_VALUE (TREE_CHAIN (arglist));
2653 arg3 = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2654
2655 /* If the len parameter is zero, return zero. */
2656 if (host_integerp (arg3, 1) && tree_low_cst (arg3, 1) == 0)
2657 {
2658 /* Evaluate and ignore arg1 and arg2 in case they have
2659 side-effects. */
2660 expand_expr (arg1, const0_rtx, VOIDmode, EXPAND_NORMAL);
2661 expand_expr (arg2, const0_rtx, VOIDmode, EXPAND_NORMAL);
2662 return const0_rtx;
2663 }
2664
2665 p1 = c_getstr (arg1);
2666 p2 = c_getstr (arg2);
2667
2668 /* If all arguments are constant, evaluate at compile-time. */
2669 if (host_integerp (arg3, 1) && p1 && p2)
2670 {
2671 const int r = strncmp (p1, p2, tree_low_cst (arg3, 1));
2672 return (r < 0 ? constm1_rtx : (r > 0 ? const1_rtx : const0_rtx));
2673 }
2674
2675 /* If len == 1 or (either string parameter is "" and (len >= 1)),
2676 return (*(const u_char*)arg1 - *(const u_char*)arg2). */
2677 if (host_integerp (arg3, 1)
2678 && (tree_low_cst (arg3, 1) == 1
2679 || (tree_low_cst (arg3, 1) > 1
2680 && ((p1 && *p1 == '\0') || (p2 && *p2 == '\0')))))
2681 {
2682 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2683 tree cst_uchar_ptr_node = build_pointer_type (cst_uchar_node);
2684 tree ind1 =
2685 fold (build1 (CONVERT_EXPR, integer_type_node,
2686 build1 (INDIRECT_REF, cst_uchar_node,
2687 build1 (NOP_EXPR, cst_uchar_ptr_node, arg1))));
2688 tree ind2 =
2689 fold (build1 (CONVERT_EXPR, integer_type_node,
2690 build1 (INDIRECT_REF, cst_uchar_node,
2691 build1 (NOP_EXPR, cst_uchar_ptr_node, arg2))));
2692 tree result = fold (build (MINUS_EXPR, integer_type_node, ind1, ind2));
2693 return expand_expr (result, target, mode, EXPAND_NORMAL);
2694 }
2695
2696 /* If c_strlen can determine an expression for one of the string
2697 lengths, and it doesn't have side effects, then emit cmpstrsi
2698 using length MIN(strlen(string)+1, arg3). */
2699 #ifdef HAVE_cmpstrsi
2700 if (HAVE_cmpstrsi)
2701 {
2702 tree len, len1, len2;
2703 rtx arg1_rtx, arg2_rtx, arg3_rtx;
2704 rtx result, insn;
2705
2706 int arg1_align
2707 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
2708 int arg2_align
2709 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
2710 enum machine_mode insn_mode
2711 = insn_data[(int) CODE_FOR_cmpstrsi].operand[0].mode;
2712
2713 len1 = c_strlen (arg1);
2714 len2 = c_strlen (arg2);
2715
2716 if (len1)
2717 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
2718 if (len2)
2719 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
2720
2721 /* If we don't have a constant length for the first, use the length
2722 of the second, if we know it. We don't require a constant for
2723 this case; some cost analysis could be done if both are available
2724 but neither is constant. For now, assume they're equally cheap,
2725 unless one has side effects. If both strings have constant lengths,
2726 use the smaller. */
2727
2728 if (!len1)
2729 len = len2;
2730 else if (!len2)
2731 len = len1;
2732 else if (TREE_SIDE_EFFECTS (len1))
2733 len = len2;
2734 else if (TREE_SIDE_EFFECTS (len2))
2735 len = len1;
2736 else if (TREE_CODE (len1) != INTEGER_CST)
2737 len = len2;
2738 else if (TREE_CODE (len2) != INTEGER_CST)
2739 len = len1;
2740 else if (tree_int_cst_lt (len1, len2))
2741 len = len1;
2742 else
2743 len = len2;
2744
2745 /* If both arguments have side effects, we cannot optimize. */
2746 if (!len || TREE_SIDE_EFFECTS (len))
2747 return 0;
2748
2749 /* The actual new length parameter is MIN(len,arg3). */
2750 len = fold (build (MIN_EXPR, TREE_TYPE (len), len, arg3));
2751
2752 /* If we don't have POINTER_TYPE, call the function. */
2753 if (arg1_align == 0 || arg2_align == 0)
2754 return 0;
2755
2756 /* Make a place to write the result of the instruction. */
2757 result = target;
2758 if (! (result != 0
2759 && GET_CODE (result) == REG
2760 && GET_MODE (result) == insn_mode
2761 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
2762 result = gen_reg_rtx (insn_mode);
2763
2764 arg1_rtx = get_memory_rtx (arg1);
2765 arg2_rtx = get_memory_rtx (arg2);
2766 arg3_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
2767 insn = gen_cmpstrsi (result, arg1_rtx, arg2_rtx, arg3_rtx,
2768 GEN_INT (MIN (arg1_align, arg2_align)));
2769 if (!insn)
2770 return 0;
2771
2772 emit_insn (insn);
2773
2774 /* Return the value in the proper mode for this function. */
2775 mode = TYPE_MODE (TREE_TYPE (exp));
2776 if (GET_MODE (result) == mode)
2777 return result;
2778 if (target == 0)
2779 return convert_to_mode (mode, result, 0);
2780 convert_move (target, result, 0);
2781 return target;
2782 }
2783 #endif
2784 return 0;
2785 }
2786
/* Expand expression EXP, which is a call to the strcat builtin.
   Return 0 if we failed; the caller should emit a normal call.
   Otherwise try to get the result in TARGET, if convenient.  */
2790
2791 static rtx
expand_builtin_strcat (arglist, target, mode)
2793 tree arglist;
2794 rtx target;
2795 enum machine_mode mode;
2796 {
2797 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2798 return 0;
2799 else
2800 {
2801 tree dst = TREE_VALUE (arglist),
2802 src = TREE_VALUE (TREE_CHAIN (arglist));
2803 const char *p = c_getstr (src);
2804
2805 /* If the string length is zero, return the dst parameter. */
2806 if (p && *p == '\0')
2807 return expand_expr (dst, target, mode, EXPAND_NORMAL);
2808
2809 return 0;
2810 }
2811 }
2812
/* Expand expression EXP, which is a call to the strncat builtin.
   Return 0 if we failed; the caller should emit a normal call.
   Otherwise try to get the result in TARGET, if convenient.  */
2816
2817 static rtx
expand_builtin_strncat (arglist, target, mode)
2819 tree arglist;
2820 rtx target;
2821 enum machine_mode mode;
2822 {
2823 if (!validate_arglist (arglist,
2824 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
2825 return 0;
2826 else
2827 {
2828 tree dst = TREE_VALUE (arglist),
2829 src = TREE_VALUE (TREE_CHAIN (arglist)),
2830 len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
2831 const char *p = c_getstr (src);
2832
2833 /* If the requested length is zero, or the src parameter string
2834 length is zero, return the dst parameter. */
2835 if (integer_zerop (len) || (p && *p == '\0'))
2836 {
2837 /* Evaluate and ignore the src and len parameters in case
2838 they have side-effects. */
2839 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
2840 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
2841 return expand_expr (dst, target, mode, EXPAND_NORMAL);
2842 }
2843
2844 /* If the requested len is greater than or equal to the string
2845 length, call strcat. */
2846 if (TREE_CODE (len) == INTEGER_CST && p
2847 && compare_tree_int (len, strlen (p)) >= 0)
2848 {
2849 tree newarglist
2850 = tree_cons (NULL_TREE, dst, build_tree_list (NULL_TREE, src));
2851 tree fn = built_in_decls[BUILT_IN_STRCAT];
2852
2853 /* If the replacement _DECL isn't initialized, don't do the
2854 transformation. */
2855 if (!fn)
2856 return 0;
2857
2858 return expand_expr (build_function_call_expr (fn, newarglist),
2859 target, mode, EXPAND_NORMAL);
2860 }
2861 return 0;
2862 }
2863 }
2864
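/* Editorial illustration, not part of the original sources: the cases
   handled by the strncat expansion above.  Names are hypothetical; the
   block is kept under #if 0.  */
#if 0
void
strncat_expansion_examples (char *dst, const char *src, unsigned long n)
{
  /* Zero length, or an empty constant source: DST is the result and the
     other arguments are only evaluated for their side effects.  */
  strncat (dst, src, 0);
  strncat (dst, "", n);

  /* Constant source with len >= strlen (src): the bound can never take
     effect, so the call is rewritten as strcat (dst, "ab").  */
  strncat (dst, "ab", 5);
}
#endif
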
/* Expand expression EXP, which is a call to the strspn builtin.
   Return 0 if we failed; the caller should emit a normal call.
   Otherwise try to get the result in TARGET, if convenient.  */
2868
2869 static rtx
expand_builtin_strspn (arglist, target, mode)
2871 tree arglist;
2872 rtx target;
2873 enum machine_mode mode;
2874 {
2875 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2876 return 0;
2877 else
2878 {
2879 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2880 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
2881
2882 /* If both arguments are constants, evaluate at compile-time. */
2883 if (p1 && p2)
2884 {
2885 const size_t r = strspn (p1, p2);
2886 return expand_expr (size_int (r), target, mode, EXPAND_NORMAL);
2887 }
2888
2889 /* If either argument is "", return 0. */
2890 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
2891 {
2892 /* Evaluate and ignore both arguments in case either one has
2893 side-effects. */
2894 expand_expr (s1, const0_rtx, VOIDmode, EXPAND_NORMAL);
2895 expand_expr (s2, const0_rtx, VOIDmode, EXPAND_NORMAL);
2896 return const0_rtx;
2897 }
2898 return 0;
2899 }
2900 }
2901
/* Expand expression EXP, which is a call to the strcspn builtin.
   Return 0 if we failed; the caller should emit a normal call.
   Otherwise try to get the result in TARGET, if convenient.  */
2905
2906 static rtx
expand_builtin_strcspn (arglist, target, mode)
2908 tree arglist;
2909 rtx target;
2910 enum machine_mode mode;
2911 {
2912 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
2913 return 0;
2914 else
2915 {
2916 tree s1 = TREE_VALUE (arglist), s2 = TREE_VALUE (TREE_CHAIN (arglist));
2917 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
2918
2919 /* If both arguments are constants, evaluate at compile-time. */
2920 if (p1 && p2)
2921 {
2922 const size_t r = strcspn (p1, p2);
2923 return expand_expr (size_int (r), target, mode, EXPAND_NORMAL);
2924 }
2925
2926 /* If the first argument is "", return 0. */
2927 if (p1 && *p1 == '\0')
2928 {
2929 /* Evaluate and ignore argument s2 in case it has
2930 side-effects. */
2931 expand_expr (s2, const0_rtx, VOIDmode, EXPAND_NORMAL);
2932 return const0_rtx;
2933 }
2934
2935 /* If the second argument is "", return __builtin_strlen(s1). */
2936 if (p2 && *p2 == '\0')
2937 {
2938 tree newarglist = build_tree_list (NULL_TREE, s1),
2939 fn = built_in_decls[BUILT_IN_STRLEN];
2940
2941 /* If the replacement _DECL isn't initialized, don't do the
2942 transformation. */
2943 if (!fn)
2944 return 0;
2945
2946 return expand_expr (build_function_call_expr (fn, newarglist),
2947 target, mode, EXPAND_NORMAL);
2948 }
2949 return 0;
2950 }
2951 }
2952
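/* Editorial illustration, not part of the original sources: the foldings
   performed by the strspn and strcspn expansions above, with hypothetical
   names and kept under #if 0.  */
#if 0
void
strspn_strcspn_expansion_examples (const char *s)
{
  unsigned long a = strspn ("2003", "0123456789");  /* folded to 4 */
  unsigned long b = strspn (s, "");                 /* folded to 0 */
  unsigned long c = strcspn ("a,b", ",");           /* folded to 1 */
  unsigned long d = strcspn (s, "");                /* becomes strlen (s) */
}
#endif
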
2953 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
2954 if that's convenient. */
2955
2956 rtx
expand_builtin_saveregs ()
2958 {
2959 rtx val, seq;
2960
2961 /* Don't do __builtin_saveregs more than once in a function.
2962 Save the result of the first call and reuse it. */
2963 if (saveregs_value != 0)
2964 return saveregs_value;
2965
2966 /* When this function is called, it means that registers must be
2967 saved on entry to this function. So we migrate the call to the
2968 first insn of this function. */
2969
2970 start_sequence ();
2971
2972 #ifdef EXPAND_BUILTIN_SAVEREGS
2973 /* Do whatever the machine needs done in this case. */
2974 val = EXPAND_BUILTIN_SAVEREGS ();
2975 #else
2976 /* ??? We used to try and build up a call to the out of line function,
2977 guessing about what registers needed saving etc. This became much
2978 harder with __builtin_va_start, since we don't have a tree for a
2979 call to __builtin_saveregs to fall back on. There was exactly one
2980 port (i860) that used this code, and I'm unconvinced it could actually
2981 handle the general case. So we no longer try to handle anything
2982 weird and make the backend absorb the evil. */
2983
2984 error ("__builtin_saveregs not supported by this target");
2985 val = const0_rtx;
2986 #endif
2987
2988 seq = get_insns ();
2989 end_sequence ();
2990
2991 saveregs_value = val;
2992
2993 /* Put the insns after the NOTE that starts the function. If this
2994 is inside a start_sequence, make the outer-level insn chain current, so
2995 the code is placed at the start of the function. */
2996 push_topmost_sequence ();
2997 emit_insn_after (seq, get_insns ());
2998 pop_topmost_sequence ();
2999
3000 return val;
3001 }
3002
3003 /* __builtin_args_info (N) returns word N of the arg space info
3004 for the current function. The number and meanings of words
3005 is controlled by the definition of CUMULATIVE_ARGS. */
3006
3007 static rtx
expand_builtin_args_info (exp)
3009 tree exp;
3010 {
3011 tree arglist = TREE_OPERAND (exp, 1);
3012 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
int *word_ptr = (int *) &current_function_args_info;
3014 #if 0
/* These are used by the code below that is #if 0'ed away.  */
3016 int i;
3017 tree type, elts, result;
3018 #endif
3019
3020 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
3021 abort ();
3022
3023 if (arglist != 0)
3024 {
3025 if (!host_integerp (TREE_VALUE (arglist), 0))
3026 error ("argument of `__builtin_args_info' must be constant");
3027 else
3028 {
3029 HOST_WIDE_INT wordnum = tree_low_cst (TREE_VALUE (arglist), 0);
3030
3031 if (wordnum < 0 || wordnum >= nwords)
3032 error ("argument of `__builtin_args_info' out of range");
3033 else
3034 return GEN_INT (word_ptr[wordnum]);
3035 }
3036 }
3037 else
3038 error ("missing argument in `__builtin_args_info'");
3039
3040 return const0_rtx;
3041
3042 #if 0
3043 for (i = 0; i < nwords; i++)
3044 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
3045
3046 type = build_array_type (integer_type_node,
3047 build_index_type (build_int_2 (nwords, 0)));
3048 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
3049 TREE_CONSTANT (result) = 1;
3050 TREE_STATIC (result) = 1;
3051 result = build1 (INDIRECT_REF, build_pointer_type (type), result);
3052 TREE_CONSTANT (result) = 1;
3053 return expand_expr (result, NULL_RTX, VOIDmode, 0);
3054 #endif
3055 }
3056
3057 /* Expand ARGLIST, from a call to __builtin_next_arg. */
3058
3059 static rtx
expand_builtin_next_arg (arglist)
3061 tree arglist;
3062 {
3063 tree fntype = TREE_TYPE (current_function_decl);
3064
3065 if (TYPE_ARG_TYPES (fntype) == 0
3066 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
3067 == void_type_node))
3068 {
3069 error ("`va_start' used in function with fixed args");
3070 return const0_rtx;
3071 }
3072
3073 if (arglist)
3074 {
3075 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
3076 tree arg = TREE_VALUE (arglist);
3077
3078 /* Strip off all nops for the sake of the comparison. This
3079 is not quite the same as STRIP_NOPS. It does more.
3080 We must also strip off INDIRECT_EXPR for C++ reference
3081 parameters. */
3082 while (TREE_CODE (arg) == NOP_EXPR
3083 || TREE_CODE (arg) == CONVERT_EXPR
3084 || TREE_CODE (arg) == NON_LVALUE_EXPR
3085 || TREE_CODE (arg) == INDIRECT_REF)
3086 arg = TREE_OPERAND (arg, 0);
3087 if (arg != last_parm)
3088 warning ("second parameter of `va_start' not last named argument");
3089 }
3090 else
3091 /* Evidently an out of date version of <stdarg.h>; can't validate
3092 va_start's second argument, but can still work as intended. */
3093 warning ("`__builtin_next_arg' called without an argument");
3094
3095 return expand_binop (Pmode, add_optab,
3096 current_function_internal_arg_pointer,
3097 current_function_arg_offset_rtx,
3098 NULL_RTX, 0, OPTAB_LIB_WIDEN);
3099 }
3100
3101 /* Make it easier for the backends by protecting the valist argument
3102 from multiple evaluations. */
3103
3104 static tree
stabilize_va_list (valist, needs_lvalue)
3106 tree valist;
3107 int needs_lvalue;
3108 {
3109 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
3110 {
3111 if (TREE_SIDE_EFFECTS (valist))
3112 valist = save_expr (valist);
3113
3114 /* For this case, the backends will be expecting a pointer to
3115 TREE_TYPE (va_list_type_node), but it's possible we've
3116 actually been given an array (an actual va_list_type_node).
3117 So fix it. */
3118 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
3119 {
3120 tree p1 = build_pointer_type (TREE_TYPE (va_list_type_node));
3121 tree p2 = build_pointer_type (va_list_type_node);
3122
3123 valist = build1 (ADDR_EXPR, p2, valist);
3124 valist = fold (build1 (NOP_EXPR, p1, valist));
3125 }
3126 }
3127 else
3128 {
3129 tree pt;
3130
3131 if (! needs_lvalue)
3132 {
3133 if (! TREE_SIDE_EFFECTS (valist))
3134 return valist;
3135
3136 pt = build_pointer_type (va_list_type_node);
3137 valist = fold (build1 (ADDR_EXPR, pt, valist));
3138 TREE_SIDE_EFFECTS (valist) = 1;
3139 }
3140
3141 if (TREE_SIDE_EFFECTS (valist))
3142 valist = save_expr (valist);
3143 valist = fold (build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)),
3144 valist));
3145 }
3146
3147 return valist;
3148 }
3149
3150 /* The "standard" implementation of va_start: just assign `nextarg' to
3151 the variable. */
3152
3153 void
std_expand_builtin_va_start (valist, nextarg)
3155 tree valist;
3156 rtx nextarg;
3157 {
3158 tree t;
3159
3160 t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
3161 make_tree (ptr_type_node, nextarg));
3162 TREE_SIDE_EFFECTS (t) = 1;
3163
3164 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3165 }
3166
3167 /* Expand ARGLIST, from a call to __builtin_va_start. */
3168
3169 static rtx
expand_builtin_va_start (arglist)
3171 tree arglist;
3172 {
3173 rtx nextarg;
3174 tree chain, valist;
3175
3176 chain = TREE_CHAIN (arglist);
3177
3178 if (TREE_CHAIN (chain))
3179 error ("too many arguments to function `va_start'");
3180
3181 nextarg = expand_builtin_next_arg (chain);
3182 valist = stabilize_va_list (TREE_VALUE (arglist), 1);
3183
3184 #ifdef EXPAND_BUILTIN_VA_START
3185 EXPAND_BUILTIN_VA_START (valist, nextarg);
3186 #else
3187 std_expand_builtin_va_start (valist, nextarg);
3188 #endif
3189
3190 return const0_rtx;
3191 }
3192
3193 /* The "standard" implementation of va_arg: read the value from the
3194 current (padded) address and increment by the (padded) size. */
3195
3196 rtx
std_expand_builtin_va_arg (valist, type)
3198 tree valist, type;
3199 {
3200 tree addr_tree, t, type_size = NULL;
3201 tree align, alignm1;
3202 tree rounded_size;
3203 rtx addr;
3204
3205 /* Compute the rounded size of the type. */
3206 align = size_int (PARM_BOUNDARY / BITS_PER_UNIT);
3207 alignm1 = size_int (PARM_BOUNDARY / BITS_PER_UNIT - 1);
3208 if (type == error_mark_node
3209 || (type_size = TYPE_SIZE_UNIT (TYPE_MAIN_VARIANT (type))) == NULL
3210 || TREE_OVERFLOW (type_size))
3211 rounded_size = size_zero_node;
3212 else
3213 rounded_size = fold (build (MULT_EXPR, sizetype,
3214 fold (build (TRUNC_DIV_EXPR, sizetype,
3215 fold (build (PLUS_EXPR, sizetype,
3216 type_size, alignm1)),
3217 align)),
3218 align));
3219
3220 /* Get AP. */
3221 addr_tree = valist;
3222 if (PAD_VARARGS_DOWN && ! integer_zerop (rounded_size))
3223 {
3224 /* Small args are padded downward. */
3225 addr_tree = fold (build (PLUS_EXPR, TREE_TYPE (addr_tree), addr_tree,
3226 fold (build (COND_EXPR, sizetype,
3227 fold (build (GT_EXPR, sizetype,
3228 rounded_size,
3229 align)),
3230 size_zero_node,
3231 fold (build (MINUS_EXPR, sizetype,
3232 rounded_size,
3233 type_size))))));
3234 }
3235
3236 addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
3237 addr = copy_to_reg (addr);
3238
3239 /* Compute new value for AP. */
3240 if (! integer_zerop (rounded_size))
3241 {
3242 t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
3243 build (PLUS_EXPR, TREE_TYPE (valist), valist,
3244 rounded_size));
3245 TREE_SIDE_EFFECTS (t) = 1;
3246 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3247 }
3248
3249 return addr;
3250 }
3251
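/* Editorial illustration, not part of the original sources: the size
   rounding performed by std_expand_builtin_va_arg above.  With
   PARM_BOUNDARY / BITS_PER_UNIT == 4, a 6-byte argument occupies
   ((6 + 3) / 4) * 4 = 8 bytes of argument space, and with
   PAD_VARARGS_DOWN the read address of a small argument is additionally
   offset by rounded_size - type_size.  A hypothetical helper spelling
   out the arithmetic, kept under #if 0:  */
#if 0
static unsigned long
va_arg_rounded_size (unsigned long type_size, unsigned long align)
{
  return ((type_size + align - 1) / align) * align;
}
#endif
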
3252 /* Expand __builtin_va_arg, which is not really a builtin function, but
3253 a very special sort of operator. */
3254
3255 rtx
expand_builtin_va_arg (valist, type)
3257 tree valist, type;
3258 {
3259 rtx addr, result;
3260 tree promoted_type, want_va_type, have_va_type;
3261
3262 /* Verify that valist is of the proper type. */
3263
3264 want_va_type = va_list_type_node;
3265 have_va_type = TREE_TYPE (valist);
3266 if (TREE_CODE (want_va_type) == ARRAY_TYPE)
3267 {
3268 /* If va_list is an array type, the argument may have decayed
3269 to a pointer type, e.g. by being passed to another function.
3270 In that case, unwrap both types so that we can compare the
3271 underlying records. */
3272 if (TREE_CODE (have_va_type) == ARRAY_TYPE
3273 || TREE_CODE (have_va_type) == POINTER_TYPE)
3274 {
3275 want_va_type = TREE_TYPE (want_va_type);
3276 have_va_type = TREE_TYPE (have_va_type);
3277 }
3278 }
3279 if (TYPE_MAIN_VARIANT (want_va_type) != TYPE_MAIN_VARIANT (have_va_type))
3280 {
3281 error ("first argument to `va_arg' not of type `va_list'");
3282 addr = const0_rtx;
3283 }
3284
3285 /* Generate a diagnostic for requesting data of a type that cannot
3286 be passed through `...' due to type promotion at the call site. */
3287 else if ((promoted_type = (*lang_hooks.types.type_promotes_to) (type))
3288 != type)
3289 {
3290 const char *name = "<anonymous type>", *pname = 0;
3291 static bool gave_help;
3292
3293 if (TYPE_NAME (type))
3294 {
3295 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE)
3296 name = IDENTIFIER_POINTER (TYPE_NAME (type));
3297 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
3298 && DECL_NAME (TYPE_NAME (type)))
3299 name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
3300 }
3301 if (TYPE_NAME (promoted_type))
3302 {
3303 if (TREE_CODE (TYPE_NAME (promoted_type)) == IDENTIFIER_NODE)
3304 pname = IDENTIFIER_POINTER (TYPE_NAME (promoted_type));
3305 else if (TREE_CODE (TYPE_NAME (promoted_type)) == TYPE_DECL
3306 && DECL_NAME (TYPE_NAME (promoted_type)))
3307 pname = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (promoted_type)));
3308 }
3309
3310 /* Unfortunately, this is merely undefined, rather than a constraint
3311 violation, so we cannot make this an error. If this call is never
3312 executed, the program is still strictly conforming. */
3313 warning ("`%s' is promoted to `%s' when passed through `...'",
3314 name, pname);
3315 if (! gave_help)
3316 {
3317 gave_help = true;
3318 warning ("(so you should pass `%s' not `%s' to `va_arg')",
3319 pname, name);
3320 }
3321
3322 /* We can, however, treat "undefined" any way we please.
3323 Call abort to encourage the user to fix the program. */
3324 expand_builtin_trap ();
3325
3326 /* This is dead code, but go ahead and finish so that the
3327 mode of the result comes out right. */
3328 addr = const0_rtx;
3329 }
3330 else
3331 {
3332 /* Make it easier for the backends by protecting the valist argument
3333 from multiple evaluations. */
3334 valist = stabilize_va_list (valist, 0);
3335
3336 #ifdef EXPAND_BUILTIN_VA_ARG
3337 addr = EXPAND_BUILTIN_VA_ARG (valist, type);
3338 #else
3339 addr = std_expand_builtin_va_arg (valist, type);
3340 #endif
3341 }
3342
3343 #ifdef POINTERS_EXTEND_UNSIGNED
3344 if (GET_MODE (addr) != Pmode)
3345 addr = convert_memory_address (Pmode, addr);
3346 #endif
3347
3348 result = gen_rtx_MEM (TYPE_MODE (type), addr);
3349 set_mem_alias_set (result, get_varargs_alias_set ());
3350
3351 return result;
3352 }
3353
3354 /* Expand ARGLIST, from a call to __builtin_va_end. */
3355
3356 static rtx
expand_builtin_va_end (arglist)
3358 tree arglist;
3359 {
3360 tree valist = TREE_VALUE (arglist);
3361
3362 #ifdef EXPAND_BUILTIN_VA_END
3363 valist = stabilize_va_list (valist, 0);
3364 EXPAND_BUILTIN_VA_END (arglist);
3365 #else
3366 /* Evaluate for side effects, if needed. I hate macros that don't
3367 do that. */
3368 if (TREE_SIDE_EFFECTS (valist))
3369 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
3370 #endif
3371
3372 return const0_rtx;
3373 }
3374
3375 /* Expand ARGLIST, from a call to __builtin_va_copy. We do this as a
3376 builtin rather than just as an assignment in stdarg.h because of the
3377 nastiness of array-type va_list types. */
3378
3379 static rtx
expand_builtin_va_copy (arglist)
3381 tree arglist;
3382 {
3383 tree dst, src, t;
3384
3385 dst = TREE_VALUE (arglist);
3386 src = TREE_VALUE (TREE_CHAIN (arglist));
3387
3388 dst = stabilize_va_list (dst, 1);
3389 src = stabilize_va_list (src, 0);
3390
3391 if (TREE_CODE (va_list_type_node) != ARRAY_TYPE)
3392 {
3393 t = build (MODIFY_EXPR, va_list_type_node, dst, src);
3394 TREE_SIDE_EFFECTS (t) = 1;
3395 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
3396 }
3397 else
3398 {
3399 rtx dstb, srcb, size;
3400
3401 /* Evaluate to pointers. */
3402 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
3403 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
3404 size = expand_expr (TYPE_SIZE_UNIT (va_list_type_node), NULL_RTX,
3405 VOIDmode, EXPAND_NORMAL);
3406
3407 #ifdef POINTERS_EXTEND_UNSIGNED
3408 if (GET_MODE (dstb) != Pmode)
3409 dstb = convert_memory_address (Pmode, dstb);
3410
3411 if (GET_MODE (srcb) != Pmode)
3412 srcb = convert_memory_address (Pmode, srcb);
3413 #endif
3414
3415 /* "Dereference" to BLKmode memories. */
3416 dstb = gen_rtx_MEM (BLKmode, dstb);
3417 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
3418 set_mem_align (dstb, TYPE_ALIGN (va_list_type_node));
3419 srcb = gen_rtx_MEM (BLKmode, srcb);
3420 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
3421 set_mem_align (srcb, TYPE_ALIGN (va_list_type_node));
3422
3423 /* Copy. */
3424 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
3425 }
3426
3427 return const0_rtx;
3428 }
3429
3430 /* Expand a call to one of the builtin functions __builtin_frame_address or
3431 __builtin_return_address. */
3432
3433 static rtx
expand_builtin_frame_address (exp)
3435 tree exp;
3436 {
3437 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3438 tree arglist = TREE_OPERAND (exp, 1);
3439
3440 /* The argument must be a nonnegative integer constant.
3441 It counts the number of frames to scan up the stack.
3442 The value is the return address saved in that frame. */
3443 if (arglist == 0)
3444 /* Warning about missing arg was already issued. */
3445 return const0_rtx;
3446 else if (! host_integerp (TREE_VALUE (arglist), 1))
3447 {
3448 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
3449 error ("invalid arg to `__builtin_frame_address'");
3450 else
3451 error ("invalid arg to `__builtin_return_address'");
3452 return const0_rtx;
3453 }
3454 else
3455 {
3456 rtx tem
3457 = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
3458 tree_low_cst (TREE_VALUE (arglist), 1),
3459 hard_frame_pointer_rtx);
3460
3461 /* Some ports cannot access arbitrary stack frames. */
3462 if (tem == NULL)
3463 {
3464 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
3465 warning ("unsupported arg to `__builtin_frame_address'");
3466 else
3467 warning ("unsupported arg to `__builtin_return_address'");
3468 return const0_rtx;
3469 }
3470
3471 /* For __builtin_frame_address, return what we've got. */
3472 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
3473 return tem;
3474
3475 if (GET_CODE (tem) != REG
3476 && ! CONSTANT_P (tem))
3477 tem = copy_to_mode_reg (Pmode, tem);
3478 return tem;
3479 }
3480 }
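
/* Illustrative usage, assuming a port that can walk arbitrary frames:

     void *ra = __builtin_return_address (0);   current return address
     void *fp = __builtin_frame_address (1);    caller's frame address

   Ports that cannot scan up the stack make expand_builtin_return_addr
   return NULL for such counts, which is turned into the warning and the
   const0_rtx result above.  */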
3481
3482 /* Expand a call to the alloca builtin, with arguments ARGLIST. Return 0 if
3483 we failed and the caller should emit a normal call, otherwise try to get
3484 the result in TARGET, if convenient. */
3485
3486 static rtx
3487 expand_builtin_alloca (arglist, target)
3488 tree arglist;
3489 rtx target;
3490 {
3491 rtx op0;
3492 rtx result;
3493
3494 if (!validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
3495 return 0;
3496
3497 /* Compute the argument. */
3498 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
3499
3500 /* Allocate the desired space. */
3501 result = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
3502
3503 #ifdef POINTERS_EXTEND_UNSIGNED
3504 if (GET_MODE (result) != ptr_mode)
3505 result = convert_memory_address (ptr_mode, result);
3506 #endif
3507
3508 return result;
3509 }
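
/* Illustrative usage sketch:

     char *buf = __builtin_alloca (n);

   The size expression is expanded first and handed to
   allocate_dynamic_stack_space, so the returned block lives until the
   calling function exits rather than until the enclosing block ends.  */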
3510
3511 /* Expand a call to the ffs builtin. The arguments are in ARGLIST.
3512 Return 0 if a normal call should be emitted rather than expanding the
3513 function in-line. If convenient, the result should be placed in TARGET.
3514 SUBTARGET may be used as the target for computing one of EXP's operands. */
3515
3516 static rtx
3517 expand_builtin_ffs (arglist, target, subtarget)
3518 tree arglist;
3519 rtx target, subtarget;
3520 {
3521 rtx op0;
3522 if (!validate_arglist (arglist, INTEGER_TYPE, VOID_TYPE))
3523 return 0;
3524
3525 /* Compute the argument. */
3526 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
3527 /* Compute ffs, into TARGET if possible.
3528 Set TARGET to wherever the result comes back. */
3529 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
3530 ffs_optab, op0, target, 1);
3531 if (target == 0)
3532 abort ();
3533 return target;
3534 }
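
/* For reference, the ffs semantics being open-coded here: the result is
   one plus the index of the least significant set bit, or zero for a
   zero argument.  Illustratively:

     ffs (0)    == 0
     ffs (1)    == 1
     ffs (0x18) == 4

   which is what the ffs_optab expansion is expected to produce.  */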
3535
3536 /* If the string passed to fputs is a constant, attempt to transform the
3537 call into fputc (one character), fwrite (longer), or nothing (empty). */
3538
3539 static rtx
3540 expand_builtin_fputs (arglist, ignore, unlocked)
3541 tree arglist;
3542 int ignore;
3543 int unlocked;
3544 {
3545 tree len, fn;
3546 tree fn_fputc = unlocked ? built_in_decls[BUILT_IN_FPUTC_UNLOCKED]
3547 : built_in_decls[BUILT_IN_FPUTC];
3548 tree fn_fwrite = unlocked ? built_in_decls[BUILT_IN_FWRITE_UNLOCKED]
3549 : built_in_decls[BUILT_IN_FWRITE];
3550
3551 /* If the return value is used, or the replacement _DECL isn't
3552 initialized, don't do the transformation. */
3553 if (!ignore || !fn_fputc || !fn_fwrite)
3554 return 0;
3555
3556 /* Verify the arguments in the original call. */
3557 if (!validate_arglist (arglist, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3558 return 0;
3559
3560 /* Get the length of the string passed to fputs. If the length
3561 can't be determined, punt. */
3562 if (!(len = c_strlen (TREE_VALUE (arglist)))
3563 || TREE_CODE (len) != INTEGER_CST)
3564 return 0;
3565
3566 switch (compare_tree_int (len, 1))
3567 {
3568 case -1: /* length is 0, delete the call entirely.  */
3569 {
3570 /* Evaluate and ignore the argument in case it has
3571 side-effects. */
3572 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)), const0_rtx,
3573 VOIDmode, EXPAND_NORMAL);
3574 return const0_rtx;
3575 }
3576 case 0: /* length is 1, call fputc. */
3577 {
3578 const char *p = c_getstr (TREE_VALUE (arglist));
3579
3580 if (p != NULL)
3581 {
3582 /* New argument list transforming fputs(string, stream) to
3583 fputc(string[0], stream). */
3584 arglist =
3585 build_tree_list (NULL_TREE, TREE_VALUE (TREE_CHAIN (arglist)));
3586 arglist =
3587 tree_cons (NULL_TREE, build_int_2 (p[0], 0), arglist);
3588 fn = fn_fputc;
3589 break;
3590 }
3591 }
3592 /* FALLTHROUGH */
3593 case 1: /* length is greater than 1, call fwrite. */
3594 {
3595 tree string_arg;
3596
3597 /* If optimizing for size keep fputs. */
3598 if (optimize_size)
3599 return 0;
3600 string_arg = TREE_VALUE (arglist);
3601 /* New argument list transforming fputs(string, stream) to
3602 fwrite(string, 1, len, stream). */
3603 arglist = build_tree_list (NULL_TREE, TREE_VALUE (TREE_CHAIN (arglist)));
3604 arglist = tree_cons (NULL_TREE, len, arglist);
3605 arglist = tree_cons (NULL_TREE, size_one_node, arglist);
3606 arglist = tree_cons (NULL_TREE, string_arg, arglist);
3607 fn = fn_fwrite;
3608 break;
3609 }
3610 default:
3611 abort ();
3612 }
3613
3614 return expand_expr (build_function_call_expr (fn, arglist),
3615 (ignore ? const0_rtx : NULL_RTX),
3616 VOIDmode, EXPAND_NORMAL);
3617 }
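
/* Illustrative effect of the transformation above, assuming FP is a
   FILE * expression with no side effects of its own:

     fputs ("",    fp);   becomes   (nothing; fp is still evaluated)
     fputs ("x",   fp);   becomes   fputc ('x', fp);
     fputs ("abc", fp);   becomes   fwrite ("abc", 1, 3, fp);

   The fwrite form is skipped when optimizing for size, since the extra
   arguments usually make the call sequence larger than plain fputs.  */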
3618
3619 /* Expand a call to __builtin_expect. We return our argument and emit a
3620 NOTE_INSN_EXPECTED_VALUE note. This is the expansion of __builtin_expect in
3621 a non-jump context. */
3622
3623 static rtx
3624 expand_builtin_expect (arglist, target)
3625 tree arglist;
3626 rtx target;
3627 {
3628 tree exp, c;
3629 rtx note, rtx_c;
3630
3631 if (arglist == NULL_TREE
3632 || TREE_CHAIN (arglist) == NULL_TREE)
3633 return const0_rtx;
3634 exp = TREE_VALUE (arglist);
3635 c = TREE_VALUE (TREE_CHAIN (arglist));
3636
3637 if (TREE_CODE (c) != INTEGER_CST)
3638 {
3639 error ("second arg to `__builtin_expect' must be a constant");
3640 c = integer_zero_node;
3641 }
3642
3643 target = expand_expr (exp, target, VOIDmode, EXPAND_NORMAL);
3644
3645 /* Don't bother with expected value notes for integral constants. */
3646 if (flag_guess_branch_prob && GET_CODE (target) != CONST_INT)
3647 {
3648 /* We do need to force this into a register so that we can be
3649 reasonably sure of interpreting the branch condition correctly
3650 later. */
3651 target = force_reg (GET_MODE (target), target);
3652
3653 rtx_c = expand_expr (c, NULL_RTX, GET_MODE (target), EXPAND_NORMAL);
3654
3655 note = emit_note (NULL, NOTE_INSN_EXPECTED_VALUE);
3656 NOTE_EXPECTED_VALUE (note) = gen_rtx_EQ (VOIDmode, target, rtx_c);
3657 }
3658
3659 return target;
3660 }
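
/* Typical source-level usage, for reference:

     if (__builtin_expect (ptr != NULL, 1))
       ...                                    (likely path)

   The second argument must be an integral constant; the note emitted
   above records TARGET == C so that branch prediction can later favor
   the expected outcome.  */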
3661
3662 /* Like expand_builtin_expect, except do this in a jump context. This is
3663 called from do_jump if the conditional is a __builtin_expect. Return either
3664 a list of insns to emit the jump or NULL if we cannot optimize
3665 __builtin_expect. We need to optimize this at jump time so that machines
3666 like the PowerPC don't turn the test into a SCC operation, and then jump
3667 based on the test being 0/1. */
3668
3669 rtx
3670 expand_builtin_expect_jump (exp, if_false_label, if_true_label)
3671 tree exp;
3672 rtx if_false_label;
3673 rtx if_true_label;
3674 {
3675 tree arglist = TREE_OPERAND (exp, 1);
3676 tree arg0 = TREE_VALUE (arglist);
3677 tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
3678 rtx ret = NULL_RTX;
3679
3680 /* Only handle __builtin_expect (test, 0) and
3681 __builtin_expect (test, 1). */
3682 if (TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
3683 && (integer_zerop (arg1) || integer_onep (arg1)))
3684 {
3685 rtx insn, drop_through_label;
3686
3687 /* Expand the jump insns. */
3688 start_sequence ();
3689 do_jump (arg0, if_false_label, if_true_label);
3690 ret = get_insns ();
3691
3692 drop_through_label = get_last_insn ();
3693 if (drop_through_label && GET_CODE (drop_through_label) == NOTE)
3694 drop_through_label = prev_nonnote_insn (drop_through_label);
3695 if (drop_through_label && GET_CODE (drop_through_label) != CODE_LABEL)
3696 drop_through_label = NULL_RTX;
3697 end_sequence ();
3698
3699 if (! if_true_label)
3700 if_true_label = drop_through_label;
3701 if (! if_false_label)
3702 if_false_label = drop_through_label;
3703
3704 /* Now that the __builtin_expect has been validated, go through and add
3705 the predictions to each of the conditional jumps. If we run into
3706 anything unexpected, just give up and generate the 'safe' code of
3707 doing an SCC operation and then branching on that. */
3708 insn = ret;
3709 while (insn != NULL_RTX)
3710 {
3711 rtx next = NEXT_INSN (insn);
3712 rtx pattern;
3713
3714 if (GET_CODE (insn) == JUMP_INSN && any_condjump_p (insn)
3715 && (pattern = pc_set (insn)) != NULL_RTX)
3716 {
3717 rtx ifelse = SET_SRC (pattern);
3718 rtx label;
3719 int taken;
3720
3721 if (GET_CODE (ifelse) != IF_THEN_ELSE)
3722 goto do_next_insn;
3723
3724 if (GET_CODE (XEXP (ifelse, 1)) == LABEL_REF)
3725 {
3726 taken = 1;
3727 label = XEXP (XEXP (ifelse, 1), 0);
3728 }
3729 /* An inverted jump reverses the probabilities. */
3730 else if (GET_CODE (XEXP (ifelse, 2)) == LABEL_REF)
3731 {
3732 taken = 0;
3733 label = XEXP (XEXP (ifelse, 2), 0);
3734 }
3735 /* We shouldn't have to worry about conditional returns during
3736 the expansion stage, but handle it gracefully anyway. */
3737 else if (GET_CODE (XEXP (ifelse, 1)) == RETURN)
3738 {
3739 taken = 1;
3740 label = NULL_RTX;
3741 }
3742 /* An inverted return reverses the probabilities. */
3743 else if (GET_CODE (XEXP (ifelse, 2)) == RETURN)
3744 {
3745 taken = 0;
3746 label = NULL_RTX;
3747 }
3748 else
3749 goto do_next_insn;
3750
3751 /* If the test is expected to fail, reverse the
3752 probabilities. */
3753 if (integer_zerop (arg1))
3754 taken = 1 - taken;
3755
3756 /* If we are jumping to the false label, reverse the
3757 probabilities. */
3758 if (label == NULL_RTX)
3759 ; /* conditional return */
3760 else if (label == if_false_label)
3761 taken = 1 - taken;
3762 else if (label != if_true_label)
3763 goto do_next_insn;
3764
3765 predict_insn_def (insn, PRED_BUILTIN_EXPECT, taken);
3766 }
3767
3768 do_next_insn:
3769 insn = next;
3770 }
3771 }
3772
3773 return ret;
3774 }
3775
3776 void
3777 expand_builtin_trap ()
3778 {
3779 #ifdef HAVE_trap
3780 if (HAVE_trap)
3781 emit_insn (gen_trap ());
3782 else
3783 #endif
3784 emit_library_call (abort_libfunc, LCT_NORETURN, VOIDmode, 0);
3785 emit_barrier ();
3786 }
3787
3788 /* Expand an expression EXP that calls a built-in function,
3789 with result going to TARGET if that's convenient
3790 (and in mode MODE if that's convenient).
3791 SUBTARGET may be used as the target for computing one of EXP's operands.
3792 IGNORE is nonzero if the value is to be ignored. */
3793
3794 rtx
3795 expand_builtin (exp, target, subtarget, mode, ignore)
3796 tree exp;
3797 rtx target;
3798 rtx subtarget;
3799 enum machine_mode mode;
3800 int ignore;
3801 {
3802 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
3803 tree arglist = TREE_OPERAND (exp, 1);
3804 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
3805
3806 /* Perform postincrements before expanding builtin functions.  */
3807 emit_queue ();
3808
3809 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
3810 return (*targetm.expand_builtin) (exp, target, subtarget, mode, ignore);
3811
3812 /* When not optimizing, generate calls to library functions for a certain
3813 set of builtins. */
3814 if (!optimize && !CALLED_AS_BUILT_IN (fndecl))
3815 switch (fcode)
3816 {
3817 case BUILT_IN_SQRT:
3818 case BUILT_IN_SQRTF:
3819 case BUILT_IN_SQRTL:
3820 case BUILT_IN_SIN:
3821 case BUILT_IN_SINF:
3822 case BUILT_IN_SINL:
3823 case BUILT_IN_COS:
3824 case BUILT_IN_COSF:
3825 case BUILT_IN_COSL:
3826 case BUILT_IN_EXP:
3827 case BUILT_IN_EXPF:
3828 case BUILT_IN_EXPL:
3829 case BUILT_IN_MEMSET:
3830 case BUILT_IN_MEMCPY:
3831 case BUILT_IN_MEMCMP:
3832 case BUILT_IN_BCMP:
3833 case BUILT_IN_BZERO:
3834 case BUILT_IN_INDEX:
3835 case BUILT_IN_RINDEX:
3836 case BUILT_IN_STRCHR:
3837 case BUILT_IN_STRRCHR:
3838 case BUILT_IN_STRLEN:
3839 case BUILT_IN_STRCPY:
3840 case BUILT_IN_STRNCPY:
3841 case BUILT_IN_STRNCMP:
3842 case BUILT_IN_STRSTR:
3843 case BUILT_IN_STRPBRK:
3844 case BUILT_IN_STRCAT:
3845 case BUILT_IN_STRNCAT:
3846 case BUILT_IN_STRSPN:
3847 case BUILT_IN_STRCSPN:
3848 case BUILT_IN_STRCMP:
3849 case BUILT_IN_FFS:
3850 case BUILT_IN_PUTCHAR:
3851 case BUILT_IN_PUTS:
3852 case BUILT_IN_PRINTF:
3853 case BUILT_IN_FPUTC:
3854 case BUILT_IN_FPUTS:
3855 case BUILT_IN_FWRITE:
3856 case BUILT_IN_PUTCHAR_UNLOCKED:
3857 case BUILT_IN_PUTS_UNLOCKED:
3858 case BUILT_IN_PRINTF_UNLOCKED:
3859 case BUILT_IN_FPUTC_UNLOCKED:
3860 case BUILT_IN_FPUTS_UNLOCKED:
3861 case BUILT_IN_FWRITE_UNLOCKED:
3862 return expand_call (exp, target, ignore);
3863
3864 default:
3865 break;
3866 }
3867
3868 switch (fcode)
3869 {
3870 case BUILT_IN_ABS:
3871 case BUILT_IN_LABS:
3872 case BUILT_IN_LLABS:
3873 case BUILT_IN_IMAXABS:
3874 case BUILT_IN_FABS:
3875 case BUILT_IN_FABSF:
3876 case BUILT_IN_FABSL:
3877 /* build_function_call changes these into ABS_EXPR. */
3878 abort ();
3879
3880 case BUILT_IN_CONJ:
3881 case BUILT_IN_CONJF:
3882 case BUILT_IN_CONJL:
3883 case BUILT_IN_CREAL:
3884 case BUILT_IN_CREALF:
3885 case BUILT_IN_CREALL:
3886 case BUILT_IN_CIMAG:
3887 case BUILT_IN_CIMAGF:
3888 case BUILT_IN_CIMAGL:
3889 /* expand_tree_builtin changes these into CONJ_EXPR, REALPART_EXPR
3890 and IMAGPART_EXPR. */
3891 abort ();
3892
3893 case BUILT_IN_SIN:
3894 case BUILT_IN_SINF:
3895 case BUILT_IN_SINL:
3896 case BUILT_IN_COS:
3897 case BUILT_IN_COSF:
3898 case BUILT_IN_COSL:
3899 case BUILT_IN_EXP:
3900 case BUILT_IN_EXPF:
3901 case BUILT_IN_EXPL:
3902 case BUILT_IN_LOG:
3903 case BUILT_IN_LOGF:
3904 case BUILT_IN_LOGL:
3905 /* Treat these like sqrt only if unsafe math optimizations are allowed,
3906 because of possible accuracy problems. */
3907 if (! flag_unsafe_math_optimizations)
3908 break;
3909 case BUILT_IN_SQRT:
3910 case BUILT_IN_SQRTF:
3911 case BUILT_IN_SQRTL:
3912 target = expand_builtin_mathfn (exp, target, subtarget);
3913 if (target)
3914 return target;
3915 break;
3916
3917 case BUILT_IN_APPLY_ARGS:
3918 return expand_builtin_apply_args ();
3919
3920 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
3921 FUNCTION with a copy of the parameters described by
3922 ARGUMENTS, and ARGSIZE. It returns a block of memory
3923 allocated on the stack into which is stored all the registers
3924 that might possibly be used for returning the result of a
3925 function. ARGUMENTS is the value returned by
3926 __builtin_apply_args. ARGSIZE is the number of bytes of
3927 arguments that must be copied. ??? How should this value be
3928 computed? We'll also need a safe worst case value for varargs
3929 functions. */
3930 case BUILT_IN_APPLY:
3931 if (!validate_arglist (arglist, POINTER_TYPE,
3932 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
3933 && !validate_arglist (arglist, REFERENCE_TYPE,
3934 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3935 return const0_rtx;
3936 else
3937 {
3938 int i;
3939 tree t;
3940 rtx ops[3];
3941
3942 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
3943 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
3944
3945 return expand_builtin_apply (ops[0], ops[1], ops[2]);
3946 }
3947
3948 /* __builtin_return (RESULT) causes the function to return the
3949 value described by RESULT. RESULT is the address of the block of
3950 memory returned by __builtin_apply. */
3951 case BUILT_IN_RETURN:
3952 if (validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
3953 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
3954 NULL_RTX, VOIDmode, 0));
3955 return const0_rtx;
3956
3957 case BUILT_IN_SAVEREGS:
3958 return expand_builtin_saveregs ();
3959
3960 case BUILT_IN_ARGS_INFO:
3961 return expand_builtin_args_info (exp);
3962
3963 /* Return the address of the first anonymous stack arg. */
3964 case BUILT_IN_NEXT_ARG:
3965 return expand_builtin_next_arg (arglist);
3966
3967 case BUILT_IN_CLASSIFY_TYPE:
3968 return expand_builtin_classify_type (arglist);
3969
3970 case BUILT_IN_CONSTANT_P:
3971 return expand_builtin_constant_p (exp);
3972
3973 case BUILT_IN_FRAME_ADDRESS:
3974 case BUILT_IN_RETURN_ADDRESS:
3975 return expand_builtin_frame_address (exp);
3976
3977 /* Returns the address of the area where the structure is returned,
3978 or 0 otherwise. */
3979 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
3980 if (arglist != 0
3981 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
3982 || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
3983 return const0_rtx;
3984 else
3985 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
3986
3987 case BUILT_IN_ALLOCA:
3988 target = expand_builtin_alloca (arglist, target);
3989 if (target)
3990 return target;
3991 break;
3992
3993 case BUILT_IN_FFS:
3994 target = expand_builtin_ffs (arglist, target, subtarget);
3995 if (target)
3996 return target;
3997 break;
3998
3999 case BUILT_IN_STRLEN:
4000 target = expand_builtin_strlen (exp, target);
4001 if (target)
4002 return target;
4003 break;
4004
4005 case BUILT_IN_STRCPY:
4006 #ifndef NO_UNSAFE_BUILTINS
4007 target = expand_builtin_strcpy (exp, target, mode);
4008 if (target)
4009 return target;
4010 #endif
4011 break;
4012
4013 case BUILT_IN_STRNCPY:
4014 target = expand_builtin_strncpy (arglist, target, mode);
4015 if (target)
4016 return target;
4017 break;
4018
4019 case BUILT_IN_STRCAT:
4020 #ifndef NO_UNSAFE_BUILTINS
4021 target = expand_builtin_strcat (arglist, target, mode);
4022 if (target)
4023 return target;
4024 #endif
4025 break;
4026
4027 case BUILT_IN_STRNCAT:
4028 target = expand_builtin_strncat (arglist, target, mode);
4029 if (target)
4030 return target;
4031 break;
4032
4033 case BUILT_IN_STRSPN:
4034 target = expand_builtin_strspn (arglist, target, mode);
4035 if (target)
4036 return target;
4037 break;
4038
4039 case BUILT_IN_STRCSPN:
4040 target = expand_builtin_strcspn (arglist, target, mode);
4041 if (target)
4042 return target;
4043 break;
4044
4045 case BUILT_IN_STRSTR:
4046 target = expand_builtin_strstr (arglist, target, mode);
4047 if (target)
4048 return target;
4049 break;
4050
4051 case BUILT_IN_STRPBRK:
4052 target = expand_builtin_strpbrk (arglist, target, mode);
4053 if (target)
4054 return target;
4055 break;
4056
4057 case BUILT_IN_INDEX:
4058 case BUILT_IN_STRCHR:
4059 target = expand_builtin_strchr (arglist, target, mode);
4060 if (target)
4061 return target;
4062 break;
4063
4064 case BUILT_IN_RINDEX:
4065 case BUILT_IN_STRRCHR:
4066 target = expand_builtin_strrchr (arglist, target, mode);
4067 if (target)
4068 return target;
4069 break;
4070
4071 case BUILT_IN_MEMCPY:
4072 target = expand_builtin_memcpy (arglist, target, mode);
4073 if (target)
4074 return target;
4075 break;
4076
4077 case BUILT_IN_MEMSET:
4078 target = expand_builtin_memset (exp, target, mode);
4079 if (target)
4080 return target;
4081 break;
4082
4083 case BUILT_IN_BZERO:
4084 target = expand_builtin_bzero (exp);
4085 if (target)
4086 return target;
4087 break;
4088
4089 case BUILT_IN_STRCMP:
4090 target = expand_builtin_strcmp (exp, target, mode);
4091 if (target)
4092 return target;
4093 break;
4094
4095 case BUILT_IN_STRNCMP:
4096 target = expand_builtin_strncmp (exp, target, mode);
4097 if (target)
4098 return target;
4099 break;
4100
4101 case BUILT_IN_BCMP:
4102 case BUILT_IN_MEMCMP:
4103 target = expand_builtin_memcmp (exp, arglist, target, mode);
4104 if (target)
4105 return target;
4106 break;
4107
4108 case BUILT_IN_SETJMP:
4109 target = expand_builtin_setjmp (arglist, target);
4110 if (target)
4111 return target;
4112 break;
4113
4114 /* __builtin_longjmp is passed a pointer to an array of five words.
4115 It's similar to the C library longjmp function but works with
4116 __builtin_setjmp above. */
4117 case BUILT_IN_LONGJMP:
4118 if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
4119 break;
4120 else
4121 {
4122 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
4123 VOIDmode, 0);
4124 rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
4125 NULL_RTX, VOIDmode, 0);
4126
4127 if (value != const1_rtx)
4128 {
4129 error ("__builtin_longjmp second argument must be 1");
4130 return const0_rtx;
4131 }
4132
4133 expand_builtin_longjmp (buf_addr, value);
4134 return const0_rtx;
4135 }
4136
4137 case BUILT_IN_TRAP:
4138 expand_builtin_trap ();
4139 return const0_rtx;
4140
4141 case BUILT_IN_FPUTS:
4142 target = expand_builtin_fputs (arglist, ignore, /*unlocked=*/ 0);
4143 if (target)
4144 return target;
4145 break;
4146 case BUILT_IN_FPUTS_UNLOCKED:
4147 target = expand_builtin_fputs (arglist, ignore, /*unlocked=*/ 1);
4148 if (target)
4149 return target;
4150 break;
4151
4152 /* Various hooks for the DWARF 2 __throw routine. */
4153 case BUILT_IN_UNWIND_INIT:
4154 expand_builtin_unwind_init ();
4155 return const0_rtx;
4156 case BUILT_IN_DWARF_CFA:
4157 return virtual_cfa_rtx;
4158 #ifdef DWARF2_UNWIND_INFO
4159 case BUILT_IN_DWARF_SP_COLUMN:
4160 return expand_builtin_dwarf_sp_column ();
4161 case BUILT_IN_INIT_DWARF_REG_SIZES:
4162 expand_builtin_init_dwarf_reg_sizes (TREE_VALUE (arglist));
4163 return const0_rtx;
4164 #endif
4165 case BUILT_IN_FROB_RETURN_ADDR:
4166 return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
4167 case BUILT_IN_EXTRACT_RETURN_ADDR:
4168 return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
4169 case BUILT_IN_EH_RETURN:
4170 expand_builtin_eh_return (TREE_VALUE (arglist),
4171 TREE_VALUE (TREE_CHAIN (arglist)));
4172 return const0_rtx;
4173 #ifdef EH_RETURN_DATA_REGNO
4174 case BUILT_IN_EH_RETURN_DATA_REGNO:
4175 return expand_builtin_eh_return_data_regno (arglist);
4176 #endif
4177 case BUILT_IN_VA_START:
4178 case BUILT_IN_STDARG_START:
4179 return expand_builtin_va_start (arglist);
4180 case BUILT_IN_VA_END:
4181 return expand_builtin_va_end (arglist);
4182 case BUILT_IN_VA_COPY:
4183 return expand_builtin_va_copy (arglist);
4184 case BUILT_IN_EXPECT:
4185 return expand_builtin_expect (arglist, target);
4186 case BUILT_IN_PREFETCH:
4187 expand_builtin_prefetch (arglist);
4188 return const0_rtx;
4189
4190
4191 default: /* Just do a library call if the builtin is unknown.  */
4192 if (!DECL_ASSEMBLER_NAME_SET_P (fndecl))
4193 error ("built-in function `%s' not currently supported",
4194 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
4195 }
4196
4197 /* The switch statement above can drop through to cause the function
4198 to be called normally. */
4199 return expand_call (exp, target, ignore);
4200 }
4201
4202 /* Fold a call to __builtin_constant_p, if we know it will evaluate to a
4203 constant. ARGLIST is the argument list of the call. */
4204
4205 static tree
4206 fold_builtin_constant_p (arglist)
4207 tree arglist;
4208 {
4209 if (arglist == 0)
4210 return 0;
4211
4212 arglist = TREE_VALUE (arglist);
4213
4214 /* We return 1 for a numeric type that's known to be a constant
4215 value at compile-time or for an aggregate type that's a
4216 literal constant. */
4217 STRIP_NOPS (arglist);
4218
4219 /* If we know this is a constant, return integer_one_node. */
4220 if (TREE_CODE_CLASS (TREE_CODE (arglist)) == 'c'
4221 || (TREE_CODE (arglist) == CONSTRUCTOR
4222 && TREE_CONSTANT (arglist))
4223 || (TREE_CODE (arglist) == ADDR_EXPR
4224 && TREE_CODE (TREE_OPERAND (arglist, 0)) == STRING_CST))
4225 return integer_one_node;
4226
4227 /* If we aren't going to be running CSE or this expression
4228 has side effects, show we don't know it to be a constant.
4229 Likewise if it's a pointer or aggregate type, since in those
4230 cases we only want literals, which are only optimized when
4231 generating RTL, not later.
4232 And finally, if we are compiling an initializer, not code, we
4233 need to return a definite result now; there's not going to be any
4234 more optimization done. */
4235 if (TREE_SIDE_EFFECTS (arglist) || cse_not_expected
4236 || AGGREGATE_TYPE_P (TREE_TYPE (arglist))
4237 || POINTER_TYPE_P (TREE_TYPE (arglist))
4238 || cfun == 0)
4239 return integer_zero_node;
4240
4241 return 0;
4242 }
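
/* Illustratively, the folding above behaves like this:

     __builtin_constant_p (42)      folds to 1
     __builtin_constant_p ("abc")   folds to 1 (address of a STRING_CST)
     __builtin_constant_p (x++)     folds to 0 (side effects)

   When neither case applies, the function returns 0 (no folding),
   leaving the decision to the RTL expansion of __builtin_constant_p,
   where CSE may still prove the argument constant.  */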
4243
4244 /* Fold a call to __builtin_classify_type. */
4245
4246 static tree
4247 fold_builtin_classify_type (arglist)
4248 tree arglist;
4249 {
4250 if (arglist == 0)
4251 return build_int_2 (no_type_class, 0);
4252
4253 return build_int_2 (type_to_class (TREE_TYPE (TREE_VALUE (arglist))), 0);
4254 }
4255
4256 /* Fold a call to __builtin_inf or __builtin_huge_val. */
4257
4258 static tree
4259 fold_builtin_inf (type, warn)
4260 tree type;
4261 int warn;
4262 {
4263 REAL_VALUE_TYPE real;
4264
4265 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
4266 warning ("target format does not support infinity");
4267
4268 real_inf (&real);
4269 return build_real (type, real);
4270 }
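
/* For example, __builtin_inf () folds to a REAL_CST holding +Inf, with a
   warning if the target format has no infinities; __builtin_huge_val ()
   folds the same way but never warns, since HUGE_VAL has traditionally
   been allowed to be a large finite value.  */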
4271
4272 /* Fold a call to __builtin_nan or __builtin_nans. */
4273
4274 static tree
4275 fold_builtin_nan (arglist, type, quiet)
4276 tree arglist, type;
4277 int quiet;
4278 {
4279 REAL_VALUE_TYPE real;
4280 const char *str;
4281
4282 if (!validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
4283 return 0;
4284 str = c_getstr (TREE_VALUE (arglist));
4285 if (!str)
4286 return 0;
4287
4288 if (!real_nan (&real, str, quiet, TYPE_MODE (type)))
4289 return 0;
4290
4291 return build_real (type, real);
4292 }
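
/* Illustrative folds, assuming the target format supports NaNs and the
   string argument is a literal that real_nan can parse:

     __builtin_nan ("")    becomes a quiet NaN REAL_CST
     __builtin_nans (s)    likewise becomes a signalling NaN when S is
                           an acceptable literal

   A non-literal argument defeats c_getstr, so the call is left alone
   and expands as an ordinary library call.  */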
4293
4294 /* Used by constant folding to eliminate some builtin calls early. EXP is
4295 the CALL_EXPR of a call to a builtin function. */
4296
4297 tree
4298 fold_builtin (exp)
4299 tree exp;
4300 {
4301 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4302 tree arglist = TREE_OPERAND (exp, 1);
4303 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
4304
4305 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
4306 return 0;
4307
4308 switch (fcode)
4309 {
4310 case BUILT_IN_CONSTANT_P:
4311 return fold_builtin_constant_p (arglist);
4312
4313 case BUILT_IN_CLASSIFY_TYPE:
4314 return fold_builtin_classify_type (arglist);
4315
4316 case BUILT_IN_STRLEN:
4317 if (validate_arglist (arglist, POINTER_TYPE, VOID_TYPE))
4318 {
4319 tree len = c_strlen (TREE_VALUE (arglist));
4320 if (len)
4321 {
4322 /* Convert from the internal "sizetype" type to "size_t". */
4323 if (size_type_node)
4324 len = convert (size_type_node, len);
4325 return len;
4326 }
4327 }
4328 break;
4329
4330 case BUILT_IN_INF:
4331 case BUILT_IN_INFF:
4332 case BUILT_IN_INFL:
4333 return fold_builtin_inf (TREE_TYPE (TREE_TYPE (fndecl)), true);
4334
4335 case BUILT_IN_HUGE_VAL:
4336 case BUILT_IN_HUGE_VALF:
4337 case BUILT_IN_HUGE_VALL:
4338 return fold_builtin_inf (TREE_TYPE (TREE_TYPE (fndecl)), false);
4339
4340 case BUILT_IN_NAN:
4341 case BUILT_IN_NANF:
4342 case BUILT_IN_NANL:
4343 return fold_builtin_nan (arglist, TREE_TYPE (TREE_TYPE (fndecl)), true);
4344
4345 case BUILT_IN_NANS:
4346 case BUILT_IN_NANSF:
4347 case BUILT_IN_NANSL:
4348 return fold_builtin_nan (arglist, TREE_TYPE (TREE_TYPE (fndecl)), false);
4349
4350 default:
4351 break;
4352 }
4353
4354 return 0;
4355 }
4356
4357 static tree
4358 build_function_call_expr (fn, arglist)
4359 tree fn, arglist;
4360 {
4361 tree call_expr;
4362
4363 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
4364 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
4365 call_expr, arglist);
4366 TREE_SIDE_EFFECTS (call_expr) = 1;
4367 return fold (call_expr);
4368 }
4369
4370 /* This function validates the types of a function call argument list
4371 represented as a tree chain of parameters against a specified list
4372 of tree_codes. If the last specifier is a 0, that represents an
4373 ellipsis; otherwise the last specifier must be a VOID_TYPE. */
4374
4375 static int
4376 validate_arglist VPARAMS ((tree arglist, ...))
4377 {
4378 enum tree_code code;
4379 int res = 0;
4380
4381 VA_OPEN (ap, arglist);
4382 VA_FIXEDARG (ap, tree, arglist);
4383
4384 do
4385 {
4386 code = va_arg (ap, enum tree_code);
4387 switch (code)
4388 {
4389 case 0:
4390 /* This signifies an ellipsis; any further arguments are OK. */
4391 res = 1;
4392 goto end;
4393 case VOID_TYPE:
4394 /* This signifies an endlink; if no arguments remain, return
4395 true, otherwise return false. */
4396 res = arglist == 0;
4397 goto end;
4398 default:
4399 /* If no parameters remain or the parameter's code does not
4400 match the specified code, return false. Otherwise continue
4401 checking any remaining arguments. */
4402 if (arglist == 0
4403 || code != TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))))
4404 goto end;
4405 break;
4406 }
4407 arglist = TREE_CHAIN (arglist);
4408 }
4409 while (1);
4410
4411 /* We need gotos here since we can only have one VA_CLOSE in a
4412 function. */
4413 end: ;
4414 VA_CLOSE (ap);
4415
4416 return res;
4417 }
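
/* Example call, as used by the expanders above:

     if (!validate_arglist (arglist, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
       return 0;

   accepts exactly one pointer argument followed by one integer argument.
   Ending the specifier list with 0 instead of VOID_TYPE would additionally
   accept any number of trailing arguments of any type.  */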
4418
4419 /* Default version of target-specific builtin setup that does nothing. */
4420
4421 void
4422 default_init_builtins ()
4423 {
4424 }
4425
4426 /* Default target-specific builtin expander that does nothing. */
4427
4428 rtx
4429 default_expand_builtin (exp, target, subtarget, mode, ignore)
4430 tree exp ATTRIBUTE_UNUSED;
4431 rtx target ATTRIBUTE_UNUSED;
4432 rtx subtarget ATTRIBUTE_UNUSED;
4433 enum machine_mode mode ATTRIBUTE_UNUSED;
4434 int ignore ATTRIBUTE_UNUSED;
4435 {
4436 return NULL_RTX;
4437 }
4438