xref: /openbsd/gnu/usr.bin/gcc/gcc/calls.c (revision 069a0102)
1 /* Convert function calls to rtl insns, for GNU C compiler.
2    Copyright (C) 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998
3    1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11 
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15 for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING.  If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA.  */
21 
22 #include "config.h"
23 #include "system.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "expr.h"
28 #include "libfuncs.h"
29 #include "function.h"
30 #include "regs.h"
31 #include "toplev.h"
32 #include "output.h"
33 #include "tm_p.h"
34 #include "timevar.h"
35 #include "sbitmap.h"
36 #include "langhooks.h"
37 #include "target.h"
38 #include "except.h"
39 
40 #if !defined FUNCTION_OK_FOR_SIBCALL
41 #define FUNCTION_OK_FOR_SIBCALL(DECL) 1
42 #endif
43 
44 /* Decide whether a function's arguments should be processed
45    from first to last or from last to first.
46 
47    They should if the stack and args grow in opposite directions, but
48    only if we have push insns.  */
49 
50 #ifdef PUSH_ROUNDING
51 
52 #ifndef PUSH_ARGS_REVERSED
53 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
54 #define PUSH_ARGS_REVERSED  PUSH_ARGS
55 #endif
56 #endif
57 
58 #endif
59 
60 #ifndef PUSH_ARGS_REVERSED
61 #define PUSH_ARGS_REVERSED 0
62 #endif
63 
64 #ifndef STACK_POINTER_OFFSET
65 #define STACK_POINTER_OFFSET    0
66 #endif
67 
68 /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits.  */
69 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
70 
71 /* Data structure and subroutines used within expand_call.  */
72 
73 struct arg_data
74 {
75   /* Tree node for this argument.  */
76   tree tree_value;
77   /* Mode for value; TYPE_MODE unless promoted.  */
78   enum machine_mode mode;
79   /* Current RTL value for argument, or 0 if it isn't precomputed.  */
80   rtx value;
81   /* Initially-compute RTL value for argument; only for const functions.  */
82   rtx initial_value;
83   /* Register to pass this argument in, 0 if passed on stack, or an
84      PARALLEL if the arg is to be copied into multiple non-contiguous
85      registers.  */
86   rtx reg;
87   /* Register to pass this argument in when generating tail call sequence.
88      This is not the same register as for normal calls on machines with
89      register windows.  */
90   rtx tail_call_reg;
91   /* If REG was promoted from the actual mode of the argument expression,
92      indicates whether the promotion is sign- or zero-extended.  */
93   int unsignedp;
94   /* Number of registers to use.  0 means put the whole arg in registers.
95      Also 0 if not passed in registers.  */
96   int partial;
97   /* Nonzero if argument must be passed on stack.
98      Note that some arguments may be passed on the stack
99      even though pass_on_stack is zero, just because FUNCTION_ARG says so.
100      pass_on_stack identifies arguments that *cannot* go in registers.  */
101   int pass_on_stack;
102   /* Offset of this argument from beginning of stack-args.  */
103   struct args_size offset;
104   /* Similar, but offset to the start of the stack slot.  Different from
105      OFFSET if this arg pads downward.  */
106   struct args_size slot_offset;
107   /* Size of this argument on the stack, rounded up for any padding it gets,
108      parts of the argument passed in registers do not count.
109      If REG_PARM_STACK_SPACE is defined, then register parms
110      are counted here as well.  */
111   struct args_size size;
112   /* Location on the stack at which parameter should be stored.  The store
113      has already been done if STACK == VALUE.  */
114   rtx stack;
115   /* Location on the stack of the start of this argument slot.  This can
116      differ from STACK if this arg pads downward.  This location is known
117      to be aligned to FUNCTION_ARG_BOUNDARY.  */
118   rtx stack_slot;
119   /* Place that this stack area has been saved, if needed.  */
120   rtx save_area;
121   /* If an argument's alignment does not permit direct copying into registers,
122      copy in smaller-sized pieces into pseudos.  These are stored in a
123      block pointed to by this field.  The next field says how many
124      word-sized pseudos we made.  */
125   rtx *aligned_regs;
126   int n_aligned_regs;
127   /* The amount that the stack pointer needs to be adjusted to
128      force alignment for the next argument.  */
129   struct args_size alignment_pad;
130 };
131 
132 /* A vector of one char per byte of stack space.  A byte if nonzero if
133    the corresponding stack location has been used.
134    This vector is used to prevent a function call within an argument from
135    clobbering any stack already set up.  */
136 static char *stack_usage_map;
137 
138 /* Size of STACK_USAGE_MAP.  */
139 static int highest_outgoing_arg_in_use;
140 
141 /* A bitmap of virtual-incoming stack space.  Bit is set if the corresponding
142    stack location's tail call argument has been already stored into the stack.
143    This bitmap is used to prevent sibling call optimization if function tries
144    to use parent's incoming argument slots when they have been already
145    overwritten with tail call arguments.  */
146 static sbitmap stored_args_map;
147 
148 /* stack_arg_under_construction is nonzero when an argument may be
149    initialized with a constructor call (including a C function that
150    returns a BLKmode struct) and expand_call must take special action
151    to make sure the object being constructed does not overlap the
152    argument list for the constructor call.  */
153 int stack_arg_under_construction;
154 
155 static int calls_function	PARAMS ((tree, int));
156 static int calls_function_1	PARAMS ((tree, int));
157 
158 /* Nonzero if this is a call to a `const' function.  */
159 #define ECF_CONST		1
160 /* Nonzero if this is a call to a `volatile' function.  */
161 #define ECF_NORETURN		2
162 /* Nonzero if this is a call to malloc or a related function.  */
163 #define ECF_MALLOC		4
164 /* Nonzero if it is plausible that this is a call to alloca.  */
165 #define ECF_MAY_BE_ALLOCA	8
166 /* Nonzero if this is a call to a function that won't throw an exception.  */
167 #define ECF_NOTHROW		16
168 /* Nonzero if this is a call to setjmp or a related function.  */
169 #define ECF_RETURNS_TWICE	32
170 /* Nonzero if this is a call to `longjmp'.  */
171 #define ECF_LONGJMP		64
172 /* Nonzero if this is a syscall that makes a new process in the image of
173    the current one.  */
174 #define ECF_FORK_OR_EXEC	128
175 #define ECF_SIBCALL		256
176 /* Nonzero if this is a call to "pure" function (like const function,
177    but may read memory.  */
178 #define ECF_PURE		512
179 /* Nonzero if this is a call to a function that returns with the stack
180    pointer depressed.  */
181 #define ECF_SP_DEPRESSED	1024
182 /* Nonzero if this call is known to always return.  */
183 #define ECF_ALWAYS_RETURN	2048
184 /* Create libcall block around the call.  */
185 #define ECF_LIBCALL_BLOCK	4096
186 
187 static void emit_call_1		PARAMS ((rtx, tree, tree, HOST_WIDE_INT,
188 					 HOST_WIDE_INT, HOST_WIDE_INT, rtx,
189 					 rtx, int, rtx, int,
190 					 CUMULATIVE_ARGS *));
191 static void precompute_register_parameters	PARAMS ((int,
192 							 struct arg_data *,
193 							 int *));
194 static int store_one_arg	PARAMS ((struct arg_data *, rtx, int, int,
195 					 int));
196 static void store_unaligned_arguments_into_pseudos PARAMS ((struct arg_data *,
197 							    int));
198 static int finalize_must_preallocate		PARAMS ((int, int,
199 							 struct arg_data *,
200 							 struct args_size *));
201 static void precompute_arguments 		PARAMS ((int, int,
202 							 struct arg_data *));
203 static int compute_argument_block_size		PARAMS ((int,
204 							 struct args_size *,
205 							 int));
206 static void initialize_argument_information	PARAMS ((int,
207 							 struct arg_data *,
208 							 struct args_size *,
209 							 int, tree, tree,
210 							 CUMULATIVE_ARGS *,
211 							 int, rtx *, int *,
212 							 int *, int *));
213 static void compute_argument_addresses		PARAMS ((struct arg_data *,
214 							 rtx, int));
215 static rtx rtx_for_function_call		PARAMS ((tree, tree));
216 static void load_register_parameters		PARAMS ((struct arg_data *,
217 							 int, rtx *, int));
218 static rtx emit_library_call_value_1 		PARAMS ((int, rtx, rtx,
219 							 enum libcall_type,
220 							 enum machine_mode,
221 							 int, va_list));
222 static int special_function_p			PARAMS ((tree, int));
223 static int flags_from_decl_or_type 		PARAMS ((tree));
224 static rtx try_to_integrate			PARAMS ((tree, tree, rtx,
225 							 int, tree, rtx));
226 static int check_sibcall_argument_overlap_1	PARAMS ((rtx));
227 static int check_sibcall_argument_overlap	PARAMS ((rtx, struct arg_data *));
228 
229 static int combine_pending_stack_adjustment_and_call
230                                                 PARAMS ((int, struct args_size *, int));
231 static tree fix_unsafe_tree		PARAMS ((tree));
232 
233 #ifdef REG_PARM_STACK_SPACE
234 static rtx save_fixed_argument_area	PARAMS ((int, rtx, int *, int *));
235 static void restore_fixed_argument_area	PARAMS ((rtx, rtx, int, int));
236 #endif
237 
238 /* If WHICH is 1, return 1 if EXP contains a call to the built-in function
239    `alloca'.
240 
241    If WHICH is 0, return 1 if EXP contains a call to any function.
242    Actually, we only need return 1 if evaluating EXP would require pushing
243    arguments on the stack, but that is too difficult to compute, so we just
244    assume any function call might require the stack.  */
245 
246 static tree calls_function_save_exprs;
247 
248 static int
calls_function(exp,which)249 calls_function (exp, which)
250      tree exp;
251      int which;
252 {
253   int val;
254 
255   calls_function_save_exprs = 0;
256   val = calls_function_1 (exp, which);
257   calls_function_save_exprs = 0;
258   return val;
259 }
260 
261 /* Recursive function to do the work of above function.  */
262 
263 static int
calls_function_1(exp,which)264 calls_function_1 (exp, which)
265      tree exp;
266      int which;
267 {
268   int i;
269   enum tree_code code = TREE_CODE (exp);
270   int class = TREE_CODE_CLASS (code);
271   int length = first_rtl_op (code);
272 
273   /* If this code is language-specific, we don't know what it will do.  */
274   if ((int) code >= NUM_TREE_CODES)
275     return 1;
276 
277   switch (code)
278     {
279     case CALL_EXPR:
280       if (which == 0)
281 	return 1;
282       else if ((TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
283 		== FUNCTION_TYPE)
284 	       && (TYPE_RETURNS_STACK_DEPRESSED
285 		   (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
286 	return 1;
287       else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
288 	       && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
289 		   == FUNCTION_DECL)
290 	       && (special_function_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
291 				       0)
292 		   & ECF_MAY_BE_ALLOCA))
293 	return 1;
294 
295       break;
296 
297     case CONSTRUCTOR:
298       {
299 	tree tem;
300 
301 	for (tem = CONSTRUCTOR_ELTS (exp); tem != 0; tem = TREE_CHAIN (tem))
302 	  if (calls_function_1 (TREE_VALUE (tem), which))
303 	    return 1;
304       }
305 
306       return 0;
307 
308     case SAVE_EXPR:
309       if (SAVE_EXPR_RTL (exp) != 0)
310 	return 0;
311       if (value_member (exp, calls_function_save_exprs))
312 	return 0;
313       calls_function_save_exprs = tree_cons (NULL_TREE, exp,
314 					     calls_function_save_exprs);
315       return (TREE_OPERAND (exp, 0) != 0
316 	      && calls_function_1 (TREE_OPERAND (exp, 0), which));
317 
318     case BLOCK:
319       {
320 	tree local;
321 	tree subblock;
322 
323 	for (local = BLOCK_VARS (exp); local; local = TREE_CHAIN (local))
324 	  if (DECL_INITIAL (local) != 0
325 	      && calls_function_1 (DECL_INITIAL (local), which))
326 	    return 1;
327 
328 	for (subblock = BLOCK_SUBBLOCKS (exp);
329 	     subblock;
330 	     subblock = TREE_CHAIN (subblock))
331 	  if (calls_function_1 (subblock, which))
332 	    return 1;
333       }
334       return 0;
335 
336     case TREE_LIST:
337       for (; exp != 0; exp = TREE_CHAIN (exp))
338 	if (calls_function_1 (TREE_VALUE (exp), which))
339 	  return 1;
340       return 0;
341 
342     default:
343       break;
344     }
345 
346   /* Only expressions, references, and blocks can contain calls.  */
347   if (! IS_EXPR_CODE_CLASS (class) && class != 'r' && class != 'b')
348     return 0;
349 
350   for (i = 0; i < length; i++)
351     if (TREE_OPERAND (exp, i) != 0
352 	&& calls_function_1 (TREE_OPERAND (exp, i), which))
353       return 1;
354 
355   return 0;
356 }
357 
358 /* Force FUNEXP into a form suitable for the address of a CALL,
359    and return that as an rtx.  Also load the static chain register
360    if FNDECL is a nested function.
361 
362    CALL_FUSAGE points to a variable holding the prospective
363    CALL_INSN_FUNCTION_USAGE information.  */
364 
365 rtx
prepare_call_address(funexp,fndecl,call_fusage,reg_parm_seen,sibcallp)366 prepare_call_address (funexp, fndecl, call_fusage, reg_parm_seen, sibcallp)
367      rtx funexp;
368      tree fndecl;
369      rtx *call_fusage;
370      int reg_parm_seen;
371      int sibcallp;
372 {
373   rtx static_chain_value = 0;
374 
375   funexp = protect_from_queue (funexp, 0);
376 
377   if (fndecl != 0)
378     /* Get possible static chain value for nested function in C.  */
379     static_chain_value = lookup_static_chain (fndecl);
380 
381   /* Make a valid memory address and copy constants thru pseudo-regs,
382      but not for a constant address if -fno-function-cse.  */
383   if (GET_CODE (funexp) != SYMBOL_REF)
384     /* If we are using registers for parameters, force the
385        function address into a register now.  */
386     funexp = ((SMALL_REGISTER_CLASSES && reg_parm_seen)
387 	      ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
388 	      : memory_address (FUNCTION_MODE, funexp));
389   else if (! sibcallp)
390     {
391 #ifndef NO_FUNCTION_CSE
392       if (optimize && ! flag_no_function_cse)
393 #ifdef NO_RECURSIVE_FUNCTION_CSE
394 	if (fndecl != current_function_decl)
395 #endif
396 	  funexp = force_reg (Pmode, funexp);
397 #endif
398     }
399 
400   if (static_chain_value != 0)
401     {
402       emit_move_insn (static_chain_rtx, static_chain_value);
403 
404       if (GET_CODE (static_chain_rtx) == REG)
405 	use_reg (call_fusage, static_chain_rtx);
406     }
407 
408   return funexp;
409 }
410 
411 /* Generate instructions to call function FUNEXP,
412    and optionally pop the results.
413    The CALL_INSN is the first insn generated.
414 
415    FNDECL is the declaration node of the function.  This is given to the
416    macro RETURN_POPS_ARGS to determine whether this function pops its own args.
417 
418    FUNTYPE is the data type of the function.  This is given to the macro
419    RETURN_POPS_ARGS to determine whether this function pops its own args.
420    We used to allow an identifier for library functions, but that doesn't
421    work when the return type is an aggregate type and the calling convention
422    says that the pointer to this aggregate is to be popped by the callee.
423 
424    STACK_SIZE is the number of bytes of arguments on the stack,
425    ROUNDED_STACK_SIZE is that number rounded up to
426    PREFERRED_STACK_BOUNDARY; zero if the size is variable.  This is
427    both to put into the call insn and to generate explicit popping
428    code if necessary.
429 
430    STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
431    It is zero if this call doesn't want a structure value.
432 
433    NEXT_ARG_REG is the rtx that results from executing
434      FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1)
435    just after all the args have had their registers assigned.
436    This could be whatever you like, but normally it is the first
437    arg-register beyond those used for args in this call,
438    or 0 if all the arg-registers are used in this call.
439    It is passed on to `gen_call' so you can put this info in the call insn.
440 
441    VALREG is a hard register in which a value is returned,
442    or 0 if the call does not return a value.
443 
444    OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
445    the args to this call were processed.
446    We restore `inhibit_defer_pop' to that value.
447 
448    CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
449    denote registers used by the called function.  */
450 
451 static void
emit_call_1(funexp,fndecl,funtype,stack_size,rounded_stack_size,struct_value_size,next_arg_reg,valreg,old_inhibit_defer_pop,call_fusage,ecf_flags,args_so_far)452 emit_call_1 (funexp, fndecl, funtype, stack_size, rounded_stack_size,
453 	     struct_value_size, next_arg_reg, valreg, old_inhibit_defer_pop,
454 	     call_fusage, ecf_flags, args_so_far)
455      rtx funexp;
456      tree fndecl ATTRIBUTE_UNUSED;
457      tree funtype ATTRIBUTE_UNUSED;
458      HOST_WIDE_INT stack_size ATTRIBUTE_UNUSED;
459      HOST_WIDE_INT rounded_stack_size;
460      HOST_WIDE_INT struct_value_size ATTRIBUTE_UNUSED;
461      rtx next_arg_reg ATTRIBUTE_UNUSED;
462      rtx valreg;
463      int old_inhibit_defer_pop;
464      rtx call_fusage;
465      int ecf_flags;
466      CUMULATIVE_ARGS *args_so_far ATTRIBUTE_UNUSED;
467 {
468   rtx rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
469   rtx call_insn;
470   int already_popped = 0;
471   HOST_WIDE_INT n_popped = RETURN_POPS_ARGS (fndecl, funtype, stack_size);
472 #if defined (HAVE_call) && defined (HAVE_call_value)
473   rtx struct_value_size_rtx;
474   struct_value_size_rtx = GEN_INT (struct_value_size);
475 #endif
476 
477 #ifdef CALL_POPS_ARGS
478   n_popped += CALL_POPS_ARGS (* args_so_far);
479 #endif
480 
481   /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
482      and we don't want to load it into a register as an optimization,
483      because prepare_call_address already did it if it should be done.  */
484   if (GET_CODE (funexp) != SYMBOL_REF)
485     funexp = memory_address (FUNCTION_MODE, funexp);
486 
487 #if defined (HAVE_sibcall_pop) && defined (HAVE_sibcall_value_pop)
488   if ((ecf_flags & ECF_SIBCALL)
489       && HAVE_sibcall_pop && HAVE_sibcall_value_pop
490       && (n_popped > 0 || stack_size == 0))
491     {
492       rtx n_pop = GEN_INT (n_popped);
493       rtx pat;
494 
495       /* If this subroutine pops its own args, record that in the call insn
496 	 if possible, for the sake of frame pointer elimination.  */
497 
498       if (valreg)
499 	pat = GEN_SIBCALL_VALUE_POP (valreg,
500 				     gen_rtx_MEM (FUNCTION_MODE, funexp),
501 				     rounded_stack_size_rtx, next_arg_reg,
502 				     n_pop);
503       else
504 	pat = GEN_SIBCALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
505 			       rounded_stack_size_rtx, next_arg_reg, n_pop);
506 
507       emit_call_insn (pat);
508       already_popped = 1;
509     }
510   else
511 #endif
512 
513 #if defined (HAVE_call_pop) && defined (HAVE_call_value_pop)
514   /* If the target has "call" or "call_value" insns, then prefer them
515      if no arguments are actually popped.  If the target does not have
516      "call" or "call_value" insns, then we must use the popping versions
517      even if the call has no arguments to pop.  */
518 #if defined (HAVE_call) && defined (HAVE_call_value)
519   if (HAVE_call && HAVE_call_value && HAVE_call_pop && HAVE_call_value_pop
520       && n_popped > 0 && ! (ecf_flags & ECF_SP_DEPRESSED))
521 #else
522   if (HAVE_call_pop && HAVE_call_value_pop)
523 #endif
524     {
525       rtx n_pop = GEN_INT (n_popped);
526       rtx pat;
527 
528       /* If this subroutine pops its own args, record that in the call insn
529 	 if possible, for the sake of frame pointer elimination.  */
530 
531       if (valreg)
532 	pat = GEN_CALL_VALUE_POP (valreg,
533 				  gen_rtx_MEM (FUNCTION_MODE, funexp),
534 				  rounded_stack_size_rtx, next_arg_reg, n_pop);
535       else
536 	pat = GEN_CALL_POP (gen_rtx_MEM (FUNCTION_MODE, funexp),
537 			    rounded_stack_size_rtx, next_arg_reg, n_pop);
538 
539       emit_call_insn (pat);
540       already_popped = 1;
541     }
542   else
543 #endif
544 
545 #if defined (HAVE_sibcall) && defined (HAVE_sibcall_value)
546   if ((ecf_flags & ECF_SIBCALL)
547       && HAVE_sibcall && HAVE_sibcall_value)
548     {
549       if (valreg)
550 	emit_call_insn (GEN_SIBCALL_VALUE (valreg,
551 					   gen_rtx_MEM (FUNCTION_MODE, funexp),
552 					   rounded_stack_size_rtx,
553 					   next_arg_reg, NULL_RTX));
554       else
555 	emit_call_insn (GEN_SIBCALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
556 				     rounded_stack_size_rtx, next_arg_reg,
557 				     struct_value_size_rtx));
558     }
559   else
560 #endif
561 
562 #if defined (HAVE_call) && defined (HAVE_call_value)
563   if (HAVE_call && HAVE_call_value)
564     {
565       if (valreg)
566 	emit_call_insn (GEN_CALL_VALUE (valreg,
567 					gen_rtx_MEM (FUNCTION_MODE, funexp),
568 					rounded_stack_size_rtx, next_arg_reg,
569 					NULL_RTX));
570       else
571 	emit_call_insn (GEN_CALL (gen_rtx_MEM (FUNCTION_MODE, funexp),
572 				  rounded_stack_size_rtx, next_arg_reg,
573 				  struct_value_size_rtx));
574     }
575   else
576 #endif
577     abort ();
578 
579   /* Find the CALL insn we just emitted.  */
580   for (call_insn = get_last_insn ();
581        call_insn && GET_CODE (call_insn) != CALL_INSN;
582        call_insn = PREV_INSN (call_insn))
583     ;
584 
585   if (! call_insn)
586     abort ();
587 
588   /* Mark memory as used for "pure" function call.  */
589   if (ecf_flags & ECF_PURE)
590     call_fusage
591       = gen_rtx_EXPR_LIST
592 	(VOIDmode,
593 	 gen_rtx_USE (VOIDmode,
594 		      gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode))),
595 	 call_fusage);
596 
597   /* Put the register usage information on the CALL.  If there is already
598      some usage information, put ours at the end.  */
599   if (CALL_INSN_FUNCTION_USAGE (call_insn))
600     {
601       rtx link;
602 
603       for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
604 	   link = XEXP (link, 1))
605 	;
606 
607       XEXP (link, 1) = call_fusage;
608     }
609   else
610     CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
611 
612   /* If this is a const call, then set the insn's unchanging bit.  */
613   if (ecf_flags & (ECF_CONST | ECF_PURE))
614     CONST_OR_PURE_CALL_P (call_insn) = 1;
615 
616   /* If this call can't throw, attach a REG_EH_REGION reg note to that
617      effect.  */
618   if (ecf_flags & ECF_NOTHROW)
619     REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_EH_REGION, const0_rtx,
620 					       REG_NOTES (call_insn));
621   else
622     note_eh_region_may_contain_throw ();
623 
624   if (ecf_flags & ECF_NORETURN)
625     REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_NORETURN, const0_rtx,
626 					       REG_NOTES (call_insn));
627   if (ecf_flags & ECF_ALWAYS_RETURN)
628     REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_ALWAYS_RETURN, const0_rtx,
629 					       REG_NOTES (call_insn));
630 
631   if (ecf_flags & ECF_RETURNS_TWICE)
632     {
633       REG_NOTES (call_insn) = gen_rtx_EXPR_LIST (REG_SETJMP, const0_rtx,
634 					         REG_NOTES (call_insn));
635       current_function_calls_setjmp = 1;
636     }
637 
638   SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);
639 
640   /* Restore this now, so that we do defer pops for this call's args
641      if the context of the call as a whole permits.  */
642   inhibit_defer_pop = old_inhibit_defer_pop;
643 
644   /* Don't bother cleaning up after a noreturn function.  */
645   if (ecf_flags & (ECF_NORETURN | ECF_LONGJMP))
646     return;
647 
648   if (n_popped > 0)
649     {
650       if (!already_popped)
651 	CALL_INSN_FUNCTION_USAGE (call_insn)
652 	  = gen_rtx_EXPR_LIST (VOIDmode,
653 			       gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
654 			       CALL_INSN_FUNCTION_USAGE (call_insn));
655       rounded_stack_size -= n_popped;
656       rounded_stack_size_rtx = GEN_INT (rounded_stack_size);
657       stack_pointer_delta -= n_popped;
658     }
659 
660   if (!ACCUMULATE_OUTGOING_ARGS)
661     {
662       /* If returning from the subroutine does not automatically pop the args,
663 	 we need an instruction to pop them sooner or later.
664 	 Perhaps do it now; perhaps just record how much space to pop later.
665 
666 	 If returning from the subroutine does pop the args, indicate that the
667 	 stack pointer will be changed.  */
668 
669       if (rounded_stack_size != 0)
670 	{
671 	  if (ecf_flags & ECF_SP_DEPRESSED)
672 	    /* Just pretend we did the pop.  */
673 	    stack_pointer_delta -= rounded_stack_size;
674 	  else if (flag_defer_pop && inhibit_defer_pop == 0
675 	      && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
676 	    pending_stack_adjust += rounded_stack_size;
677 	  else
678 	    adjust_stack (rounded_stack_size_rtx);
679 	}
680     }
681   /* When we accumulate outgoing args, we must avoid any stack manipulations.
682      Restore the stack pointer to its original value now.  Usually
683      ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
684      On  i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
685      popping variants of functions exist as well.
686 
687      ??? We may optimize similar to defer_pop above, but it is
688      probably not worthwhile.
689 
690      ??? It will be worthwhile to enable combine_stack_adjustments even for
691      such machines.  */
692   else if (n_popped)
693     anti_adjust_stack (GEN_INT (n_popped));
694 }
695 
696 /* Determine if the function identified by NAME and FNDECL is one with
697    special properties we wish to know about.
698 
699    For example, if the function might return more than one time (setjmp), then
700    set RETURNS_TWICE to a nonzero value.
701 
702    Similarly set LONGJMP for if the function is in the longjmp family.
703 
704    Set MALLOC for any of the standard memory allocation functions which
705    allocate from the heap.
706 
707    Set MAY_BE_ALLOCA for any memory allocation function that might allocate
708    space from the stack such as alloca.  */
709 
710 static int
special_function_p(fndecl,flags)711 special_function_p (fndecl, flags)
712      tree fndecl;
713      int flags;
714 {
715   if (! (flags & ECF_MALLOC)
716       && fndecl && DECL_NAME (fndecl)
717       && IDENTIFIER_LENGTH (DECL_NAME (fndecl)) <= 17
718       /* Exclude functions not at the file scope, or not `extern',
719 	 since they are not the magic functions we would otherwise
720 	 think they are.  */
721       && DECL_CONTEXT (fndecl) == NULL_TREE && TREE_PUBLIC (fndecl))
722     {
723       const char *name = IDENTIFIER_POINTER (DECL_NAME (fndecl));
724       const char *tname = name;
725 
726       /* We assume that alloca will always be called by name.  It
727 	 makes no sense to pass it as a pointer-to-function to
728 	 anything that does not understand its behavior.  */
729       if (((IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 6
730 	    && name[0] == 'a'
731 	    && ! strcmp (name, "alloca"))
732 	   || (IDENTIFIER_LENGTH (DECL_NAME (fndecl)) == 16
733 	       && name[0] == '_'
734 	       && ! strcmp (name, "__builtin_alloca"))))
735 	flags |= ECF_MAY_BE_ALLOCA;
736 
737       /* Disregard prefix _, __ or __x.  */
738       if (name[0] == '_')
739 	{
740 	  if (name[1] == '_' && name[2] == 'x')
741 	    tname += 3;
742 	  else if (name[1] == '_')
743 	    tname += 2;
744 	  else
745 	    tname += 1;
746 	}
747 
748       if (tname[0] == 's')
749 	{
750 	  if ((tname[1] == 'e'
751 	       && (! strcmp (tname, "setjmp")
752 		   || ! strcmp (tname, "setjmp_syscall")))
753 	      || (tname[1] == 'i'
754 		  && ! strcmp (tname, "sigsetjmp"))
755 	      || (tname[1] == 'a'
756 		  && ! strcmp (tname, "savectx")))
757 	    flags |= ECF_RETURNS_TWICE;
758 
759 	  if (tname[1] == 'i'
760 	      && ! strcmp (tname, "siglongjmp"))
761 	    flags |= ECF_LONGJMP;
762 	}
763       else if ((tname[0] == 'q' && tname[1] == 's'
764 		&& ! strcmp (tname, "qsetjmp"))
765 	       || (tname[0] == 'v' && tname[1] == 'f'
766 		   && ! strcmp (tname, "vfork")))
767 	flags |= ECF_RETURNS_TWICE;
768 
769       else if (tname[0] == 'l' && tname[1] == 'o'
770 	       && ! strcmp (tname, "longjmp"))
771 	flags |= ECF_LONGJMP;
772 
773       else if ((tname[0] == 'f' && tname[1] == 'o'
774 		&& ! strcmp (tname, "fork"))
775 	       /* Linux specific: __clone.  check NAME to insist on the
776 		  leading underscores, to avoid polluting the ISO / POSIX
777 		  namespace.  */
778 	       || (name[0] == '_' && name[1] == '_'
779 		   && ! strcmp (tname, "clone"))
780 	       || (tname[0] == 'e' && tname[1] == 'x' && tname[2] == 'e'
781 		   && tname[3] == 'c' && (tname[4] == 'l' || tname[4] == 'v')
782 		   && (tname[5] == '\0'
783 		       || ((tname[5] == 'p' || tname[5] == 'e')
784 			   && tname[6] == '\0'))))
785 	flags |= ECF_FORK_OR_EXEC;
786 
787       /* Do not add any more malloc-like functions to this list,
788          instead mark them as malloc functions using the malloc attribute.
789          Note, realloc is not suitable for attribute malloc since
790          it may return the same address across multiple calls.
791          C++ operator new is not suitable because it is not required
792          to return a unique pointer; indeed, the standard placement new
793 	 just returns its argument.  */
794       else if (TYPE_MODE (TREE_TYPE (TREE_TYPE (fndecl))) == Pmode
795 	       && (! strcmp (tname, "malloc")
796 		   || ! strcmp (tname, "calloc")
797 		   || ! strcmp (tname, "strdup")))
798 	flags |= ECF_MALLOC;
799     }
800   return flags;
801 }
802 
803 /* Return nonzero when tree represent call to longjmp.  */
804 
805 int
setjmp_call_p(fndecl)806 setjmp_call_p (fndecl)
807      tree fndecl;
808 {
809   return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
810 }
811 
812 /* Return true when exp contains alloca call.  */
813 bool
alloca_call_p(exp)814 alloca_call_p (exp)
815      tree exp;
816 {
817   if (TREE_CODE (exp) == CALL_EXPR
818       && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
819       && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
820 	  == FUNCTION_DECL)
821       && (special_function_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
822 			      0) & ECF_MAY_BE_ALLOCA))
823     return true;
824   return false;
825 }
826 
827 /* Detect flags (function attributes) from the function decl or type node.  */
828 
829 static int
flags_from_decl_or_type(exp)830 flags_from_decl_or_type (exp)
831      tree exp;
832 {
833   int flags = 0;
834   tree type = exp;
835   /* ??? We can't set IS_MALLOC for function types?  */
836   if (DECL_P (exp))
837     {
838       type = TREE_TYPE (exp);
839 
840       /* The function exp may have the `malloc' attribute.  */
841       if (DECL_P (exp) && DECL_IS_MALLOC (exp))
842 	flags |= ECF_MALLOC;
843 
844       /* The function exp may have the `pure' attribute.  */
845       if (DECL_P (exp) && DECL_IS_PURE (exp))
846 	flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
847 
848       if (TREE_NOTHROW (exp))
849 	flags |= ECF_NOTHROW;
850 
851       if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
852 	flags |= ECF_LIBCALL_BLOCK;
853     }
854 
855   if (TREE_READONLY (exp) && ! TREE_THIS_VOLATILE (exp))
856     flags |= ECF_CONST;
857 
858   if (TREE_THIS_VOLATILE (exp))
859     flags |= ECF_NORETURN;
860 
861   /* Mark if the function returns with the stack pointer depressed.   We
862      cannot consider it pure or constant in that case.  */
863   if (TREE_CODE (type) == FUNCTION_TYPE && TYPE_RETURNS_STACK_DEPRESSED (type))
864     {
865       flags |= ECF_SP_DEPRESSED;
866       flags &= ~(ECF_PURE | ECF_CONST | ECF_LIBCALL_BLOCK);
867     }
868 
869   return flags;
870 }
871 
872 /* Precompute all register parameters as described by ARGS, storing values
873    into fields within the ARGS array.
874 
875    NUM_ACTUALS indicates the total number elements in the ARGS array.
876 
877    Set REG_PARM_SEEN if we encounter a register parameter.  */
878 
879 static void
precompute_register_parameters(num_actuals,args,reg_parm_seen)880 precompute_register_parameters (num_actuals, args, reg_parm_seen)
881      int num_actuals;
882      struct arg_data *args;
883      int *reg_parm_seen;
884 {
885   int i;
886 
887   *reg_parm_seen = 0;
888 
889   for (i = 0; i < num_actuals; i++)
890     if (args[i].reg != 0 && ! args[i].pass_on_stack)
891       {
892 	*reg_parm_seen = 1;
893 
894 	if (args[i].value == 0)
895 	  {
896 	    push_temp_slots ();
897 	    args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
898 					 VOIDmode, 0);
899 	    preserve_temp_slots (args[i].value);
900 	    pop_temp_slots ();
901 
902 	    /* ANSI doesn't require a sequence point here,
903 	       but PCC has one, so this will avoid some problems.  */
904 	    emit_queue ();
905 	  }
906 
907 	/* If the value is a non-legitimate constant, force it into a
908 	   pseudo now.  TLS symbols sometimes need a call to resolve.  */
909 	if (CONSTANT_P (args[i].value)
910 	    && !LEGITIMATE_CONSTANT_P (args[i].value))
911 	  args[i].value = force_reg (args[i].mode, args[i].value);
912 
913 	/* If we are to promote the function arg to a wider mode,
914 	   do it now.  */
915 
916 	if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
917 	  args[i].value
918 	    = convert_modes (args[i].mode,
919 			     TYPE_MODE (TREE_TYPE (args[i].tree_value)),
920 			     args[i].value, args[i].unsignedp);
921 
922 	/* If the value is expensive, and we are inside an appropriately
923 	   short loop, put the value into a pseudo and then put the pseudo
924 	   into the hard reg.
925 
926 	   For small register classes, also do this if this call uses
927 	   register parameters.  This is to avoid reload conflicts while
928 	   loading the parameters registers.  */
929 
930 	if ((! (GET_CODE (args[i].value) == REG
931 		|| (GET_CODE (args[i].value) == SUBREG
932 		    && GET_CODE (SUBREG_REG (args[i].value)) == REG)))
933 	    && args[i].mode != BLKmode
934 	    && rtx_cost (args[i].value, SET) > COSTS_N_INSNS (1)
935 	    && ((SMALL_REGISTER_CLASSES && *reg_parm_seen)
936 		|| preserve_subexpressions_p ()))
937 	  args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
938       }
939 }
940 
941 #ifdef REG_PARM_STACK_SPACE
942 
943   /* The argument list is the property of the called routine and it
944      may clobber it.  If the fixed area has been used for previous
945      parameters, we must save and restore it.  */
946 
947 static rtx
save_fixed_argument_area(reg_parm_stack_space,argblock,low_to_save,high_to_save)948 save_fixed_argument_area (reg_parm_stack_space, argblock,
949 			  low_to_save, high_to_save)
950      int reg_parm_stack_space;
951      rtx argblock;
952      int *low_to_save;
953      int *high_to_save;
954 {
955   int i;
956   rtx save_area = NULL_RTX;
957 
958   /* Compute the boundary of the that needs to be saved, if any.  */
959 #ifdef ARGS_GROW_DOWNWARD
960   for (i = 0; i < reg_parm_stack_space + 1; i++)
961 #else
962   for (i = 0; i < reg_parm_stack_space; i++)
963 #endif
964     {
965       if (i >= highest_outgoing_arg_in_use
966 	  || stack_usage_map[i] == 0)
967 	continue;
968 
969       if (*low_to_save == -1)
970 	*low_to_save = i;
971 
972       *high_to_save = i;
973     }
974 
975   if (*low_to_save >= 0)
976     {
977       int num_to_save = *high_to_save - *low_to_save + 1;
978       enum machine_mode save_mode
979 	= mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
980       rtx stack_area;
981 
982       /* If we don't have the required alignment, must do this in BLKmode.  */
983       if ((*low_to_save & (MIN (GET_MODE_SIZE (save_mode),
984 				BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
985 	save_mode = BLKmode;
986 
987 #ifdef ARGS_GROW_DOWNWARD
988       stack_area
989 	= gen_rtx_MEM (save_mode,
990 		       memory_address (save_mode,
991 				       plus_constant (argblock,
992 						      - *high_to_save)));
993 #else
994       stack_area = gen_rtx_MEM (save_mode,
995 				memory_address (save_mode,
996 						plus_constant (argblock,
997 							       *low_to_save)));
998 #endif
999 
1000       set_mem_align (stack_area, PARM_BOUNDARY);
1001       if (save_mode == BLKmode)
1002 	{
1003 	  save_area = assign_stack_temp (BLKmode, num_to_save, 0);
1004 	  emit_block_move (validize_mem (save_area), stack_area,
1005 			   GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
1006 	}
1007       else
1008 	{
1009 	  save_area = gen_reg_rtx (save_mode);
1010 	  emit_move_insn (save_area, stack_area);
1011 	}
1012     }
1013 
1014   return save_area;
1015 }
1016 
1017 static void
restore_fixed_argument_area(save_area,argblock,high_to_save,low_to_save)1018 restore_fixed_argument_area (save_area, argblock, high_to_save, low_to_save)
1019      rtx save_area;
1020      rtx argblock;
1021      int high_to_save;
1022      int low_to_save;
1023 {
1024   enum machine_mode save_mode = GET_MODE (save_area);
1025 #ifdef ARGS_GROW_DOWNWARD
1026   rtx stack_area
1027     = gen_rtx_MEM (save_mode,
1028 		   memory_address (save_mode,
1029 				   plus_constant (argblock,
1030 						  - high_to_save)));
1031 #else
1032   rtx stack_area
1033     = gen_rtx_MEM (save_mode,
1034 		   memory_address (save_mode,
1035 				   plus_constant (argblock,
1036 						  low_to_save)));
1037 #endif
1038 
1039   if (save_mode != BLKmode)
1040     emit_move_insn (stack_area, save_area);
1041   else
1042     emit_block_move (stack_area, validize_mem (save_area),
1043 		     GEN_INT (high_to_save - low_to_save + 1),
1044 		     BLOCK_OP_CALL_PARM);
1045 }
1046 #endif /* REG_PARM_STACK_SPACE */
1047 
1048 /* If any elements in ARGS refer to parameters that are to be passed in
1049    registers, but not in memory, and whose alignment does not permit a
1050    direct copy into registers.  Copy the values into a group of pseudos
1051    which we will later copy into the appropriate hard registers.
1052 
1053    Pseudos for each unaligned argument will be stored into the array
1054    args[argnum].aligned_regs.  The caller is responsible for deallocating
1055    the aligned_regs array if it is nonzero.  */
1056 
1057 static void
store_unaligned_arguments_into_pseudos(args,num_actuals)1058 store_unaligned_arguments_into_pseudos (args, num_actuals)
1059      struct arg_data *args;
1060      int num_actuals;
1061 {
1062   int i, j;
1063 
1064   for (i = 0; i < num_actuals; i++)
1065     if (args[i].reg != 0 && ! args[i].pass_on_stack
1066 	&& args[i].mode == BLKmode
1067 	&& (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
1068 	    < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
1069       {
1070 	int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1071 	int big_endian_correction = 0;
1072 
1073 	args[i].n_aligned_regs
1074 	  = args[i].partial ? args[i].partial
1075 	    : (bytes + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1076 
1077 	args[i].aligned_regs = (rtx *) xmalloc (sizeof (rtx)
1078 						* args[i].n_aligned_regs);
1079 
1080 	/* Structures smaller than a word are aligned to the least
1081 	   significant byte (to the right).  On a BYTES_BIG_ENDIAN machine,
1082 	   this means we must skip the empty high order bytes when
1083 	   calculating the bit offset.  */
1084 	if (BYTES_BIG_ENDIAN
1085 	    && bytes < UNITS_PER_WORD)
1086 	  big_endian_correction = (BITS_PER_WORD  - (bytes * BITS_PER_UNIT));
1087 
1088 	for (j = 0; j < args[i].n_aligned_regs; j++)
1089 	  {
1090 	    rtx reg = gen_reg_rtx (word_mode);
1091 	    rtx word = operand_subword_force (args[i].value, j, BLKmode);
1092 	    int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
1093 
1094 	    args[i].aligned_regs[j] = reg;
1095 
1096 	    /* There is no need to restrict this code to loading items
1097 	       in TYPE_ALIGN sized hunks.  The bitfield instructions can
1098 	       load up entire word sized registers efficiently.
1099 
1100 	       ??? This may not be needed anymore.
1101 	       We use to emit a clobber here but that doesn't let later
1102 	       passes optimize the instructions we emit.  By storing 0 into
1103 	       the register later passes know the first AND to zero out the
1104 	       bitfield being set in the register is unnecessary.  The store
1105 	       of 0 will be deleted as will at least the first AND.  */
1106 
1107 	    emit_move_insn (reg, const0_rtx);
1108 
1109 	    bytes -= bitsize / BITS_PER_UNIT;
1110 	    store_bit_field (reg, bitsize, big_endian_correction, word_mode,
1111 			     extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
1112 						word_mode, word_mode,
1113 						BITS_PER_WORD),
1114 			     BITS_PER_WORD);
1115 	  }
1116       }
1117 }
1118 
1119 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
1120    ACTPARMS.
1121 
1122    NUM_ACTUALS is the total number of parameters.
1123 
1124    N_NAMED_ARGS is the total number of named arguments.
1125 
1126    FNDECL is the tree code for the target of this call (if known)
1127 
1128    ARGS_SO_FAR holds state needed by the target to know where to place
1129    the next argument.
1130 
1131    REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
1132    for arguments which are passed in registers.
1133 
1134    OLD_STACK_LEVEL is a pointer to an rtx which olds the old stack level
1135    and may be modified by this routine.
1136 
1137    OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
1138    flags which may may be modified by this routine.  */
1139 
1140 static void
initialize_argument_information(num_actuals,args,args_size,n_named_args,actparms,fndecl,args_so_far,reg_parm_stack_space,old_stack_level,old_pending_adj,must_preallocate,ecf_flags)1141 initialize_argument_information (num_actuals, args, args_size, n_named_args,
1142 				 actparms, fndecl, args_so_far,
1143 				 reg_parm_stack_space, old_stack_level,
1144 				 old_pending_adj, must_preallocate,
1145 				 ecf_flags)
1146      int num_actuals ATTRIBUTE_UNUSED;
1147      struct arg_data *args;
1148      struct args_size *args_size;
1149      int n_named_args ATTRIBUTE_UNUSED;
1150      tree actparms;
1151      tree fndecl;
1152      CUMULATIVE_ARGS *args_so_far;
1153      int reg_parm_stack_space;
1154      rtx *old_stack_level;
1155      int *old_pending_adj;
1156      int *must_preallocate;
1157      int *ecf_flags;
1158 {
1159   /* 1 if scanning parms front to back, -1 if scanning back to front.  */
1160   int inc;
1161 
1162   /* Count arg position in order args appear.  */
1163   int argpos;
1164 
1165   struct args_size alignment_pad;
1166   int i;
1167   tree p;
1168 
1169   args_size->constant = 0;
1170   args_size->var = 0;
1171 
1172   /* In this loop, we consider args in the order they are written.
1173      We fill up ARGS from the front or from the back if necessary
1174      so that in any case the first arg to be pushed ends up at the front.  */
1175 
1176   if (PUSH_ARGS_REVERSED)
1177     {
1178       i = num_actuals - 1, inc = -1;
1179       /* In this case, must reverse order of args
1180 	 so that we compute and push the last arg first.  */
1181     }
1182   else
1183     {
1184       i = 0, inc = 1;
1185     }
1186 
1187   /* I counts args in order (to be) pushed; ARGPOS counts in order written.  */
1188   for (p = actparms, argpos = 0; p; p = TREE_CHAIN (p), i += inc, argpos++)
1189     {
1190       tree type = TREE_TYPE (TREE_VALUE (p));
1191       int unsignedp;
1192       enum machine_mode mode;
1193 
1194       args[i].tree_value = TREE_VALUE (p);
1195 
1196       /* Replace erroneous argument with constant zero.  */
1197       if (type == error_mark_node || !COMPLETE_TYPE_P (type))
1198 	args[i].tree_value = integer_zero_node, type = integer_type_node;
1199 
1200       /* If TYPE is a transparent union, pass things the way we would
1201 	 pass the first field of the union.  We have already verified that
1202 	 the modes are the same.  */
1203       if (TREE_CODE (type) == UNION_TYPE && TYPE_TRANSPARENT_UNION (type))
1204 	type = TREE_TYPE (TYPE_FIELDS (type));
1205 
1206       /* Decide where to pass this arg.
1207 
1208 	 args[i].reg is nonzero if all or part is passed in registers.
1209 
1210 	 args[i].partial is nonzero if part but not all is passed in registers,
1211 	 and the exact value says how many words are passed in registers.
1212 
1213 	 args[i].pass_on_stack is nonzero if the argument must at least be
1214 	 computed on the stack.  It may then be loaded back into registers
1215 	 if args[i].reg is nonzero.
1216 
1217 	 These decisions are driven by the FUNCTION_... macros and must agree
1218 	 with those made by function.c.  */
1219 
1220       /* See if this argument should be passed by invisible reference.  */
1221       if ((TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1222 	   && contains_placeholder_p (TYPE_SIZE (type)))
1223 	  || TREE_ADDRESSABLE (type)
1224 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
1225 	  || FUNCTION_ARG_PASS_BY_REFERENCE (*args_so_far, TYPE_MODE (type),
1226 					     type, argpos < n_named_args)
1227 #endif
1228 	  )
1229 	{
1230 	  /* If we're compiling a thunk, pass through invisible
1231              references instead of making a copy.  */
1232 	  if (current_function_is_thunk
1233 #ifdef FUNCTION_ARG_CALLEE_COPIES
1234 	      || (FUNCTION_ARG_CALLEE_COPIES (*args_so_far, TYPE_MODE (type),
1235 					     type, argpos < n_named_args)
1236 		  /* If it's in a register, we must make a copy of it too.  */
1237 		  /* ??? Is this a sufficient test?  Is there a better one? */
1238 		  && !(TREE_CODE (args[i].tree_value) == VAR_DECL
1239 		       && REG_P (DECL_RTL (args[i].tree_value)))
1240 		  && ! TREE_ADDRESSABLE (type))
1241 #endif
1242 	      )
1243 	    {
1244 	      /* C++ uses a TARGET_EXPR to indicate that we want to make a
1245 	         new object from the argument.  If we are passing by
1246 	         invisible reference, the callee will do that for us, so we
1247 	         can strip off the TARGET_EXPR.  This is not always safe,
1248 	         but it is safe in the only case where this is a useful
1249 	         optimization; namely, when the argument is a plain object.
1250 	         In that case, the frontend is just asking the backend to
1251 	         make a bitwise copy of the argument.  */
1252 
1253 	      if (TREE_CODE (args[i].tree_value) == TARGET_EXPR
1254 		  && (DECL_P (TREE_OPERAND (args[i].tree_value, 1)))
1255 		  && ! REG_P (DECL_RTL (TREE_OPERAND (args[i].tree_value, 1))))
1256 		args[i].tree_value = TREE_OPERAND (args[i].tree_value, 1);
1257 
1258 	      args[i].tree_value = build1 (ADDR_EXPR,
1259 					   build_pointer_type (type),
1260 					   args[i].tree_value);
1261 	      type = build_pointer_type (type);
1262 	    }
1263 	  else if (TREE_CODE (args[i].tree_value) == TARGET_EXPR)
1264 	    {
1265 	      /* In the V3 C++ ABI, parameters are destroyed in the caller.
1266 		 We implement this by passing the address of the temporary
1267 	         rather than expanding it into another allocated slot.  */
1268 	      args[i].tree_value = build1 (ADDR_EXPR,
1269 					   build_pointer_type (type),
1270 					   args[i].tree_value);
1271 	      type = build_pointer_type (type);
1272 	    }
1273 	  else
1274 	    {
1275 	      /* We make a copy of the object and pass the address to the
1276 		 function being called.  */
1277 	      rtx copy;
1278 
1279 	      if (!COMPLETE_TYPE_P (type)
1280 		  || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
1281 		  || (flag_stack_check && ! STACK_CHECK_BUILTIN
1282 		      && (0 < compare_tree_int (TYPE_SIZE_UNIT (type),
1283 						STACK_CHECK_MAX_VAR_SIZE))))
1284 		{
1285 		  /* This is a variable-sized object.  Make space on the stack
1286 		     for it.  */
1287 		  rtx size_rtx = expr_size (TREE_VALUE (p));
1288 
1289 		  if (*old_stack_level == 0)
1290 		    {
1291 		      emit_stack_save (SAVE_BLOCK, old_stack_level, NULL_RTX);
1292 		      *old_pending_adj = pending_stack_adjust;
1293 		      pending_stack_adjust = 0;
1294 		    }
1295 
1296 		  copy = gen_rtx_MEM (BLKmode,
1297 				      allocate_dynamic_stack_space
1298 				      (size_rtx, NULL_RTX, TYPE_ALIGN (type)));
1299 		  set_mem_attributes (copy, type, 1);
1300 		}
1301 	      else
1302 		copy = assign_temp (type, 0, 1, 0);
1303 
1304 	      store_expr (args[i].tree_value, copy, 0);
1305 	      *ecf_flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
1306 
1307 	      args[i].tree_value = build1 (ADDR_EXPR,
1308 					   build_pointer_type (type),
1309 					   make_tree (type, copy));
1310 	      type = build_pointer_type (type);
1311 	    }
1312 	}
1313 
1314       mode = TYPE_MODE (type);
1315       unsignedp = TREE_UNSIGNED (type);
1316 
1317 #ifdef PROMOTE_FUNCTION_ARGS
1318       mode = promote_mode (type, mode, &unsignedp, 1);
1319 #endif
1320 
1321       args[i].unsignedp = unsignedp;
1322       args[i].mode = mode;
1323 
1324       args[i].reg = FUNCTION_ARG (*args_so_far, mode, type,
1325 				  argpos < n_named_args);
1326 #ifdef FUNCTION_INCOMING_ARG
1327       /* If this is a sibling call and the machine has register windows, the
1328 	 register window has to be unwinded before calling the routine, so
1329 	 arguments have to go into the incoming registers.  */
1330       args[i].tail_call_reg = FUNCTION_INCOMING_ARG (*args_so_far, mode, type,
1331 						     argpos < n_named_args);
1332 #else
1333       args[i].tail_call_reg = args[i].reg;
1334 #endif
1335 
1336 #ifdef FUNCTION_ARG_PARTIAL_NREGS
1337       if (args[i].reg)
1338 	args[i].partial
1339 	  = FUNCTION_ARG_PARTIAL_NREGS (*args_so_far, mode, type,
1340 					argpos < n_named_args);
1341 #endif
1342 
1343       args[i].pass_on_stack = MUST_PASS_IN_STACK (mode, type);
1344 
1345       /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
1346 	 it means that we are to pass this arg in the register(s) designated
1347 	 by the PARALLEL, but also to pass it in the stack.  */
1348       if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
1349 	  && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
1350 	args[i].pass_on_stack = 1;
1351 
1352       /* If this is an addressable type, we must preallocate the stack
1353 	 since we must evaluate the object into its final location.
1354 
1355 	 If this is to be passed in both registers and the stack, it is simpler
1356 	 to preallocate.  */
1357       if (TREE_ADDRESSABLE (type)
1358 	  || (args[i].pass_on_stack && args[i].reg != 0))
1359 	*must_preallocate = 1;
1360 
1361       /* If this is an addressable type, we cannot pre-evaluate it.  Thus,
1362 	 we cannot consider this function call constant.  */
1363       if (TREE_ADDRESSABLE (type))
1364 	*ecf_flags &= ~ECF_LIBCALL_BLOCK;
1365 
1366       /* Compute the stack-size of this argument.  */
1367       if (args[i].reg == 0 || args[i].partial != 0
1368 	  || reg_parm_stack_space > 0
1369 	  || args[i].pass_on_stack)
1370 	locate_and_pad_parm (mode, type,
1371 #ifdef STACK_PARMS_IN_REG_PARM_AREA
1372 			     1,
1373 #else
1374 			     args[i].reg != 0,
1375 #endif
1376 			     fndecl, args_size, &args[i].offset,
1377 			     &args[i].size, &alignment_pad);
1378 
1379 #ifndef ARGS_GROW_DOWNWARD
1380       args[i].slot_offset = *args_size;
1381 #endif
1382 
1383       args[i].alignment_pad = alignment_pad;
1384 
1385       /* If a part of the arg was put into registers,
1386 	 don't include that part in the amount pushed.  */
1387       if (reg_parm_stack_space == 0 && ! args[i].pass_on_stack)
1388 	args[i].size.constant -= ((args[i].partial * UNITS_PER_WORD)
1389 				  / (PARM_BOUNDARY / BITS_PER_UNIT)
1390 				  * (PARM_BOUNDARY / BITS_PER_UNIT));
1391 
1392       /* Update ARGS_SIZE, the total stack space for args so far.  */
1393 
1394       args_size->constant += args[i].size.constant;
1395       if (args[i].size.var)
1396 	{
1397 	  ADD_PARM_SIZE (*args_size, args[i].size.var);
1398 	}
1399 
1400       /* Since the slot offset points to the bottom of the slot,
1401 	 we must record it after incrementing if the args grow down.  */
1402 #ifdef ARGS_GROW_DOWNWARD
1403       args[i].slot_offset = *args_size;
1404 
1405       args[i].slot_offset.constant = -args_size->constant;
1406       if (args_size->var)
1407 	SUB_PARM_SIZE (args[i].slot_offset, args_size->var);
1408 #endif
1409 
1410       /* Increment ARGS_SO_FAR, which has info about which arg-registers
1411 	 have been used, etc.  */
1412 
1413       FUNCTION_ARG_ADVANCE (*args_so_far, TYPE_MODE (type), type,
1414 			    argpos < n_named_args);
1415     }
1416 }
1417 
1418 /* Update ARGS_SIZE to contain the total size for the argument block.
1419    Return the original constant component of the argument block's size.
1420 
1421    REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
1422    for arguments passed in registers.  */
1423 
1424 static int
compute_argument_block_size(reg_parm_stack_space,args_size,preferred_stack_boundary)1425 compute_argument_block_size (reg_parm_stack_space, args_size,
1426 			     preferred_stack_boundary)
1427      int reg_parm_stack_space;
1428      struct args_size *args_size;
1429      int preferred_stack_boundary ATTRIBUTE_UNUSED;
1430 {
1431   int unadjusted_args_size = args_size->constant;
1432 
1433   /* For accumulate outgoing args mode we don't need to align, since the frame
1434      will be already aligned.  Align to STACK_BOUNDARY in order to prevent
1435      backends from generating misaligned frame sizes.  */
1436   if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
1437     preferred_stack_boundary = STACK_BOUNDARY;
1438 
1439   /* Compute the actual size of the argument block required.  The variable
1440      and constant sizes must be combined, the size may have to be rounded,
1441      and there may be a minimum required size.  */
1442 
1443   if (args_size->var)
1444     {
1445       args_size->var = ARGS_SIZE_TREE (*args_size);
1446       args_size->constant = 0;
1447 
1448       preferred_stack_boundary /= BITS_PER_UNIT;
1449       if (preferred_stack_boundary > 1)
1450 	{
1451 	  /* We don't handle this case yet.  To handle it correctly we have
1452 	     to add the delta, round and subtract the delta.
1453 	     Currently no machine description requires this support.  */
1454 	  if (stack_pointer_delta & (preferred_stack_boundary - 1))
1455 	    abort ();
1456 	  args_size->var = round_up (args_size->var, preferred_stack_boundary);
1457 	}
1458 
1459       if (reg_parm_stack_space > 0)
1460 	{
1461 	  args_size->var
1462 	    = size_binop (MAX_EXPR, args_size->var,
1463 			  ssize_int (reg_parm_stack_space));
1464 
1465 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1466 	  /* The area corresponding to register parameters is not to count in
1467 	     the size of the block we need.  So make the adjustment.  */
1468 	  args_size->var
1469 	    = size_binop (MINUS_EXPR, args_size->var,
1470 			  ssize_int (reg_parm_stack_space));
1471 #endif
1472 	}
1473     }
1474   else
1475     {
1476       preferred_stack_boundary /= BITS_PER_UNIT;
1477       if (preferred_stack_boundary < 1)
1478 	preferred_stack_boundary = 1;
1479       args_size->constant = (((args_size->constant
1480 			       + stack_pointer_delta
1481 			       + preferred_stack_boundary - 1)
1482 			      / preferred_stack_boundary
1483 			      * preferred_stack_boundary)
1484 			     - stack_pointer_delta);
1485 
1486       args_size->constant = MAX (args_size->constant,
1487 				 reg_parm_stack_space);
1488 
1489 #ifdef MAYBE_REG_PARM_STACK_SPACE
1490       if (reg_parm_stack_space == 0)
1491 	args_size->constant = 0;
1492 #endif
1493 
1494 #ifndef OUTGOING_REG_PARM_STACK_SPACE
1495       args_size->constant -= reg_parm_stack_space;
1496 #endif
1497     }
1498   return unadjusted_args_size;
1499 }
1500 
1501 /* Precompute parameters as needed for a function call.
1502 
1503    FLAGS is mask of ECF_* constants.
1504 
1505    NUM_ACTUALS is the number of arguments.
1506 
1507    ARGS is an array containing information for each argument; this
1508    routine fills in the INITIAL_VALUE and VALUE fields for each
1509    precomputed argument.  */
1510 
1511 static void
precompute_arguments(flags,num_actuals,args)1512 precompute_arguments (flags, num_actuals, args)
1513      int flags;
1514      int num_actuals;
1515      struct arg_data *args;
1516 {
1517   int i;
1518 
1519   /* If this function call is cse'able, precompute all the parameters.
1520      Note that if the parameter is constructed into a temporary, this will
1521      cause an additional copy because the parameter will be constructed
1522      into a temporary location and then copied into the outgoing arguments.
1523      If a parameter contains a call to alloca and this function uses the
1524      stack, precompute the parameter.  */
1525 
1526   /* If we preallocated the stack space, and some arguments must be passed
1527      on the stack, then we must precompute any parameter which contains a
1528      function call which will store arguments on the stack.
1529      Otherwise, evaluating the parameter may clobber previous parameters
1530      which have already been stored into the stack.  (we have code to avoid
1531      such case by saving the outgoing stack arguments, but it results in
1532      worse code)  */
1533 
1534   for (i = 0; i < num_actuals; i++)
1535     if ((flags & ECF_LIBCALL_BLOCK)
1536 	|| calls_function (args[i].tree_value, !ACCUMULATE_OUTGOING_ARGS))
1537       {
1538 	enum machine_mode mode;
1539 
1540 	/* If this is an addressable type, we cannot pre-evaluate it.  */
1541 	if (TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value)))
1542 	  abort ();
1543 
1544 	args[i].value
1545 	  = expand_expr (args[i].tree_value, NULL_RTX, VOIDmode, 0);
1546 
1547 	/* ANSI doesn't require a sequence point here,
1548 	   but PCC has one, so this will avoid some problems.  */
1549 	emit_queue ();
1550 
1551 	args[i].initial_value = args[i].value
1552 	  = protect_from_queue (args[i].value, 0);
1553 
1554 	mode = TYPE_MODE (TREE_TYPE (args[i].tree_value));
1555 	if (mode != args[i].mode)
1556 	  {
1557 	    args[i].value
1558 	      = convert_modes (args[i].mode, mode,
1559 			       args[i].value, args[i].unsignedp);
1560 #ifdef PROMOTE_FOR_CALL_ONLY
1561 	    /* CSE will replace this only if it contains args[i].value
1562 	       pseudo, so convert it down to the declared mode using
1563 	       a SUBREG.  */
1564 	    if (GET_CODE (args[i].value) == REG
1565 		&& GET_MODE_CLASS (args[i].mode) == MODE_INT)
1566 	      {
1567 		args[i].initial_value
1568 		  = gen_lowpart_SUBREG (mode, args[i].value);
1569 		SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
1570 		SUBREG_PROMOTED_UNSIGNED_SET (args[i].initial_value,
1571 		  args[i].unsignedp);
1572 	      }
1573 #endif
1574 	  }
1575       }
1576 }
1577 
1578 /* Given the current state of MUST_PREALLOCATE and information about
1579    arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
1580    compute and return the final value for MUST_PREALLOCATE.  */
1581 
1582 static int
finalize_must_preallocate(must_preallocate,num_actuals,args,args_size)1583 finalize_must_preallocate (must_preallocate, num_actuals, args, args_size)
1584      int must_preallocate;
1585      int num_actuals;
1586      struct arg_data *args;
1587      struct args_size *args_size;
1588 {
1589   /* See if we have or want to preallocate stack space.
1590 
1591      If we would have to push a partially-in-regs parm
1592      before other stack parms, preallocate stack space instead.
1593 
1594      If the size of some parm is not a multiple of the required stack
1595      alignment, we must preallocate.
1596 
1597      If the total size of arguments that would otherwise create a copy in
1598      a temporary (such as a CALL) is more than half the total argument list
1599      size, preallocation is faster.
1600 
1601      Another reason to preallocate is if we have a machine (like the m88k)
1602      where stack alignment is required to be maintained between every
1603      pair of insns, not just when the call is made.  However, we assume here
1604      that such machines either do not have push insns (and hence preallocation
1605      would occur anyway) or the problem is taken care of with
1606      PUSH_ROUNDING.  */
1607 
1608   if (! must_preallocate)
1609     {
1610       int partial_seen = 0;
1611       int copy_to_evaluate_size = 0;
1612       int i;
1613 
1614       for (i = 0; i < num_actuals && ! must_preallocate; i++)
1615 	{
1616 	  if (args[i].partial > 0 && ! args[i].pass_on_stack)
1617 	    partial_seen = 1;
1618 	  else if (partial_seen && args[i].reg == 0)
1619 	    must_preallocate = 1;
1620 
1621 	  if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1622 	      && (TREE_CODE (args[i].tree_value) == CALL_EXPR
1623 		  || TREE_CODE (args[i].tree_value) == TARGET_EXPR
1624 		  || TREE_CODE (args[i].tree_value) == COND_EXPR
1625 		  || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
1626 	    copy_to_evaluate_size
1627 	      += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1628 	}
1629 
1630       if (copy_to_evaluate_size * 2 >= args_size->constant
1631 	  && args_size->constant > 0)
1632 	must_preallocate = 1;
1633     }
1634   return must_preallocate;
1635 }
1636 
1637 /* If we preallocated stack space, compute the address of each argument
1638    and store it into the ARGS array.
1639 
1640    We need not ensure it is a valid memory address here; it will be
1641    validized when it is used.
1642 
1643    ARGBLOCK is an rtx for the address of the outgoing arguments.  */
1644 
1645 static void
compute_argument_addresses(args,argblock,num_actuals)1646 compute_argument_addresses (args, argblock, num_actuals)
1647      struct arg_data *args;
1648      rtx argblock;
1649      int num_actuals;
1650 {
1651   if (argblock)
1652     {
1653       rtx arg_reg = argblock;
1654       int i, arg_offset = 0;
1655 
1656       if (GET_CODE (argblock) == PLUS)
1657 	arg_reg = XEXP (argblock, 0), arg_offset = INTVAL (XEXP (argblock, 1));
1658 
1659       for (i = 0; i < num_actuals; i++)
1660 	{
1661 	  rtx offset = ARGS_SIZE_RTX (args[i].offset);
1662 	  rtx slot_offset = ARGS_SIZE_RTX (args[i].slot_offset);
1663 	  rtx addr;
1664 
1665 	  /* Skip this parm if it will not be passed on the stack.  */
1666 	  if (! args[i].pass_on_stack && args[i].reg != 0)
1667 	    continue;
1668 
1669 	  if (GET_CODE (offset) == CONST_INT)
1670 	    addr = plus_constant (arg_reg, INTVAL (offset));
1671 	  else
1672 	    addr = gen_rtx_PLUS (Pmode, arg_reg, offset);
1673 
1674 	  addr = plus_constant (addr, arg_offset);
1675 	  args[i].stack = gen_rtx_MEM (args[i].mode, addr);
1676 	  set_mem_align (args[i].stack, PARM_BOUNDARY);
1677 	  set_mem_attributes (args[i].stack,
1678 			      TREE_TYPE (args[i].tree_value), 1);
1679 
1680 	  if (GET_CODE (slot_offset) == CONST_INT)
1681 	    addr = plus_constant (arg_reg, INTVAL (slot_offset));
1682 	  else
1683 	    addr = gen_rtx_PLUS (Pmode, arg_reg, slot_offset);
1684 
1685 	  addr = plus_constant (addr, arg_offset);
1686 	  args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
1687 	  set_mem_align (args[i].stack_slot, PARM_BOUNDARY);
1688 	  set_mem_attributes (args[i].stack_slot,
1689 			      TREE_TYPE (args[i].tree_value), 1);
1690 
1691 	  /* Function incoming arguments may overlap with sibling call
1692 	     outgoing arguments and we cannot allow reordering of reads
1693 	     from function arguments with stores to outgoing arguments
1694 	     of sibling calls.  */
1695 	  set_mem_alias_set (args[i].stack, 0);
1696 	  set_mem_alias_set (args[i].stack_slot, 0);
1697 	}
1698     }
1699 }
1700 
1701 /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
1702    in a call instruction.
1703 
1704    FNDECL is the tree node for the target function.  For an indirect call
1705    FNDECL will be NULL_TREE.
1706 
1707    ADDR is the operand 0 of CALL_EXPR for this call.  */
1708 
1709 static rtx
rtx_for_function_call(fndecl,addr)1710 rtx_for_function_call (fndecl, addr)
1711      tree fndecl;
1712      tree addr;
1713 {
1714   rtx funexp;
1715 
1716   /* Get the function to call, in the form of RTL.  */
1717   if (fndecl)
1718     {
1719       /* If this is the first use of the function, see if we need to
1720 	 make an external definition for it.  */
1721       if (! TREE_USED (fndecl))
1722 	{
1723 	  assemble_external (fndecl);
1724 	  TREE_USED (fndecl) = 1;
1725 	}
1726 
1727       /* Get a SYMBOL_REF rtx for the function address.  */
1728       funexp = XEXP (DECL_RTL (fndecl), 0);
1729     }
1730   else
1731     /* Generate an rtx (probably a pseudo-register) for the address.  */
1732     {
1733       rtx funaddr;
1734       push_temp_slots ();
1735       funaddr = funexp
1736 	= expand_expr (addr, NULL_RTX, VOIDmode, 0);
1737       pop_temp_slots ();	/* FUNEXP can't be BLKmode.  */
1738       emit_queue ();
1739     }
1740   return funexp;
1741 }
1742 
1743 /* Do the register loads required for any wholly-register parms or any
1744    parms which are passed both on the stack and in a register.  Their
1745    expressions were already evaluated.
1746 
1747    Mark all register-parms as living through the call, putting these USE
1748    insns in the CALL_INSN_FUNCTION_USAGE field.  */
1749 
1750 static void
load_register_parameters(args,num_actuals,call_fusage,flags)1751 load_register_parameters (args, num_actuals, call_fusage, flags)
1752      struct arg_data *args;
1753      int num_actuals;
1754      rtx *call_fusage;
1755      int flags;
1756 {
1757   int i, j;
1758 
1759 #ifdef LOAD_ARGS_REVERSED
1760   for (i = num_actuals - 1; i >= 0; i--)
1761 #else
1762   for (i = 0; i < num_actuals; i++)
1763 #endif
1764     {
1765       rtx reg = ((flags & ECF_SIBCALL)
1766 		 ? args[i].tail_call_reg : args[i].reg);
1767       int partial = args[i].partial;
1768       int nregs;
1769 
1770       if (reg)
1771 	{
1772 	  /* Set to non-negative if must move a word at a time, even if just
1773 	     one word (e.g, partial == 1 && mode == DFmode).  Set to -1 if
1774 	     we just use a normal move insn.  This value can be zero if the
1775 	     argument is a zero size structure with no fields.  */
1776 	  nregs = (partial ? partial
1777 		   : (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
1778 		      ? ((int_size_in_bytes (TREE_TYPE (args[i].tree_value))
1779 			  + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
1780 		      : -1));
1781 
1782 	  /* Handle calls that pass values in multiple non-contiguous
1783 	     locations.  The Irix 6 ABI has examples of this.  */
1784 
1785 	  if (GET_CODE (reg) == PARALLEL)
1786 	    emit_group_load (reg, args[i].value,
1787 			     int_size_in_bytes (TREE_TYPE (args[i].tree_value)));
1788 
1789 	  /* If simple case, just do move.  If normal partial, store_one_arg
1790 	     has already loaded the register for us.  In all other cases,
1791 	     load the register(s) from memory.  */
1792 
1793 	  else if (nregs == -1)
1794 	    emit_move_insn (reg, args[i].value);
1795 
1796 	  /* If we have pre-computed the values to put in the registers in
1797 	     the case of non-aligned structures, copy them in now.  */
1798 
1799 	  else if (args[i].n_aligned_regs != 0)
1800 	    for (j = 0; j < args[i].n_aligned_regs; j++)
1801 	      emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
1802 			      args[i].aligned_regs[j]);
1803 
1804 	  else if (partial == 0 || args[i].pass_on_stack)
1805 	    move_block_to_reg (REGNO (reg),
1806 			       validize_mem (args[i].value), nregs,
1807 			       args[i].mode);
1808 
1809 	  /* Handle calls that pass values in multiple non-contiguous
1810 	     locations.  The Irix 6 ABI has examples of this.  */
1811 	  if (GET_CODE (reg) == PARALLEL)
1812 	    use_group_regs (call_fusage, reg);
1813 	  else if (nregs == -1)
1814 	    use_reg (call_fusage, reg);
1815 	  else
1816 	    use_regs (call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);
1817 	}
1818     }
1819 }
1820 
1821 /* Try to integrate function.  See expand_inline_function for documentation
1822    about the parameters.  */
1823 
1824 static rtx
try_to_integrate(fndecl,actparms,target,ignore,type,structure_value_addr)1825 try_to_integrate (fndecl, actparms, target, ignore, type, structure_value_addr)
1826      tree fndecl;
1827      tree actparms;
1828      rtx target;
1829      int ignore;
1830      tree type;
1831      rtx structure_value_addr;
1832 {
1833   rtx temp;
1834   rtx before_call;
1835   int i;
1836   rtx old_stack_level = 0;
1837   int reg_parm_stack_space = 0;
1838 
1839 #ifdef REG_PARM_STACK_SPACE
1840 #ifdef MAYBE_REG_PARM_STACK_SPACE
1841   reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
1842 #else
1843   reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
1844 #endif
1845 #endif
1846 
1847   before_call = get_last_insn ();
1848 
1849   timevar_push (TV_INTEGRATION);
1850 
1851   temp = expand_inline_function (fndecl, actparms, target,
1852 				 ignore, type,
1853 				 structure_value_addr);
1854 
1855   timevar_pop (TV_INTEGRATION);
1856 
1857   /* If inlining succeeded, return.  */
1858   if (temp != (rtx) (size_t) - 1)
1859     {
1860       if (ACCUMULATE_OUTGOING_ARGS)
1861 	{
1862 	  /* If the outgoing argument list must be preserved, push
1863 	     the stack before executing the inlined function if it
1864 	     makes any calls.  */
1865 
1866 	  for (i = reg_parm_stack_space - 1; i >= 0; i--)
1867 	    if (i < highest_outgoing_arg_in_use && stack_usage_map[i] != 0)
1868 	      break;
1869 
1870 	  if (stack_arg_under_construction || i >= 0)
1871 	    {
1872 	      rtx first_insn
1873 		= before_call ? NEXT_INSN (before_call) : get_insns ();
1874 	      rtx insn = NULL_RTX, seq;
1875 
1876 	      /* Look for a call in the inline function code.
1877 	         If DECL_SAVED_INSNS (fndecl)->outgoing_args_size is
1878 	         nonzero then there is a call and it is not necessary
1879 	         to scan the insns.  */
1880 
1881 	      if (DECL_SAVED_INSNS (fndecl)->outgoing_args_size == 0)
1882 		for (insn = first_insn; insn; insn = NEXT_INSN (insn))
1883 		  if (GET_CODE (insn) == CALL_INSN)
1884 		    break;
1885 
1886 	      if (insn)
1887 		{
1888 		  /* Reserve enough stack space so that the largest
1889 		     argument list of any function call in the inline
1890 		     function does not overlap the argument list being
1891 		     evaluated.  This is usually an overestimate because
1892 		     allocate_dynamic_stack_space reserves space for an
1893 		     outgoing argument list in addition to the requested
1894 		     space, but there is no way to ask for stack space such
1895 		     that an argument list of a certain length can be
1896 		     safely constructed.
1897 
1898 		     Add the stack space reserved for register arguments, if
1899 		     any, in the inline function.  What is really needed is the
1900 		     largest value of reg_parm_stack_space in the inline
1901 		     function, but that is not available.  Using the current
1902 		     value of reg_parm_stack_space is wrong, but gives
1903 		     correct results on all supported machines.  */
1904 
1905 		  int adjust = (DECL_SAVED_INSNS (fndecl)->outgoing_args_size
1906 				+ reg_parm_stack_space);
1907 
1908 		  start_sequence ();
1909 		  emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
1910 		  allocate_dynamic_stack_space (GEN_INT (adjust),
1911 						NULL_RTX, BITS_PER_UNIT);
1912 		  seq = get_insns ();
1913 		  end_sequence ();
1914 		  emit_insn_before (seq, first_insn);
1915 		  emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
1916 		}
1917 	    }
1918 	}
1919 
1920       /* If the result is equivalent to TARGET, return TARGET to simplify
1921          checks in store_expr.  They can be equivalent but not equal in the
1922          case of a function that returns BLKmode.  */
1923       if (temp != target && rtx_equal_p (temp, target))
1924 	return target;
1925       return temp;
1926     }
1927 
1928   /* If inlining failed, mark FNDECL as needing to be compiled
1929      separately after all.  If function was declared inline,
1930      give a warning.  */
1931   if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
1932       && optimize > 0 && !TREE_ADDRESSABLE (fndecl))
1933     {
1934       warning_with_decl (fndecl, "inlining failed in call to `%s'");
1935       warning ("called from here");
1936     }
1937   (*lang_hooks.mark_addressable) (fndecl);
1938   return (rtx) (size_t) - 1;
1939 }
1940 
1941 /* We need to pop PENDING_STACK_ADJUST bytes.  But, if the arguments
1942    wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
1943    bytes, then we would need to push some additional bytes to pad the
1944    arguments.  So, we compute an adjust to the stack pointer for an
1945    amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
1946    bytes.  Then, when the arguments are pushed the stack will be perfectly
1947    aligned.  ARGS_SIZE->CONSTANT is set to the number of bytes that should
1948    be popped after the call.  Returns the adjustment.  */
1949 
1950 static int
combine_pending_stack_adjustment_and_call(unadjusted_args_size,args_size,preferred_unit_stack_boundary)1951 combine_pending_stack_adjustment_and_call (unadjusted_args_size,
1952 					   args_size,
1953 					   preferred_unit_stack_boundary)
1954      int unadjusted_args_size;
1955      struct args_size *args_size;
1956      int preferred_unit_stack_boundary;
1957 {
1958   /* The number of bytes to pop so that the stack will be
1959      under-aligned by UNADJUSTED_ARGS_SIZE bytes.  */
1960   HOST_WIDE_INT adjustment;
1961   /* The alignment of the stack after the arguments are pushed, if we
1962      just pushed the arguments without adjust the stack here.  */
1963   HOST_WIDE_INT unadjusted_alignment;
1964 
1965   unadjusted_alignment
1966     = ((stack_pointer_delta + unadjusted_args_size)
1967        % preferred_unit_stack_boundary);
1968 
1969   /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
1970      as possible -- leaving just enough left to cancel out the
1971      UNADJUSTED_ALIGNMENT.  In other words, we want to ensure that the
1972      PENDING_STACK_ADJUST is non-negative, and congruent to
1973      -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY.  */
1974 
1975   /* Begin by trying to pop all the bytes.  */
1976   unadjusted_alignment
1977     = (unadjusted_alignment
1978        - (pending_stack_adjust % preferred_unit_stack_boundary));
1979   adjustment = pending_stack_adjust;
1980   /* Push enough additional bytes that the stack will be aligned
1981      after the arguments are pushed.  */
1982   if (preferred_unit_stack_boundary > 1)
1983     {
1984       if (unadjusted_alignment > 0)
1985 	adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
1986       else
1987 	adjustment += unadjusted_alignment;
1988     }
1989 
1990   /* Now, sets ARGS_SIZE->CONSTANT so that we pop the right number of
1991      bytes after the call.  The right number is the entire
1992      PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
1993      by the arguments in the first place.  */
1994   args_size->constant
1995     = pending_stack_adjust - adjustment + unadjusted_args_size;
1996 
1997   return adjustment;
1998 }
1999 
2000 /* Scan X expression if it does not dereference any argument slots
2001    we already clobbered by tail call arguments (as noted in stored_args_map
2002    bitmap).
2003    Return nonzero if X expression dereferences such argument slots,
2004    zero otherwise.  */
2005 
2006 static int
check_sibcall_argument_overlap_1(x)2007 check_sibcall_argument_overlap_1 (x)
2008      rtx x;
2009 {
2010   RTX_CODE code;
2011   int i, j;
2012   unsigned int k;
2013   const char *fmt;
2014 
2015   if (x == NULL_RTX)
2016     return 0;
2017 
2018   code = GET_CODE (x);
2019 
2020   if (code == MEM)
2021     {
2022       if (XEXP (x, 0) == current_function_internal_arg_pointer)
2023 	i = 0;
2024       else if (GET_CODE (XEXP (x, 0)) == PLUS
2025 	       && XEXP (XEXP (x, 0), 0) ==
2026 		  current_function_internal_arg_pointer
2027 	       && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2028 	i = INTVAL (XEXP (XEXP (x, 0), 1));
2029       else
2030 	return 0;
2031 
2032 #ifdef ARGS_GROW_DOWNWARD
2033       i = -i - GET_MODE_SIZE (GET_MODE (x));
2034 #endif
2035 
2036       for (k = 0; k < GET_MODE_SIZE (GET_MODE (x)); k++)
2037 	if (i + k < stored_args_map->n_bits
2038 	    && TEST_BIT (stored_args_map, i + k))
2039 	  return 1;
2040 
2041       return 0;
2042     }
2043 
2044   /* Scan all subexpressions.  */
2045   fmt = GET_RTX_FORMAT (code);
2046   for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2047     {
2048       if (*fmt == 'e')
2049 	{
2050 	  if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
2051 	    return 1;
2052 	}
2053       else if (*fmt == 'E')
2054 	{
2055 	  for (j = 0; j < XVECLEN (x, i); j++)
2056 	    if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
2057 	      return 1;
2058 	}
2059     }
2060   return 0;
2061 }
2062 
2063 /* Scan sequence after INSN if it does not dereference any argument slots
2064    we already clobbered by tail call arguments (as noted in stored_args_map
2065    bitmap).  Add stack slots for ARG to stored_args_map bitmap afterwards.
2066    Return nonzero if sequence after INSN dereferences such argument slots,
2067    zero otherwise.  */
2068 
2069 static int
check_sibcall_argument_overlap(insn,arg)2070 check_sibcall_argument_overlap (insn, arg)
2071      rtx insn;
2072      struct arg_data *arg;
2073 {
2074   int low, high;
2075 
2076   if (insn == NULL_RTX)
2077     insn = get_insns ();
2078   else
2079     insn = NEXT_INSN (insn);
2080 
2081   for (; insn; insn = NEXT_INSN (insn))
2082     if (INSN_P (insn)
2083 	&& check_sibcall_argument_overlap_1 (PATTERN (insn)))
2084       break;
2085 
2086 #ifdef ARGS_GROW_DOWNWARD
2087   low = -arg->slot_offset.constant - arg->size.constant;
2088 #else
2089   low = arg->slot_offset.constant;
2090 #endif
2091 
2092   for (high = low + arg->size.constant; low < high; low++)
2093     SET_BIT (stored_args_map, low);
2094   return insn != NULL_RTX;
2095 }
2096 
2097 static tree
fix_unsafe_tree(t)2098 fix_unsafe_tree (t)
2099      tree t;
2100 {
2101   switch (unsafe_for_reeval (t))
2102     {
2103     case 0: /* Safe.  */
2104       break;
2105 
2106     case 1: /* Mildly unsafe.  */
2107       t = unsave_expr (t);
2108       break;
2109 
2110     case 2: /* Wildly unsafe.  */
2111       {
2112 	tree var = build_decl (VAR_DECL, NULL_TREE,
2113 			       TREE_TYPE (t));
2114 	SET_DECL_RTL (var,
2115 		      expand_expr (t, NULL_RTX, VOIDmode, EXPAND_NORMAL));
2116 	t = var;
2117       }
2118       break;
2119 
2120     default:
2121       abort ();
2122     }
2123   return t;
2124 }
2125 
2126 /* Generate all the code for a function call
2127    and return an rtx for its value.
2128    Store the value in TARGET (specified as an rtx) if convenient.
2129    If the value is stored in TARGET then TARGET is returned.
2130    If IGNORE is nonzero, then we ignore the value of the function call.  */
2131 
2132 rtx
expand_call(exp,target,ignore)2133 expand_call (exp, target, ignore)
2134      tree exp;
2135      rtx target;
2136      int ignore;
2137 {
2138   /* Nonzero if we are currently expanding a call.  */
2139   static int currently_expanding_call = 0;
2140 
2141   /* List of actual parameters.  */
2142   tree actparms = TREE_OPERAND (exp, 1);
2143   /* RTX for the function to be called.  */
2144   rtx funexp;
2145   /* Sequence of insns to perform a tail recursive "call".  */
2146   rtx tail_recursion_insns = NULL_RTX;
2147   /* Sequence of insns to perform a normal "call".  */
2148   rtx normal_call_insns = NULL_RTX;
2149   /* Sequence of insns to perform a tail recursive "call".  */
2150   rtx tail_call_insns = NULL_RTX;
2151   /* Data type of the function.  */
2152   tree funtype;
2153   /* Declaration of the function being called,
2154      or 0 if the function is computed (not known by name).  */
2155   tree fndecl = 0;
2156   rtx insn;
2157   int try_tail_call = 1;
2158   int try_tail_recursion = 1;
2159   int pass;
2160 
2161   /* Register in which non-BLKmode value will be returned,
2162      or 0 if no value or if value is BLKmode.  */
2163   rtx valreg;
2164   /* Address where we should return a BLKmode value;
2165      0 if value not BLKmode.  */
2166   rtx structure_value_addr = 0;
2167   /* Nonzero if that address is being passed by treating it as
2168      an extra, implicit first parameter.  Otherwise,
2169      it is passed by being copied directly into struct_value_rtx.  */
2170   int structure_value_addr_parm = 0;
2171   /* Size of aggregate value wanted, or zero if none wanted
2172      or if we are using the non-reentrant PCC calling convention
2173      or expecting the value in registers.  */
2174   HOST_WIDE_INT struct_value_size = 0;
2175   /* Nonzero if called function returns an aggregate in memory PCC style,
2176      by returning the address of where to find it.  */
2177   int pcc_struct_value = 0;
2178 
2179   /* Number of actual parameters in this call, including struct value addr.  */
2180   int num_actuals;
2181   /* Number of named args.  Args after this are anonymous ones
2182      and they must all go on the stack.  */
2183   int n_named_args;
2184 
2185   /* Vector of information about each argument.
2186      Arguments are numbered in the order they will be pushed,
2187      not the order they are written.  */
2188   struct arg_data *args;
2189 
2190   /* Total size in bytes of all the stack-parms scanned so far.  */
2191   struct args_size args_size;
2192   struct args_size adjusted_args_size;
2193   /* Size of arguments before any adjustments (such as rounding).  */
2194   int unadjusted_args_size;
2195   /* Data on reg parms scanned so far.  */
2196   CUMULATIVE_ARGS args_so_far;
2197   /* Nonzero if a reg parm has been scanned.  */
2198   int reg_parm_seen;
2199   /* Nonzero if this is an indirect function call.  */
2200 
2201   /* Nonzero if we must avoid push-insns in the args for this call.
2202      If stack space is allocated for register parameters, but not by the
2203      caller, then it is preallocated in the fixed part of the stack frame.
2204      So the entire argument block must then be preallocated (i.e., we
2205      ignore PUSH_ROUNDING in that case).  */
2206 
2207   int must_preallocate = !PUSH_ARGS;
2208 
2209   /* Size of the stack reserved for parameter registers.  */
2210   int reg_parm_stack_space = 0;
2211 
2212   /* Address of space preallocated for stack parms
2213      (on machines that lack push insns), or 0 if space not preallocated.  */
2214   rtx argblock = 0;
2215 
2216   /* Mask of ECF_ flags.  */
2217   int flags = 0;
2218   /* Nonzero if this is a call to an inline function.  */
2219   int is_integrable = 0;
2220 #ifdef REG_PARM_STACK_SPACE
2221   /* Define the boundary of the register parm stack space that needs to be
2222      save, if any.  */
2223   int low_to_save = -1, high_to_save;
2224   rtx save_area = 0;		/* Place that it is saved */
2225 #endif
2226 
2227   int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
2228   char *initial_stack_usage_map = stack_usage_map;
2229   int old_stack_arg_under_construction = 0;
2230 
2231   rtx old_stack_level = 0;
2232   int old_pending_adj = 0;
2233   int old_inhibit_defer_pop = inhibit_defer_pop;
2234   int old_stack_allocated;
2235   rtx call_fusage;
2236   tree p = TREE_OPERAND (exp, 0);
2237   tree addr = TREE_OPERAND (exp, 0);
2238   int i;
2239   /* The alignment of the stack, in bits.  */
2240   HOST_WIDE_INT preferred_stack_boundary;
2241   /* The alignment of the stack, in bytes.  */
2242   HOST_WIDE_INT preferred_unit_stack_boundary;
2243 
2244   /* See if this is "nothrow" function call.  */
2245   if (TREE_NOTHROW (exp))
2246     flags |= ECF_NOTHROW;
2247 
2248   /* See if we can find a DECL-node for the actual function.
2249      As a result, decide whether this is a call to an integrable function.  */
2250 
2251   fndecl = get_callee_fndecl (exp);
2252   if (fndecl)
2253     {
2254       if (!flag_no_inline
2255 	  && fndecl != current_function_decl
2256 	  && DECL_INLINE (fndecl)
2257 	  && DECL_SAVED_INSNS (fndecl)
2258 	  && DECL_SAVED_INSNS (fndecl)->inlinable)
2259 	is_integrable = 1;
2260       else if (! TREE_ADDRESSABLE (fndecl))
2261 	{
2262 	  /* In case this function later becomes inlinable,
2263 	     record that there was already a non-inline call to it.
2264 
2265 	     Use abstraction instead of setting TREE_ADDRESSABLE
2266 	     directly.  */
2267 	  if (DECL_INLINE (fndecl) && warn_inline && !flag_no_inline
2268 	      && optimize > 0)
2269 	    {
2270 	      warning_with_decl (fndecl, "can't inline call to `%s'");
2271 	      warning ("called from here");
2272 	    }
2273 	  (*lang_hooks.mark_addressable) (fndecl);
2274 	}
2275 
2276       flags |= flags_from_decl_or_type (fndecl);
2277     }
2278 
2279   /* If we don't have specific function to call, see if we have a
2280      attributes set in the type.  */
2281   else
2282     flags |= flags_from_decl_or_type (TREE_TYPE (TREE_TYPE (p)));
2283 
2284 #ifdef REG_PARM_STACK_SPACE
2285 #ifdef MAYBE_REG_PARM_STACK_SPACE
2286   reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
2287 #else
2288   reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
2289 #endif
2290 #endif
2291 
2292 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2293   if (reg_parm_stack_space > 0 && PUSH_ARGS)
2294     must_preallocate = 1;
2295 #endif
2296 
2297   /* Warn if this value is an aggregate type,
2298      regardless of which calling convention we are using for it.  */
2299   if (warn_aggregate_return && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
2300     warning ("function call has aggregate value");
2301 
2302   /* Set up a place to return a structure.  */
2303 
2304   /* Cater to broken compilers.  */
2305   if (aggregate_value_p (exp))
2306     {
2307       /* This call returns a big structure.  */
2308       flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
2309 
2310 #ifdef PCC_STATIC_STRUCT_RETURN
2311       {
2312 	pcc_struct_value = 1;
2313 	/* Easier than making that case work right.  */
2314 	if (is_integrable)
2315 	  {
2316 	    /* In case this is a static function, note that it has been
2317 	       used.  */
2318 	    if (! TREE_ADDRESSABLE (fndecl))
2319 	      (*lang_hooks.mark_addressable) (fndecl);
2320 	    is_integrable = 0;
2321 	  }
2322       }
2323 #else /* not PCC_STATIC_STRUCT_RETURN */
2324       {
2325 	struct_value_size = int_size_in_bytes (TREE_TYPE (exp));
2326 
2327 	if (target && GET_CODE (target) == MEM)
2328 	  structure_value_addr = XEXP (target, 0);
2329 	else
2330 	  {
2331 	    /* For variable-sized objects, we must be called with a target
2332 	       specified.  If we were to allocate space on the stack here,
2333 	       we would have no way of knowing when to free it.  */
2334 	    rtx d = assign_temp (TREE_TYPE (exp), 5, 1, 1);
2335 
2336 	    mark_temp_addr_taken (d);
2337 	    structure_value_addr = XEXP (d, 0);
2338 	    target = 0;
2339 	  }
2340       }
2341 #endif /* not PCC_STATIC_STRUCT_RETURN */
2342     }
2343 
2344   /* If called function is inline, try to integrate it.  */
2345 
2346   if (is_integrable)
2347     {
2348       rtx temp = try_to_integrate (fndecl, actparms, target,
2349 				   ignore, TREE_TYPE (exp),
2350 				   structure_value_addr);
2351       if (temp != (rtx) (size_t) - 1)
2352 	return temp;
2353     }
2354 
2355   /* Figure out the amount to which the stack should be aligned.  */
2356   preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
2357 
2358   /* Operand 0 is a pointer-to-function; get the type of the function.  */
2359   funtype = TREE_TYPE (addr);
2360   if (! POINTER_TYPE_P (funtype))
2361     abort ();
2362   funtype = TREE_TYPE (funtype);
2363 
2364   /* See if this is a call to a function that can return more than once
2365      or a call to longjmp or malloc.  */
2366   flags |= special_function_p (fndecl, flags);
2367 
2368   if (flags & ECF_MAY_BE_ALLOCA)
2369     current_function_calls_alloca = 1;
2370 
2371   /* If struct_value_rtx is 0, it means pass the address
2372      as if it were an extra parameter.  */
2373   if (structure_value_addr && struct_value_rtx == 0)
2374     {
2375       /* If structure_value_addr is a REG other than
2376 	 virtual_outgoing_args_rtx, we can use always use it.  If it
2377 	 is not a REG, we must always copy it into a register.
2378 	 If it is virtual_outgoing_args_rtx, we must copy it to another
2379 	 register in some cases.  */
2380       rtx temp = (GET_CODE (structure_value_addr) != REG
2381 		  || (ACCUMULATE_OUTGOING_ARGS
2382 		      && stack_arg_under_construction
2383 		      && structure_value_addr == virtual_outgoing_args_rtx)
2384 		  ? copy_addr_to_reg (structure_value_addr)
2385 		  : structure_value_addr);
2386 
2387       actparms
2388 	= tree_cons (error_mark_node,
2389 		     make_tree (build_pointer_type (TREE_TYPE (funtype)),
2390 				temp),
2391 		     actparms);
2392       structure_value_addr_parm = 1;
2393     }
2394 
2395   /* Count the arguments and set NUM_ACTUALS.  */
2396   for (p = actparms, num_actuals = 0; p; p = TREE_CHAIN (p))
2397     num_actuals++;
2398 
2399   /* Compute number of named args.
2400      Normally, don't include the last named arg if anonymous args follow.
2401      We do include the last named arg if STRICT_ARGUMENT_NAMING is nonzero.
2402      (If no anonymous args follow, the result of list_length is actually
2403      one too large.  This is harmless.)
2404 
2405      If PRETEND_OUTGOING_VARARGS_NAMED is set and STRICT_ARGUMENT_NAMING is
2406      zero, this machine will be able to place unnamed args that were
2407      passed in registers into the stack.  So treat all args as named.
2408      This allows the insns emitting for a specific argument list to be
2409      independent of the function declaration.
2410 
2411      If PRETEND_OUTGOING_VARARGS_NAMED is not set, we do not have any
2412      reliable way to pass unnamed args in registers, so we must force
2413      them into memory.  */
2414 
2415   if ((STRICT_ARGUMENT_NAMING
2416        || ! PRETEND_OUTGOING_VARARGS_NAMED)
2417       && TYPE_ARG_TYPES (funtype) != 0)
2418     n_named_args
2419       = (list_length (TYPE_ARG_TYPES (funtype))
2420 	 /* Don't include the last named arg.  */
2421 	 - (STRICT_ARGUMENT_NAMING ? 0 : 1)
2422 	 /* Count the struct value address, if it is passed as a parm.  */
2423 	 + structure_value_addr_parm);
2424   else
2425     /* If we know nothing, treat all args as named.  */
2426     n_named_args = num_actuals;
2427 
2428   /* Start updating where the next arg would go.
2429 
2430      On some machines (such as the PA) indirect calls have a different
2431      calling convention than normal calls.  The last argument in
2432      INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
2433      or not.  */
2434   INIT_CUMULATIVE_ARGS (args_so_far, funtype, NULL_RTX, (fndecl == 0));
2435 
2436   /* Make a vector to hold all the information about each arg.  */
2437   args = (struct arg_data *) alloca (num_actuals * sizeof (struct arg_data));
2438   memset ((char *) args, 0, num_actuals * sizeof (struct arg_data));
2439 
2440   /* Build up entries in the ARGS array, compute the size of the
2441      arguments into ARGS_SIZE, etc.  */
2442   initialize_argument_information (num_actuals, args, &args_size,
2443 				   n_named_args, actparms, fndecl,
2444 				   &args_so_far, reg_parm_stack_space,
2445 				   &old_stack_level, &old_pending_adj,
2446 				   &must_preallocate, &flags);
2447 
2448   if (args_size.var)
2449     {
2450       /* If this function requires a variable-sized argument list, don't
2451 	 try to make a cse'able block for this call.  We may be able to
2452 	 do this eventually, but it is too complicated to keep track of
2453 	 what insns go in the cse'able block and which don't.  */
2454 
2455       flags &= ~ECF_LIBCALL_BLOCK;
2456       must_preallocate = 1;
2457     }
2458 
2459   /* Now make final decision about preallocating stack space.  */
2460   must_preallocate = finalize_must_preallocate (must_preallocate,
2461 						num_actuals, args,
2462 						&args_size);
2463 
2464   /* If the structure value address will reference the stack pointer, we
2465      must stabilize it.  We don't need to do this if we know that we are
2466      not going to adjust the stack pointer in processing this call.  */
2467 
2468   if (structure_value_addr
2469       && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
2470 	  || reg_mentioned_p (virtual_outgoing_args_rtx,
2471 			      structure_value_addr))
2472       && (args_size.var
2473 	  || (!ACCUMULATE_OUTGOING_ARGS && args_size.constant)))
2474     structure_value_addr = copy_to_reg (structure_value_addr);
2475 
2476   /* Tail calls can make things harder to debug, and we're traditionally
2477      pushed these optimizations into -O2.  Don't try if we're already
2478      expanding a call, as that means we're an argument.  Don't try if
2479      there's cleanups, as we know there's code to follow the call.
2480 
2481      If rtx_equal_function_value_matters is false, that means we've
2482      finished with regular parsing.  Which means that some of the
2483      machinery we use to generate tail-calls is no longer in place.
2484      This is most often true of sjlj-exceptions, which we couldn't
2485      tail-call to anyway.  */
2486 
2487   if (currently_expanding_call++ != 0
2488       || !flag_optimize_sibling_calls
2489       || !rtx_equal_function_value_matters
2490       || any_pending_cleanups (1)
2491       || args_size.var)
2492     try_tail_call = try_tail_recursion = 0;
2493 
2494   /* Tail recursion fails, when we are not dealing with recursive calls.  */
2495   if (!try_tail_recursion
2496       || TREE_CODE (addr) != ADDR_EXPR
2497       || TREE_OPERAND (addr, 0) != current_function_decl)
2498     try_tail_recursion = 0;
2499 
2500   /*  Rest of purposes for tail call optimizations to fail.  */
2501   if (
2502 #ifdef HAVE_sibcall_epilogue
2503       !HAVE_sibcall_epilogue
2504 #else
2505       1
2506 #endif
2507       || !try_tail_call
2508       /* Doing sibling call optimization needs some work, since
2509 	 structure_value_addr can be allocated on the stack.
2510 	 It does not seem worth the effort since few optimizable
2511 	 sibling calls will return a structure.  */
2512       || structure_value_addr != NULL_RTX
2513       /* If the register holding the address is a callee saved
2514 	 register, then we lose.  We have no way to prevent that,
2515 	 so we only allow calls to named functions.  */
2516       /* ??? This could be done by having the insn constraints
2517 	 use a register class that is all call-clobbered.  Any
2518 	 reload insns generated to fix things up would appear
2519 	 before the sibcall_epilogue.  */
2520       || fndecl == NULL_TREE
2521       || (flags & (ECF_RETURNS_TWICE | ECF_LONGJMP | ECF_NORETURN))
2522       || !FUNCTION_OK_FOR_SIBCALL (fndecl)
2523       /* If this function requires more stack slots than the current
2524 	 function, we cannot change it into a sibling call.  */
2525       || args_size.constant > current_function_args_size
2526       /* If the callee pops its own arguments, then it must pop exactly
2527 	 the same number of arguments as the current function.  */
2528       || (RETURN_POPS_ARGS (fndecl, funtype, args_size.constant)
2529 	  != RETURN_POPS_ARGS (current_function_decl,
2530 			       TREE_TYPE (current_function_decl),
2531 			       current_function_args_size))
2532       || !(*lang_hooks.decls.ok_for_sibcall) (fndecl))
2533     try_tail_call = 0;
2534 
2535   if (try_tail_call || try_tail_recursion)
2536     {
2537       int end, inc;
2538       actparms = NULL_TREE;
2539       /* Ok, we're going to give the tail call the old college try.
2540 	 This means we're going to evaluate the function arguments
2541 	 up to three times.  There are two degrees of badness we can
2542 	 encounter, those that can be unsaved and those that can't.
2543 	 (See unsafe_for_reeval commentary for details.)
2544 
2545 	 Generate a new argument list.  Pass safe arguments through
2546 	 unchanged.  For the easy badness wrap them in UNSAVE_EXPRs.
2547 	 For hard badness, evaluate them now and put their resulting
2548 	 rtx in a temporary VAR_DECL.
2549 
2550 	 initialize_argument_information has ordered the array for the
2551 	 order to be pushed, and we must remember this when reconstructing
2552 	 the original argument order.  */
2553 
2554       if (PUSH_ARGS_REVERSED)
2555 	{
2556 	  inc = 1;
2557 	  i = 0;
2558 	  end = num_actuals;
2559 	}
2560       else
2561 	{
2562 	  inc = -1;
2563 	  i = num_actuals - 1;
2564 	  end = -1;
2565 	}
2566 
2567       for (; i != end; i += inc)
2568 	{
2569           args[i].tree_value = fix_unsafe_tree (args[i].tree_value);
2570 	  /* We need to build actparms for optimize_tail_recursion.  We can
2571 	     safely trash away TREE_PURPOSE, since it is unused by this
2572 	     function.  */
2573 	  if (try_tail_recursion)
2574 	    actparms = tree_cons (NULL_TREE, args[i].tree_value, actparms);
2575 	}
2576       /* Do the same for the function address if it is an expression. */
2577       if (!fndecl)
2578         addr = fix_unsafe_tree (addr);
2579       /* Expanding one of those dangerous arguments could have added
2580 	 cleanups, but otherwise give it a whirl.  */
2581       if (any_pending_cleanups (1))
2582 	try_tail_call = try_tail_recursion = 0;
2583     }
2584 
2585   /* Generate a tail recursion sequence when calling ourselves.  */
2586 
2587   if (try_tail_recursion)
2588     {
2589       /* We want to emit any pending stack adjustments before the tail
2590 	 recursion "call".  That way we know any adjustment after the tail
2591 	 recursion call can be ignored if we indeed use the tail recursion
2592 	 call expansion.  */
2593       int save_pending_stack_adjust = pending_stack_adjust;
2594       int save_stack_pointer_delta = stack_pointer_delta;
2595 
2596       /* Emit any queued insns now; otherwise they would end up in
2597 	 only one of the alternates.  */
2598       emit_queue ();
2599 
2600       /* Use a new sequence to hold any RTL we generate.  We do not even
2601 	 know if we will use this RTL yet.  The final decision can not be
2602 	 made until after RTL generation for the entire function is
2603 	 complete.  */
2604       start_sequence ();
2605       /* If expanding any of the arguments creates cleanups, we can't
2606 	 do a tailcall.  So, we'll need to pop the pending cleanups
2607 	 list.  If, however, all goes well, and there are no cleanups
2608 	 then the call to expand_start_target_temps will have no
2609 	 effect.  */
2610       expand_start_target_temps ();
2611       if (optimize_tail_recursion (actparms, get_last_insn ()))
2612 	{
2613 	  if (any_pending_cleanups (1))
2614 	    try_tail_call = try_tail_recursion = 0;
2615 	  else
2616 	    tail_recursion_insns = get_insns ();
2617 	}
2618       expand_end_target_temps ();
2619       end_sequence ();
2620 
2621       /* Restore the original pending stack adjustment for the sibling and
2622 	 normal call cases below.  */
2623       pending_stack_adjust = save_pending_stack_adjust;
2624       stack_pointer_delta = save_stack_pointer_delta;
2625     }
2626 
2627   if (profile_arc_flag && (flags & ECF_FORK_OR_EXEC))
2628     {
2629       /* A fork duplicates the profile information, and an exec discards
2630 	 it.  We can't rely on fork/exec to be paired.  So write out the
2631 	 profile information we have gathered so far, and clear it.  */
2632       /* ??? When Linux's __clone is called with CLONE_VM set, profiling
2633 	 is subject to race conditions, just as with multithreaded
2634 	 programs.  */
2635 
2636       emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__bb_fork_func"),
2637 		      	 LCT_ALWAYS_RETURN,
2638 			 VOIDmode, 0);
2639     }
2640 
2641   /* Ensure current function's preferred stack boundary is at least
2642      what we need.  We don't have to increase alignment for recursive
2643      functions.  */
2644   if (cfun->preferred_stack_boundary < preferred_stack_boundary
2645       && fndecl != current_function_decl)
2646     cfun->preferred_stack_boundary = preferred_stack_boundary;
2647 
2648   preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
2649 
2650   function_call_count++;
2651 
2652   /* We want to make two insn chains; one for a sibling call, the other
2653      for a normal call.  We will select one of the two chains after
2654      initial RTL generation is complete.  */
2655   for (pass = 0; pass < 2; pass++)
2656     {
2657       int sibcall_failure = 0;
2658       /* We want to emit any pending stack adjustments before the tail
2659 	 recursion "call".  That way we know any adjustment after the tail
2660 	 recursion call can be ignored if we indeed use the tail recursion
2661 	 call expansion.  */
2662       int save_pending_stack_adjust = 0;
2663       int save_stack_pointer_delta = 0;
2664       rtx insns;
2665       rtx before_call, next_arg_reg;
2666 
2667       if (pass == 0)
2668 	{
2669 	  if (! try_tail_call)
2670 	    continue;
2671 
2672 	  /* Emit any queued insns now; otherwise they would end up in
2673              only one of the alternates.  */
2674 	  emit_queue ();
2675 
2676 	  /* State variables we need to save and restore between
2677 	     iterations.  */
2678 	  save_pending_stack_adjust = pending_stack_adjust;
2679 	  save_stack_pointer_delta = stack_pointer_delta;
2680 	}
2681       if (pass)
2682 	flags &= ~ECF_SIBCALL;
2683       else
2684 	flags |= ECF_SIBCALL;
2685 
2686       /* Other state variables that we must reinitialize each time
2687 	 through the loop (that are not initialized by the loop itself).  */
2688       argblock = 0;
2689       call_fusage = 0;
2690 
2691       /* Start a new sequence for the normal call case.
2692 
2693 	 From this point on, if the sibling call fails, we want to set
2694 	 sibcall_failure instead of continuing the loop.  */
2695       start_sequence ();
2696 
2697       if (pass == 0)
2698 	{
2699 	  /* We know at this point that there are not currently any
2700 	     pending cleanups.  If, however, in the process of evaluating
2701 	     the arguments we were to create some, we'll need to be
2702 	     able to get rid of them.  */
2703 	  expand_start_target_temps ();
2704 	}
2705 
2706       /* Don't let pending stack adjusts add up to too much.
2707 	 Also, do all pending adjustments now if there is any chance
2708 	 this might be a call to alloca or if we are expanding a sibling
2709 	 call sequence or if we are calling a function that is to return
2710 	 with stack pointer depressed.  */
2711       if (pending_stack_adjust >= 32
2712 	  || (pending_stack_adjust > 0
2713 	      && (flags & (ECF_MAY_BE_ALLOCA | ECF_SP_DEPRESSED)))
2714 	  || pass == 0)
2715 	do_pending_stack_adjust ();
2716 
2717       /* When calling a const function, we must pop the stack args right away,
2718 	 so that the pop is deleted or moved with the call.  */
2719       if (pass && (flags & ECF_LIBCALL_BLOCK))
2720 	NO_DEFER_POP;
2721 
2722 #ifdef FINAL_REG_PARM_STACK_SPACE
2723       reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
2724 							 args_size.var);
2725 #endif
2726       /* Precompute any arguments as needed.  */
2727       if (pass)
2728 	precompute_arguments (flags, num_actuals, args);
2729 
2730       /* Now we are about to start emitting insns that can be deleted
2731 	 if a libcall is deleted.  */
2732       if (pass && (flags & (ECF_LIBCALL_BLOCK | ECF_MALLOC)))
2733 	start_sequence ();
2734 
2735       adjusted_args_size = args_size;
2736       /* Compute the actual size of the argument block required.  The variable
2737 	 and constant sizes must be combined, the size may have to be rounded,
2738 	 and there may be a minimum required size.  When generating a sibcall
2739 	 pattern, do not round up, since we'll be re-using whatever space our
2740 	 caller provided.  */
2741       unadjusted_args_size
2742 	= compute_argument_block_size (reg_parm_stack_space,
2743 				       &adjusted_args_size,
2744 				       (pass == 0 ? 0
2745 					: preferred_stack_boundary));
2746 
2747       old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
2748 
2749       /* The argument block when performing a sibling call is the
2750          incoming argument block.  */
2751       if (pass == 0)
2752 	{
2753 	  argblock = virtual_incoming_args_rtx;
2754 	  argblock
2755 #ifdef STACK_GROWS_DOWNWARD
2756 	    = plus_constant (argblock, current_function_pretend_args_size);
2757 #else
2758 	    = plus_constant (argblock, -current_function_pretend_args_size);
2759 #endif
2760 	  stored_args_map = sbitmap_alloc (args_size.constant);
2761 	  sbitmap_zero (stored_args_map);
2762 	}
2763 
2764       /* If we have no actual push instructions, or shouldn't use them,
2765 	 make space for all args right now.  */
2766       else if (adjusted_args_size.var != 0)
2767 	{
2768 	  if (old_stack_level == 0)
2769 	    {
2770 	      emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
2771 	      old_pending_adj = pending_stack_adjust;
2772 	      pending_stack_adjust = 0;
2773 	      /* stack_arg_under_construction says whether a stack arg is
2774 		 being constructed at the old stack level.  Pushing the stack
2775 		 gets a clean outgoing argument block.  */
2776 	      old_stack_arg_under_construction = stack_arg_under_construction;
2777 	      stack_arg_under_construction = 0;
2778 	    }
2779 	  argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
2780 	}
2781       else
2782 	{
2783 	  /* Note that we must go through the motions of allocating an argument
2784 	     block even if the size is zero because we may be storing args
2785 	     in the area reserved for register arguments, which may be part of
2786 	     the stack frame.  */
2787 
2788 	  int needed = adjusted_args_size.constant;
2789 
2790 	  /* Store the maximum argument space used.  It will be pushed by
2791 	     the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
2792 	     checking).  */
2793 
2794 	  if (needed > current_function_outgoing_args_size)
2795 	    current_function_outgoing_args_size = needed;
2796 
2797 	  if (must_preallocate)
2798 	    {
2799 	      if (ACCUMULATE_OUTGOING_ARGS)
2800 		{
2801 		  /* Since the stack pointer will never be pushed, it is
2802 		     possible for the evaluation of a parm to clobber
2803 		     something we have already written to the stack.
2804 		     Since most function calls on RISC machines do not use
2805 		     the stack, this is uncommon, but must work correctly.
2806 
2807 		     Therefore, we save any area of the stack that was already
2808 		     written and that we are using.  Here we set up to do this
2809 		     by making a new stack usage map from the old one.  The
2810 		     actual save will be done by store_one_arg.
2811 
2812 		     Another approach might be to try to reorder the argument
2813 		     evaluations to avoid this conflicting stack usage.  */
2814 
2815 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2816 		  /* Since we will be writing into the entire argument area,
2817 		     the map must be allocated for its entire size, not just
2818 		     the part that is the responsibility of the caller.  */
2819 		  needed += reg_parm_stack_space;
2820 #endif
2821 
2822 #ifdef ARGS_GROW_DOWNWARD
2823 		  highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2824 						     needed + 1);
2825 #else
2826 		  highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
2827 						     needed);
2828 #endif
2829 		  stack_usage_map
2830 		    = (char *) alloca (highest_outgoing_arg_in_use);
2831 
2832 		  if (initial_highest_arg_in_use)
2833 		    memcpy (stack_usage_map, initial_stack_usage_map,
2834 			    initial_highest_arg_in_use);
2835 
2836 		  if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
2837 		    memset (&stack_usage_map[initial_highest_arg_in_use], 0,
2838 			   (highest_outgoing_arg_in_use
2839 			    - initial_highest_arg_in_use));
2840 		  needed = 0;
2841 
2842 		  /* The address of the outgoing argument list must not be
2843 		     copied to a register here, because argblock would be left
2844 		     pointing to the wrong place after the call to
2845 		     allocate_dynamic_stack_space below.  */
2846 
2847 		  argblock = virtual_outgoing_args_rtx;
2848 		}
2849 	      else
2850 		{
2851 		  if (inhibit_defer_pop == 0)
2852 		    {
2853 		      /* Try to reuse some or all of the pending_stack_adjust
2854 			 to get this space.  */
2855 		      needed
2856 			= (combine_pending_stack_adjustment_and_call
2857 			   (unadjusted_args_size,
2858 			    &adjusted_args_size,
2859 			    preferred_unit_stack_boundary));
2860 
2861 		      /* combine_pending_stack_adjustment_and_call computes
2862 			 an adjustment before the arguments are allocated.
2863 			 Account for them and see whether or not the stack
2864 			 needs to go up or down.  */
2865 		      needed = unadjusted_args_size - needed;
2866 
2867 		      if (needed < 0)
2868 			{
2869 			  /* We're releasing stack space.  */
2870 			  /* ??? We can avoid any adjustment at all if we're
2871 			     already aligned.  FIXME.  */
2872 			  pending_stack_adjust = -needed;
2873 			  do_pending_stack_adjust ();
2874 			  needed = 0;
2875 			}
2876 		      else
2877 			/* We need to allocate space.  We'll do that in
2878 			   push_block below.  */
2879 			pending_stack_adjust = 0;
2880 		    }
2881 
2882 		  /* Special case this because overhead of `push_block' in
2883 		     this case is non-trivial.  */
2884 		  if (needed == 0)
2885 		    argblock = virtual_outgoing_args_rtx;
2886 		  else
2887 		    argblock = push_block (GEN_INT (needed), 0, 0);
2888 
2889 		  /* We only really need to call `copy_to_reg' in the case
2890 		     where push insns are going to be used to pass ARGBLOCK
2891 		     to a function call in ARGS.  In that case, the stack
2892 		     pointer changes value from the allocation point to the
2893 		     call point, and hence the value of
2894 		     VIRTUAL_OUTGOING_ARGS_RTX changes as well.  But might
2895 		     as well always do it.  */
2896 		  argblock = copy_to_reg (argblock);
2897 
2898 		  /* The save/restore code in store_one_arg handles all
2899 		     cases except one: a constructor call (including a C
2900 		     function returning a BLKmode struct) to initialize
2901 		     an argument.  */
2902 		  if (stack_arg_under_construction)
2903 		    {
2904 #ifndef OUTGOING_REG_PARM_STACK_SPACE
2905 		      rtx push_size = GEN_INT (reg_parm_stack_space
2906 					       + adjusted_args_size.constant);
2907 #else
2908 		      rtx push_size = GEN_INT (adjusted_args_size.constant);
2909 #endif
2910 		      if (old_stack_level == 0)
2911 			{
2912 			  emit_stack_save (SAVE_BLOCK, &old_stack_level,
2913 					   NULL_RTX);
2914 			  old_pending_adj = pending_stack_adjust;
2915 			  pending_stack_adjust = 0;
2916 			  /* stack_arg_under_construction says whether a stack
2917 			     arg is being constructed at the old stack level.
2918 			     Pushing the stack gets a clean outgoing argument
2919 			     block.  */
2920 			  old_stack_arg_under_construction
2921 			    = stack_arg_under_construction;
2922 			  stack_arg_under_construction = 0;
2923 			  /* Make a new map for the new argument list.  */
2924 			  stack_usage_map = (char *)
2925 			    alloca (highest_outgoing_arg_in_use);
2926 			  memset (stack_usage_map, 0, highest_outgoing_arg_in_use);
2927 			  highest_outgoing_arg_in_use = 0;
2928 			}
2929 		      allocate_dynamic_stack_space (push_size, NULL_RTX,
2930 						    BITS_PER_UNIT);
2931 		    }
2932 		  /* If argument evaluation might modify the stack pointer,
2933 		     copy the address of the argument list to a register.  */
2934 		  for (i = 0; i < num_actuals; i++)
2935 		    if (args[i].pass_on_stack)
2936 		      {
2937 			argblock = copy_addr_to_reg (argblock);
2938 			break;
2939 		      }
2940 		}
2941 	    }
2942 	}
2943 
2944       compute_argument_addresses (args, argblock, num_actuals);
2945 
2946       /* If we push args individually in reverse order, perform stack alignment
2947 	 before the first push (the last arg).  */
2948       if (PUSH_ARGS_REVERSED && argblock == 0
2949 	  && adjusted_args_size.constant != unadjusted_args_size)
2950 	{
2951 	  /* When the stack adjustment is pending, we get better code
2952 	     by combining the adjustments.  */
2953 	  if (pending_stack_adjust
2954 	      && ! (flags & ECF_LIBCALL_BLOCK)
2955 	      && ! inhibit_defer_pop)
2956 	    {
2957 	      pending_stack_adjust
2958 		= (combine_pending_stack_adjustment_and_call
2959 		   (unadjusted_args_size,
2960 		    &adjusted_args_size,
2961 		    preferred_unit_stack_boundary));
2962 	      do_pending_stack_adjust ();
2963 	    }
2964 	  else if (argblock == 0)
2965 	    anti_adjust_stack (GEN_INT (adjusted_args_size.constant
2966 					- unadjusted_args_size));
2967 	}
2968       /* Now that the stack is properly aligned, pops can't safely
2969 	 be deferred during the evaluation of the arguments.  */
2970       NO_DEFER_POP;
2971 
2972       funexp = rtx_for_function_call (fndecl, addr);
2973 
2974       /* Figure out the register where the value, if any, will come back.  */
2975       valreg = 0;
2976       if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
2977 	  && ! structure_value_addr)
2978 	{
2979 	  if (pcc_struct_value)
2980 	    valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
2981 					  fndecl, (pass == 0));
2982 	  else
2983 	    valreg = hard_function_value (TREE_TYPE (exp), fndecl, (pass == 0));
2984 	}
2985 
2986       /* Precompute all register parameters.  It isn't safe to compute anything
2987 	 once we have started filling any specific hard regs.  */
2988       precompute_register_parameters (num_actuals, args, &reg_parm_seen);
2989 
2990 #ifdef REG_PARM_STACK_SPACE
2991       /* Save the fixed argument area if it's part of the caller's frame and
2992 	 is clobbered by argument setup for this call.  */
2993       if (ACCUMULATE_OUTGOING_ARGS && pass)
2994 	save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
2995 					      &low_to_save, &high_to_save);
2996 #endif
2997 
2998       /* Now store (and compute if necessary) all non-register parms.
2999 	 These come before register parms, since they can require block-moves,
3000 	 which could clobber the registers used for register parms.
3001 	 Parms which have partial registers are not stored here,
3002 	 but we do preallocate space here if they want that.  */
3003 
3004       for (i = 0; i < num_actuals; i++)
3005 	if (args[i].reg == 0 || args[i].pass_on_stack)
3006 	  {
3007 	    rtx before_arg = get_last_insn ();
3008 
3009 	    if (store_one_arg (&args[i], argblock, flags,
3010 			       adjusted_args_size.var != 0,
3011 			       reg_parm_stack_space)
3012 		|| (pass == 0
3013 		    && check_sibcall_argument_overlap (before_arg,
3014 						       &args[i])))
3015 	      sibcall_failure = 1;
3016 	  }
3017 
3018       /* If we have a parm that is passed in registers but not in memory
3019 	 and whose alignment does not permit a direct copy into registers,
3020 	 make a group of pseudos that correspond to each register that we
3021 	 will later fill.  */
3022       if (STRICT_ALIGNMENT)
3023 	store_unaligned_arguments_into_pseudos (args, num_actuals);
3024 
3025       /* Now store any partially-in-registers parm.
3026 	 This is the last place a block-move can happen.  */
3027       if (reg_parm_seen)
3028 	for (i = 0; i < num_actuals; i++)
3029 	  if (args[i].partial != 0 && ! args[i].pass_on_stack)
3030 	    {
3031 	      rtx before_arg = get_last_insn ();
3032 
3033 	      if (store_one_arg (&args[i], argblock, flags,
3034 				 adjusted_args_size.var != 0,
3035 				 reg_parm_stack_space)
3036 		  || (pass == 0
3037 		      && check_sibcall_argument_overlap (before_arg,
3038 							 &args[i])))
3039 		sibcall_failure = 1;
3040 	    }
3041 
3042       /* If we pushed args in forward order, perform stack alignment
3043 	 after pushing the last arg.  */
3044       if (!PUSH_ARGS_REVERSED && argblock == 0)
3045 	anti_adjust_stack (GEN_INT (adjusted_args_size.constant
3046 				    - unadjusted_args_size));
3047 
3048       /* If register arguments require space on the stack and stack space
3049 	 was not preallocated, allocate stack space here for arguments
3050 	 passed in registers.  */
3051 #ifdef OUTGOING_REG_PARM_STACK_SPACE
3052       if (!ACCUMULATE_OUTGOING_ARGS
3053 	  && must_preallocate == 0 && reg_parm_stack_space > 0)
3054 	anti_adjust_stack (GEN_INT (reg_parm_stack_space));
3055 #endif
3056 
3057       /* Pass the function the address in which to return a
3058 	 structure value.  */
3059       if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
3060 	{
3061 	  emit_move_insn (struct_value_rtx,
3062 			  force_reg (Pmode,
3063 				     force_operand (structure_value_addr,
3064 						    NULL_RTX)));
3065 
3066 	  if (GET_CODE (struct_value_rtx) == REG)
3067 	    use_reg (&call_fusage, struct_value_rtx);
3068 	}
3069 
3070       funexp = prepare_call_address (funexp, fndecl, &call_fusage,
3071 				     reg_parm_seen, pass == 0);
3072 
3073       load_register_parameters (args, num_actuals, &call_fusage, flags);
3074 
3075       /* Perform postincrements before actually calling the function.  */
3076       emit_queue ();
3077 
3078       /* Save a pointer to the last insn before the call, so that we can
3079 	 later safely search backwards to find the CALL_INSN.  */
3080       before_call = get_last_insn ();
3081 
3082       /* Set up next argument register.  For sibling calls on machines
3083 	 with register windows this should be the incoming register.  */
3084 #ifdef FUNCTION_INCOMING_ARG
3085       if (pass == 0)
3086 	next_arg_reg = FUNCTION_INCOMING_ARG (args_so_far, VOIDmode,
3087 					      void_type_node, 1);
3088       else
3089 #endif
3090 	next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode,
3091 				     void_type_node, 1);
3092 
3093       /* All arguments and registers used for the call must be set up by
3094 	 now!  */
3095 
3096       /* Stack must be properly aligned now.  */
3097       if (pass && stack_pointer_delta % preferred_unit_stack_boundary)
3098 	abort ();
3099 
3100       /* Generate the actual call instruction.  */
3101       emit_call_1 (funexp, fndecl, funtype, unadjusted_args_size,
3102 		   adjusted_args_size.constant, struct_value_size,
3103 		   next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
3104 		   flags, & args_so_far);
3105 
3106       /* Verify that we've deallocated all the stack we used.  */
3107       if (pass
3108 	  && ! (flags & (ECF_NORETURN | ECF_LONGJMP))
3109 	  && old_stack_allocated != stack_pointer_delta - pending_stack_adjust)
3110 	abort ();
3111 
3112       /* If call is cse'able, make appropriate pair of reg-notes around it.
3113 	 Test valreg so we don't crash; may safely ignore `const'
3114 	 if return type is void.  Disable for PARALLEL return values, because
3115 	 we have no way to move such values into a pseudo register.  */
3116       if (pass && (flags & ECF_LIBCALL_BLOCK))
3117 	{
3118 	  rtx insns;
3119 
3120 	  if (valreg == 0 || GET_CODE (valreg) == PARALLEL)
3121 	    {
3122 	      insns = get_insns ();
3123 	      end_sequence ();
3124 	      emit_insn (insns);
3125 	    }
3126 	  else
3127 	    {
3128 	      rtx note = 0;
3129 	      rtx temp = gen_reg_rtx (GET_MODE (valreg));
3130 
3131 	      /* Mark the return value as a pointer if needed.  */
3132 	      if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
3133 		mark_reg_pointer (temp,
3134 				  TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp))));
3135 
3136 	      /* Construct an "equal form" for the value which mentions all the
3137 		 arguments in order as well as the function name.  */
3138 	      for (i = 0; i < num_actuals; i++)
3139 		note = gen_rtx_EXPR_LIST (VOIDmode,
3140 					  args[i].initial_value, note);
3141 	      note = gen_rtx_EXPR_LIST (VOIDmode, funexp, note);
3142 
3143 	      insns = get_insns ();
3144 	      end_sequence ();
3145 
3146 	      if (flags & ECF_PURE)
3147 		note = gen_rtx_EXPR_LIST (VOIDmode,
3148 			gen_rtx_USE (VOIDmode,
3149 				     gen_rtx_MEM (BLKmode,
3150 						  gen_rtx_SCRATCH (VOIDmode))),
3151 			note);
3152 
3153 	      emit_libcall_block (insns, temp, valreg, note);
3154 
3155 	      valreg = temp;
3156 	    }
3157 	}
3158       else if (pass && (flags & ECF_MALLOC))
3159 	{
3160 	  rtx temp = gen_reg_rtx (GET_MODE (valreg));
3161 	  rtx last, insns;
3162 
3163 	  /* The return value from a malloc-like function is a pointer.  */
3164 	  if (TREE_CODE (TREE_TYPE (exp)) == POINTER_TYPE)
3165 	    mark_reg_pointer (temp, BIGGEST_ALIGNMENT);
3166 
3167 	  emit_move_insn (temp, valreg);
3168 
3169 	  /* The return value from a malloc-like function can not alias
3170 	     anything else.  */
3171 	  last = get_last_insn ();
3172 	  REG_NOTES (last) =
3173 	    gen_rtx_EXPR_LIST (REG_NOALIAS, temp, REG_NOTES (last));
3174 
3175 	  /* Write out the sequence.  */
3176 	  insns = get_insns ();
3177 	  end_sequence ();
3178 	  emit_insn (insns);
3179 	  valreg = temp;
3180 	}
3181 
3182       /* For calls to `setjmp', etc., inform flow.c it should complain
3183 	 if nonvolatile values are live.  For functions that cannot return,
3184 	 inform flow that control does not fall through.  */
3185 
3186       if ((flags & (ECF_NORETURN | ECF_LONGJMP)) || pass == 0)
3187 	{
3188 	  /* The barrier must be emitted
3189 	     immediately after the CALL_INSN.  Some ports emit more
3190 	     than just a CALL_INSN above, so we must search for it here.  */
3191 
3192 	  rtx last = get_last_insn ();
3193 	  while (GET_CODE (last) != CALL_INSN)
3194 	    {
3195 	      last = PREV_INSN (last);
3196 	      /* There was no CALL_INSN?  */
3197 	      if (last == before_call)
3198 		abort ();
3199 	    }
3200 
3201 	  emit_barrier_after (last);
3202 
3203 	  /* Stack adjustments after a noreturn call are dead code.  */
3204 	  stack_pointer_delta = old_stack_allocated;
3205 	  pending_stack_adjust = 0;
3206 	}
3207 
3208       if (flags & ECF_LONGJMP)
3209 	current_function_calls_longjmp = 1;
3210 
3211       /* If this function is returning into a memory location marked as
3212 	 readonly, it means it is initializing that location.  But we normally
3213 	 treat functions as not clobbering such locations, so we need to
3214 	 specify that this one does.  */
3215       if (target != 0 && GET_CODE (target) == MEM
3216 	  && structure_value_addr != 0 && RTX_UNCHANGING_P (target))
3217 	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
3218 
3219       /* If value type not void, return an rtx for the value.  */
3220 
3221       /* If there are cleanups to be called, don't use a hard reg as target.
3222 	 We need to double check this and see if it matters anymore.  */
3223       if (any_pending_cleanups (1))
3224 	{
3225 	  if (target && REG_P (target)
3226 	      && REGNO (target) < FIRST_PSEUDO_REGISTER)
3227 	    target = 0;
3228 	  sibcall_failure = 1;
3229 	}
3230 
3231       if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode
3232 	  || ignore)
3233 	target = const0_rtx;
3234       else if (structure_value_addr)
3235 	{
3236 	  if (target == 0 || GET_CODE (target) != MEM)
3237 	    {
3238 	      target
3239 		= gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
3240 			       memory_address (TYPE_MODE (TREE_TYPE (exp)),
3241 					       structure_value_addr));
3242 	      set_mem_attributes (target, exp, 1);
3243 	    }
3244 	}
3245       else if (pcc_struct_value)
3246 	{
3247 	  /* This is the special C++ case where we need to
3248 	     know what the true target was.  We take care to
3249 	     never use this value more than once in one expression.  */
3250 	  target = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (exp)),
3251 				copy_to_reg (valreg));
3252 	  set_mem_attributes (target, exp, 1);
3253 	}
3254       /* Handle calls that return values in multiple non-contiguous locations.
3255 	 The Irix 6 ABI has examples of this.  */
3256       else if (GET_CODE (valreg) == PARALLEL)
3257 	{
3258 	  if (target == 0)
3259 	    {
3260 	      /* This will only be assigned once, so it can be readonly.  */
3261 	      tree nt = build_qualified_type (TREE_TYPE (exp),
3262 					      (TYPE_QUALS (TREE_TYPE (exp))
3263 					       | TYPE_QUAL_CONST));
3264 
3265 	      target = assign_temp (nt, 0, 1, 1);
3266 	      preserve_temp_slots (target);
3267 	    }
3268 
3269 	  if (! rtx_equal_p (target, valreg))
3270 	    emit_group_store (target, valreg,
3271 			      int_size_in_bytes (TREE_TYPE (exp)));
3272 
3273 	  /* We can not support sibling calls for this case.  */
3274 	  sibcall_failure = 1;
3275 	}
3276       else if (target
3277 	       && GET_MODE (target) == TYPE_MODE (TREE_TYPE (exp))
3278 	       && GET_MODE (target) == GET_MODE (valreg))
3279 	{
3280 	  /* TARGET and VALREG cannot be equal at this point because the
3281 	     latter would not have REG_FUNCTION_VALUE_P true, while the
3282 	     former would if it were referring to the same register.
3283 
3284 	     If they refer to the same register, this move will be a no-op,
3285 	     except when function inlining is being done.  */
3286 	  emit_move_insn (target, valreg);
3287 	}
3288       else if (TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
3289 	{
3290 	  target = copy_blkmode_from_reg (target, valreg, TREE_TYPE (exp));
3291 
3292 	  /* We can not support sibling calls for this case.  */
3293 	  sibcall_failure = 1;
3294 	}
3295       else
3296 	target = copy_to_reg (valreg);
3297 
3298 #ifdef PROMOTE_FUNCTION_RETURN
3299       /* If we promoted this return value, make the proper SUBREG.  TARGET
3300 	 might be const0_rtx here, so be careful.  */
3301       if (GET_CODE (target) == REG
3302 	  && TYPE_MODE (TREE_TYPE (exp)) != BLKmode
3303 	  && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3304 	{
3305 	  tree type = TREE_TYPE (exp);
3306 	  int unsignedp = TREE_UNSIGNED (type);
3307 	  int offset = 0;
3308 
3309 	  /* If we don't promote as expected, something is wrong.  */
3310 	  if (GET_MODE (target)
3311 	      != promote_mode (type, TYPE_MODE (type), &unsignedp, 1))
3312 	    abort ();
3313 
3314 	if ((WORDS_BIG_ENDIAN || BYTES_BIG_ENDIAN)
3315 	    && GET_MODE_SIZE (GET_MODE (target))
3316 	       > GET_MODE_SIZE (TYPE_MODE (type)))
3317 	  {
3318 	    offset = GET_MODE_SIZE (GET_MODE (target))
3319 		     - GET_MODE_SIZE (TYPE_MODE (type));
3320 	    if (! BYTES_BIG_ENDIAN)
3321 	      offset = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
3322 	    else if (! WORDS_BIG_ENDIAN)
3323 	      offset %= UNITS_PER_WORD;
3324 	  }
3325 	  target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
3326 	  SUBREG_PROMOTED_VAR_P (target) = 1;
3327 	  SUBREG_PROMOTED_UNSIGNED_SET (target, unsignedp);
3328 	}
3329 #endif
3330 
3331       /* If size of args is variable or this was a constructor call for a stack
3332 	 argument, restore saved stack-pointer value.  */
3333 
3334       if (old_stack_level && ! (flags & ECF_SP_DEPRESSED))
3335 	{
3336 	  emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
3337 	  pending_stack_adjust = old_pending_adj;
3338 	  stack_arg_under_construction = old_stack_arg_under_construction;
3339 	  highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3340 	  stack_usage_map = initial_stack_usage_map;
3341 	  sibcall_failure = 1;
3342 	}
3343       else if (ACCUMULATE_OUTGOING_ARGS && pass)
3344 	{
3345 #ifdef REG_PARM_STACK_SPACE
3346 	  if (save_area)
3347 	    {
3348 	      restore_fixed_argument_area (save_area, argblock,
3349 					   high_to_save, low_to_save);
3350 	    }
3351 #endif
3352 
3353 	  /* If we saved any argument areas, restore them.  */
3354 	  for (i = 0; i < num_actuals; i++)
3355 	    if (args[i].save_area)
3356 	      {
3357 		enum machine_mode save_mode = GET_MODE (args[i].save_area);
3358 		rtx stack_area
3359 		  = gen_rtx_MEM (save_mode,
3360 				 memory_address (save_mode,
3361 						 XEXP (args[i].stack_slot, 0)));
3362 
3363 		if (save_mode != BLKmode)
3364 		  emit_move_insn (stack_area, args[i].save_area);
3365 		else
3366 		  emit_block_move (stack_area, args[i].save_area,
3367 				   GEN_INT (args[i].size.constant),
3368 				   BLOCK_OP_CALL_PARM);
3369 	      }
3370 
3371 	  highest_outgoing_arg_in_use = initial_highest_arg_in_use;
3372 	  stack_usage_map = initial_stack_usage_map;
3373 	}
3374 
3375       /* If this was alloca, record the new stack level for nonlocal gotos.
3376 	 Check for the handler slots since we might not have a save area
3377 	 for non-local gotos.  */
3378 
3379       if ((flags & ECF_MAY_BE_ALLOCA) && nonlocal_goto_handler_slots != 0)
3380 	emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
3381 
3382       /* Free up storage we no longer need.  */
3383       for (i = 0; i < num_actuals; ++i)
3384 	if (args[i].aligned_regs)
3385 	  free (args[i].aligned_regs);
3386 
3387       if (pass == 0)
3388 	{
3389 	  /* Undo the fake expand_start_target_temps we did earlier.  If
3390 	     there had been any cleanups created, we've already set
3391 	     sibcall_failure.  */
3392 	  expand_end_target_temps ();
3393 	}
3394 
3395       insns = get_insns ();
3396       end_sequence ();
3397 
3398       if (pass == 0)
3399 	{
3400 	  tail_call_insns = insns;
3401 
3402 	  /* Restore the pending stack adjustment now that we have
3403 	     finished generating the sibling call sequence.  */
3404 
3405 	  pending_stack_adjust = save_pending_stack_adjust;
3406 	  stack_pointer_delta = save_stack_pointer_delta;
3407 
3408 	  /* Prepare arg structure for next iteration.  */
3409 	  for (i = 0; i < num_actuals; i++)
3410 	    {
3411 	      args[i].value = 0;
3412 	      args[i].aligned_regs = 0;
3413 	      args[i].stack = 0;
3414 	    }
3415 
3416 	  sbitmap_free (stored_args_map);
3417 	}
3418       else
3419 	normal_call_insns = insns;
3420 
3421       /* If something prevents making this a sibling call,
3422 	 zero out the sequence.  */
3423       if (sibcall_failure)
3424 	tail_call_insns = NULL_RTX;
3425     }
3426 
3427   /* The function optimize_sibling_and_tail_recursive_calls doesn't
3428      handle CALL_PLACEHOLDERs inside other CALL_PLACEHOLDERs.  This
3429      can happen if the arguments to this function call an inline
3430      function who's expansion contains another CALL_PLACEHOLDER.
3431 
3432      If there are any C_Ps in any of these sequences, replace them
3433      with their normal call.  */
3434 
3435   for (insn = normal_call_insns; insn; insn = NEXT_INSN (insn))
3436     if (GET_CODE (insn) == CALL_INSN
3437 	&& GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3438       replace_call_placeholder (insn, sibcall_use_normal);
3439 
3440   for (insn = tail_call_insns; insn; insn = NEXT_INSN (insn))
3441     if (GET_CODE (insn) == CALL_INSN
3442 	&& GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3443       replace_call_placeholder (insn, sibcall_use_normal);
3444 
3445   for (insn = tail_recursion_insns; insn; insn = NEXT_INSN (insn))
3446     if (GET_CODE (insn) == CALL_INSN
3447 	&& GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
3448       replace_call_placeholder (insn, sibcall_use_normal);
3449 
3450   /* If this was a potential tail recursion site, then emit a
3451      CALL_PLACEHOLDER with the normal and the tail recursion streams.
3452      One of them will be selected later.  */
3453   if (tail_recursion_insns || tail_call_insns)
3454     {
3455       /* The tail recursion label must be kept around.  We could expose
3456 	 its use in the CALL_PLACEHOLDER, but that creates unwanted edges
3457 	 and makes determining true tail recursion sites difficult.
3458 
3459 	 So we set LABEL_PRESERVE_P here, then clear it when we select
3460 	 one of the call sequences after rtl generation is complete.  */
3461       if (tail_recursion_insns)
3462 	LABEL_PRESERVE_P (tail_recursion_label) = 1;
3463       emit_call_insn (gen_rtx_CALL_PLACEHOLDER (VOIDmode, normal_call_insns,
3464 						tail_call_insns,
3465 						tail_recursion_insns,
3466 						tail_recursion_label));
3467     }
3468   else
3469     emit_insn (normal_call_insns);
3470 
3471   currently_expanding_call--;
3472 
3473   /* If this function returns with the stack pointer depressed, ensure
3474      this block saves and restores the stack pointer, show it was
3475      changed, and adjust for any outgoing arg space.  */
3476   if (flags & ECF_SP_DEPRESSED)
3477     {
3478       clear_pending_stack_adjust ();
3479       emit_insn (gen_rtx (CLOBBER, VOIDmode, stack_pointer_rtx));
3480       emit_move_insn (virtual_stack_dynamic_rtx, stack_pointer_rtx);
3481       save_stack_pointer ();
3482     }
3483 
3484   return target;
3485 }
3486 
3487 /* Output a library call to function FUN (a SYMBOL_REF rtx).
3488    The RETVAL parameter specifies whether return value needs to be saved, other
3489    parameters are documented in the emit_library_call function below.  */
3490 
3491 static rtx
emit_library_call_value_1(retval,orgfun,value,fn_type,outmode,nargs,p)3492 emit_library_call_value_1 (retval, orgfun, value, fn_type, outmode, nargs, p)
3493      int retval;
3494      rtx orgfun;
3495      rtx value;
3496      enum libcall_type fn_type;
3497      enum machine_mode outmode;
3498      int nargs;
3499      va_list p;
3500 {
3501   /* Total size in bytes of all the stack-parms scanned so far.  */
3502   struct args_size args_size;
3503   /* Size of arguments before any adjustments (such as rounding).  */
3504   struct args_size original_args_size;
3505   int argnum;
3506   rtx fun;
3507   int inc;
3508   int count;
3509   struct args_size alignment_pad;
3510   rtx argblock = 0;
3511   CUMULATIVE_ARGS args_so_far;
3512   struct arg
3513   {
3514     rtx value;
3515     enum machine_mode mode;
3516     rtx reg;
3517     int partial;
3518     struct args_size offset;
3519     struct args_size size;
3520     rtx save_area;
3521   };
3522   struct arg *argvec;
3523   int old_inhibit_defer_pop = inhibit_defer_pop;
3524   rtx call_fusage = 0;
3525   rtx mem_value = 0;
3526   rtx valreg;
3527   int pcc_struct_value = 0;
3528   int struct_value_size = 0;
3529   int flags;
3530   int reg_parm_stack_space = 0;
3531   int needed;
3532   rtx before_call;
3533   tree tfom;			/* type_for_mode (outmode, 0) */
3534 
3535 #ifdef REG_PARM_STACK_SPACE
3536   /* Define the boundary of the register parm stack space that needs to be
3537      save, if any.  */
3538   int low_to_save = -1, high_to_save = 0;
3539   rtx save_area = 0;            /* Place that it is saved.  */
3540 #endif
3541 
3542   /* Size of the stack reserved for parameter registers.  */
3543   int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3544   char *initial_stack_usage_map = stack_usage_map;
3545 
3546 #ifdef REG_PARM_STACK_SPACE
3547 #ifdef MAYBE_REG_PARM_STACK_SPACE
3548   reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
3549 #else
3550   reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
3551 #endif
3552 #endif
3553 
3554   /* By default, library functions can not throw.  */
3555   flags = ECF_NOTHROW;
3556 
3557   switch (fn_type)
3558     {
3559     case LCT_NORMAL:
3560       break;
3561     case LCT_CONST:
3562       flags |= ECF_CONST;
3563       break;
3564     case LCT_PURE:
3565       flags |= ECF_PURE;
3566       break;
3567     case LCT_CONST_MAKE_BLOCK:
3568       flags |= ECF_CONST | ECF_LIBCALL_BLOCK;
3569       break;
3570     case LCT_PURE_MAKE_BLOCK:
3571       flags |= ECF_PURE | ECF_LIBCALL_BLOCK;
3572       break;
3573     case LCT_NORETURN:
3574       flags |= ECF_NORETURN;
3575       break;
3576     case LCT_THROW:
3577       flags = ECF_NORETURN;
3578       break;
3579     case LCT_ALWAYS_RETURN:
3580       flags = ECF_ALWAYS_RETURN;
3581       break;
3582     case LCT_RETURNS_TWICE:
3583       flags = ECF_RETURNS_TWICE;
3584       break;
3585     }
3586   fun = orgfun;
3587 
3588   /* Ensure current function's preferred stack boundary is at least
3589      what we need.  */
3590   if (cfun->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
3591     cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3592 
3593   /* If this kind of value comes back in memory,
3594      decide where in memory it should come back.  */
3595   if (outmode != VOIDmode)
3596     {
3597       tfom = (*lang_hooks.types.type_for_mode) (outmode, 0);
3598       if (aggregate_value_p (tfom))
3599 	{
3600 #ifdef PCC_STATIC_STRUCT_RETURN
3601 	  rtx pointer_reg
3602 	    = hard_function_value (build_pointer_type (tfom), 0, 0);
3603 	  mem_value = gen_rtx_MEM (outmode, pointer_reg);
3604 	  pcc_struct_value = 1;
3605 	  if (value == 0)
3606 	    value = gen_reg_rtx (outmode);
3607 #else /* not PCC_STATIC_STRUCT_RETURN */
3608 	  struct_value_size = GET_MODE_SIZE (outmode);
3609 	  if (value != 0 && GET_CODE (value) == MEM)
3610 	    mem_value = value;
3611 	  else
3612 	    mem_value = assign_temp (tfom, 0, 1, 1);
3613 #endif
3614 	  /* This call returns a big structure.  */
3615 	  flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
3616 	}
3617     }
3618   else
3619     tfom = void_type_node;
3620 
3621   /* ??? Unfinished: must pass the memory address as an argument.  */
3622 
3623   /* Copy all the libcall-arguments out of the varargs data
3624      and into a vector ARGVEC.
3625 
3626      Compute how to pass each argument.  We only support a very small subset
3627      of the full argument passing conventions to limit complexity here since
3628      library functions shouldn't have many args.  */
3629 
3630   argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg));
3631   memset ((char *) argvec, 0, (nargs + 1) * sizeof (struct arg));
3632 
3633 #ifdef INIT_CUMULATIVE_LIBCALL_ARGS
3634   INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far, outmode, fun);
3635 #else
3636   INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun, 0);
3637 #endif
3638 
3639   args_size.constant = 0;
3640   args_size.var = 0;
3641 
3642   count = 0;
3643 
3644   /* Now we are about to start emitting insns that can be deleted
3645      if a libcall is deleted.  */
3646   if (flags & ECF_LIBCALL_BLOCK)
3647     start_sequence ();
3648 
3649   push_temp_slots ();
3650 
3651   /* If there's a structure value address to be passed,
3652      either pass it in the special place, or pass it as an extra argument.  */
3653   if (mem_value && struct_value_rtx == 0 && ! pcc_struct_value)
3654     {
3655       rtx addr = XEXP (mem_value, 0);
3656       nargs++;
3657 
3658       /* Make sure it is a reasonable operand for a move or push insn.  */
3659       if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
3660 	  && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
3661 	addr = force_operand (addr, NULL_RTX);
3662 
3663       argvec[count].value = addr;
3664       argvec[count].mode = Pmode;
3665       argvec[count].partial = 0;
3666 
3667       argvec[count].reg = FUNCTION_ARG (args_so_far, Pmode, NULL_TREE, 1);
3668 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3669       if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, Pmode, NULL_TREE, 1))
3670 	abort ();
3671 #endif
3672 
3673       locate_and_pad_parm (Pmode, NULL_TREE,
3674 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3675                            1,
3676 #else
3677 			   argvec[count].reg != 0,
3678 #endif
3679 			   NULL_TREE, &args_size, &argvec[count].offset,
3680 			   &argvec[count].size, &alignment_pad);
3681 
3682       if (argvec[count].reg == 0 || argvec[count].partial != 0
3683 	  || reg_parm_stack_space > 0)
3684 	args_size.constant += argvec[count].size.constant;
3685 
3686       FUNCTION_ARG_ADVANCE (args_so_far, Pmode, (tree) 0, 1);
3687 
3688       count++;
3689     }
3690 
3691   for (; count < nargs; count++)
3692     {
3693       rtx val = va_arg (p, rtx);
3694       enum machine_mode mode = va_arg (p, enum machine_mode);
3695 
3696       /* We cannot convert the arg value to the mode the library wants here;
3697 	 must do it earlier where we know the signedness of the arg.  */
3698       if (mode == BLKmode
3699 	  || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
3700 	abort ();
3701 
3702       /* On some machines, there's no way to pass a float to a library fcn.
3703 	 Pass it as a double instead.  */
3704 #ifdef LIBGCC_NEEDS_DOUBLE
3705       if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
3706 	val = convert_modes (DFmode, SFmode, val, 0), mode = DFmode;
3707 #endif
3708 
3709       /* There's no need to call protect_from_queue, because
3710 	 either emit_move_insn or emit_push_insn will do that.  */
3711 
3712       /* Make sure it is a reasonable operand for a move or push insn.  */
3713       if (GET_CODE (val) != REG && GET_CODE (val) != MEM
3714 	  && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
3715 	val = force_operand (val, NULL_RTX);
3716 
3717 #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
3718       if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
3719 	{
3720 	  rtx slot;
3721 	  int must_copy = 1
3722 #ifdef FUNCTION_ARG_CALLEE_COPIES
3723 	    && ! FUNCTION_ARG_CALLEE_COPIES (args_so_far, mode,
3724 					     NULL_TREE, 1)
3725 #endif
3726 	    ;
3727 
3728 	  /* loop.c won't look at CALL_INSN_FUNCTION_USAGE of const/pure
3729 	     functions, so we have to pretend this isn't such a function.  */
3730 	  if (flags & ECF_LIBCALL_BLOCK)
3731 	    {
3732 	      rtx insns = get_insns ();
3733 	      end_sequence ();
3734 	      emit_insn (insns);
3735 	    }
3736 	  flags &= ~(ECF_CONST | ECF_PURE | ECF_LIBCALL_BLOCK);
3737 
3738 	  /* If this was a CONST function, it is now PURE since
3739 	     it now reads memory.  */
3740 	  if (flags & ECF_CONST)
3741 	    {
3742 	      flags &= ~ECF_CONST;
3743 	      flags |= ECF_PURE;
3744 	    }
3745 
3746 	  if (GET_MODE (val) == MEM && ! must_copy)
3747 	    slot = val;
3748 	  else if (must_copy)
3749 	    {
3750 	      slot = assign_temp ((*lang_hooks.types.type_for_mode) (mode, 0),
3751 				  0, 1, 1);
3752 	      emit_move_insn (slot, val);
3753 	    }
3754 	  else
3755 	    {
3756 	      tree type = (*lang_hooks.types.type_for_mode) (mode, 0);
3757 
3758 	      slot = gen_rtx_MEM (mode,
3759 				  expand_expr (build1 (ADDR_EXPR,
3760 						       build_pointer_type
3761 						       (type),
3762 						       make_tree (type, val)),
3763 					       NULL_RTX, VOIDmode, 0));
3764 	    }
3765 
3766 	  call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3767 					   gen_rtx_USE (VOIDmode, slot),
3768 					   call_fusage);
3769 	  if (must_copy)
3770 	    call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
3771 					     gen_rtx_CLOBBER (VOIDmode,
3772 							      slot),
3773 					     call_fusage);
3774 
3775 	  mode = Pmode;
3776 	  val = force_operand (XEXP (slot, 0), NULL_RTX);
3777 	}
3778 #endif
3779 
3780       argvec[count].value = val;
3781       argvec[count].mode = mode;
3782 
3783       argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
3784 
3785 #ifdef FUNCTION_ARG_PARTIAL_NREGS
3786       argvec[count].partial
3787 	= FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
3788 #else
3789       argvec[count].partial = 0;
3790 #endif
3791 
3792       locate_and_pad_parm (mode, NULL_TREE,
3793 #ifdef STACK_PARMS_IN_REG_PARM_AREA
3794 			   1,
3795 #else
3796 			   argvec[count].reg != 0,
3797 #endif
3798 			   NULL_TREE, &args_size, &argvec[count].offset,
3799 			   &argvec[count].size, &alignment_pad);
3800 
3801       if (argvec[count].size.var)
3802 	abort ();
3803 
3804       if (reg_parm_stack_space == 0 && argvec[count].partial)
3805 	argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
3806 
3807       if (argvec[count].reg == 0 || argvec[count].partial != 0
3808 	  || reg_parm_stack_space > 0)
3809 	args_size.constant += argvec[count].size.constant;
3810 
3811       FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree) 0, 1);
3812     }
3813 
3814 #ifdef FINAL_REG_PARM_STACK_SPACE
3815   reg_parm_stack_space = FINAL_REG_PARM_STACK_SPACE (args_size.constant,
3816 						     args_size.var);
3817 #endif
3818   /* If this machine requires an external definition for library
3819      functions, write one out.  */
3820   assemble_external_libcall (fun);
3821 
3822   original_args_size = args_size;
3823   args_size.constant = (((args_size.constant
3824 			  + stack_pointer_delta
3825 			  + STACK_BYTES - 1)
3826 			  / STACK_BYTES
3827 			  * STACK_BYTES)
3828 			 - stack_pointer_delta);
3829 
3830   args_size.constant = MAX (args_size.constant,
3831 			    reg_parm_stack_space);
3832 
3833 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3834   args_size.constant -= reg_parm_stack_space;
3835 #endif
3836 
3837   if (args_size.constant > current_function_outgoing_args_size)
3838     current_function_outgoing_args_size = args_size.constant;
3839 
3840   if (ACCUMULATE_OUTGOING_ARGS)
3841     {
3842       /* Since the stack pointer will never be pushed, it is possible for
3843 	 the evaluation of a parm to clobber something we have already
3844 	 written to the stack.  Since most function calls on RISC machines
3845 	 do not use the stack, this is uncommon, but must work correctly.
3846 
3847 	 Therefore, we save any area of the stack that was already written
3848 	 and that we are using.  Here we set up to do this by making a new
3849 	 stack usage map from the old one.
3850 
3851 	 Another approach might be to try to reorder the argument
3852 	 evaluations to avoid this conflicting stack usage.  */
3853 
3854       needed = args_size.constant;
3855 
3856 #ifndef OUTGOING_REG_PARM_STACK_SPACE
3857       /* Since we will be writing into the entire argument area, the
3858 	 map must be allocated for its entire size, not just the part that
3859 	 is the responsibility of the caller.  */
3860       needed += reg_parm_stack_space;
3861 #endif
3862 
3863 #ifdef ARGS_GROW_DOWNWARD
3864       highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3865 					 needed + 1);
3866 #else
3867       highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
3868 					 needed);
3869 #endif
3870       stack_usage_map = (char *) alloca (highest_outgoing_arg_in_use);
3871 
3872       if (initial_highest_arg_in_use)
3873 	memcpy (stack_usage_map, initial_stack_usage_map,
3874 		initial_highest_arg_in_use);
3875 
3876       if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3877 	memset (&stack_usage_map[initial_highest_arg_in_use], 0,
3878 	       highest_outgoing_arg_in_use - initial_highest_arg_in_use);
3879       needed = 0;
3880 
3881       /* We must be careful to use virtual regs before they're instantiated,
3882          and real regs afterwards.  Loop optimization, for example, can create
3883 	 new libcalls after we've instantiated the virtual regs, and if we
3884 	 use virtuals anyway, they won't match the rtl patterns.  */
3885 
3886       if (virtuals_instantiated)
3887 	argblock = plus_constant (stack_pointer_rtx, STACK_POINTER_OFFSET);
3888       else
3889 	argblock = virtual_outgoing_args_rtx;
3890     }
3891   else
3892     {
3893       if (!PUSH_ARGS)
3894 	argblock = push_block (GEN_INT (args_size.constant), 0, 0);
3895     }
3896 
3897   /* If we push args individually in reverse order, perform stack alignment
3898      before the first push (the last arg).  */
3899   if (argblock == 0 && PUSH_ARGS_REVERSED)
3900     anti_adjust_stack (GEN_INT (args_size.constant
3901 				- original_args_size.constant));
3902 
3903   if (PUSH_ARGS_REVERSED)
3904     {
3905       inc = -1;
3906       argnum = nargs - 1;
3907     }
3908   else
3909     {
3910       inc = 1;
3911       argnum = 0;
3912     }
3913 
3914 #ifdef REG_PARM_STACK_SPACE
3915   if (ACCUMULATE_OUTGOING_ARGS)
3916     {
3917       /* The argument list is the property of the called routine and it
3918 	 may clobber it.  If the fixed area has been used for previous
3919 	 parameters, we must save and restore it.
3920 
3921 	 Here we compute the boundary of the that needs to be saved, if any.  */
3922 
3923 #ifdef ARGS_GROW_DOWNWARD
3924       for (count = 0; count < reg_parm_stack_space + 1; count++)
3925 #else
3926       for (count = 0; count < reg_parm_stack_space; count++)
3927 #endif
3928 	{
3929 	  if (count >= highest_outgoing_arg_in_use
3930 	      || stack_usage_map[count] == 0)
3931 	    continue;
3932 
3933 	  if (low_to_save == -1)
3934 	    low_to_save = count;
3935 
3936 	  high_to_save = count;
3937 	}
3938 
3939       if (low_to_save >= 0)
3940 	{
3941 	  int num_to_save = high_to_save - low_to_save + 1;
3942 	  enum machine_mode save_mode
3943 	    = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
3944 	  rtx stack_area;
3945 
3946 	  /* If we don't have the required alignment, must do this in BLKmode.  */
3947 	  if ((low_to_save & (MIN (GET_MODE_SIZE (save_mode),
3948 				   BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
3949 	    save_mode = BLKmode;
3950 
3951 #ifdef ARGS_GROW_DOWNWARD
3952 	  stack_area = gen_rtx_MEM (save_mode,
3953 				    memory_address (save_mode,
3954 						    plus_constant (argblock,
3955 								   -high_to_save)));
3956 #else
3957 	  stack_area = gen_rtx_MEM (save_mode,
3958 				    memory_address (save_mode,
3959 						    plus_constant (argblock,
3960 								   low_to_save)));
3961 #endif
3962 	  if (save_mode == BLKmode)
3963 	    {
3964 	      save_area = assign_stack_temp (BLKmode, num_to_save, 0);
3965 	      set_mem_align (save_area, PARM_BOUNDARY);
3966 	      emit_block_move (save_area, stack_area, GEN_INT (num_to_save),
3967 			       BLOCK_OP_CALL_PARM);
3968 	    }
3969 	  else
3970 	    {
3971 	      save_area = gen_reg_rtx (save_mode);
3972 	      emit_move_insn (save_area, stack_area);
3973 	    }
3974 	}
3975     }
3976 #endif
3977 
3978   /* Push the args that need to be pushed.  */
3979 
3980   /* ARGNUM indexes the ARGVEC array in the order in which the arguments
3981      are to be pushed.  */
3982   for (count = 0; count < nargs; count++, argnum += inc)
3983     {
3984       enum machine_mode mode = argvec[argnum].mode;
3985       rtx val = argvec[argnum].value;
3986       rtx reg = argvec[argnum].reg;
3987       int partial = argvec[argnum].partial;
3988       int lower_bound = 0, upper_bound = 0, i;
3989 
3990       if (! (reg != 0 && partial == 0))
3991 	{
3992 	  if (ACCUMULATE_OUTGOING_ARGS)
3993 	    {
3994 	      /* If this is being stored into a pre-allocated, fixed-size,
3995 		 stack area, save any previous data at that location.  */
3996 
3997 #ifdef ARGS_GROW_DOWNWARD
3998 	      /* stack_slot is negative, but we want to index stack_usage_map
3999 		 with positive values.  */
4000 	      upper_bound = -argvec[argnum].offset.constant + 1;
4001 	      lower_bound = upper_bound - argvec[argnum].size.constant;
4002 #else
4003 	      lower_bound = argvec[argnum].offset.constant;
4004 	      upper_bound = lower_bound + argvec[argnum].size.constant;
4005 #endif
4006 
4007 	      for (i = lower_bound; i < upper_bound; i++)
4008 		if (stack_usage_map[i]
4009 		    /* Don't store things in the fixed argument area at this
4010 		       point; it has already been saved.  */
4011 		    && i > reg_parm_stack_space)
4012 		  break;
4013 
4014 	      if (i != upper_bound)
4015 		{
4016 		  /* We need to make a save area.  See what mode we can make
4017 		     it.  */
4018 		  enum machine_mode save_mode
4019 		    = mode_for_size (argvec[argnum].size.constant
4020 				     * BITS_PER_UNIT,
4021 				     MODE_INT, 1);
4022 		  rtx stack_area
4023 		    = gen_rtx_MEM
4024 		      (save_mode,
4025 		       memory_address
4026 		       (save_mode,
4027 			plus_constant (argblock,
4028 				       argvec[argnum].offset.constant)));
4029 		  if (save_mode == BLKmode)
4030 		    {
4031 		      argvec[argnum].save_area
4032 			= assign_stack_temp (BLKmode,
4033 				             argvec[argnum].size.constant, 0);
4034 
4035 		      emit_block_move (validize_mem (argvec[argnum].save_area),
4036 			  	       stack_area,
4037 				       GEN_INT (argvec[argnum].size.constant),
4038 				       BLOCK_OP_CALL_PARM);
4039 		    }
4040 		  else
4041 		    {
4042 		      argvec[argnum].save_area = gen_reg_rtx (save_mode);
4043 
4044 		      emit_move_insn (argvec[argnum].save_area, stack_area);
4045 		    }
4046 		}
4047 	    }
4048 
4049 	  emit_push_insn (val, mode, NULL_TREE, NULL_RTX, PARM_BOUNDARY,
4050 			  partial, reg, 0, argblock,
4051 			  GEN_INT (argvec[argnum].offset.constant),
4052 			  reg_parm_stack_space, ARGS_SIZE_RTX (alignment_pad));
4053 
4054 	  /* Now mark the segment we just used.  */
4055 	  if (ACCUMULATE_OUTGOING_ARGS)
4056 	    for (i = lower_bound; i < upper_bound; i++)
4057 	      stack_usage_map[i] = 1;
4058 
4059 	  NO_DEFER_POP;
4060 	}
4061     }
4062 
4063   /* If we pushed args in forward order, perform stack alignment
4064      after pushing the last arg.  */
4065   if (argblock == 0 && !PUSH_ARGS_REVERSED)
4066     anti_adjust_stack (GEN_INT (args_size.constant
4067 				- original_args_size.constant));
4068 
4069   if (PUSH_ARGS_REVERSED)
4070     argnum = nargs - 1;
4071   else
4072     argnum = 0;
4073 
4074   fun = prepare_call_address (fun, NULL_TREE, &call_fusage, 0, 0);
4075 
4076   /* Now load any reg parms into their regs.  */
4077 
4078   /* ARGNUM indexes the ARGVEC array in the order in which the arguments
4079      are to be pushed.  */
4080   for (count = 0; count < nargs; count++, argnum += inc)
4081     {
4082       rtx val = argvec[argnum].value;
4083       rtx reg = argvec[argnum].reg;
4084       int partial = argvec[argnum].partial;
4085 
4086       /* Handle calls that pass values in multiple non-contiguous
4087 	 locations.  The PA64 has examples of this for library calls.  */
4088       if (reg != 0 && GET_CODE (reg) == PARALLEL)
4089 	emit_group_load (reg, val, GET_MODE_SIZE (GET_MODE (val)));
4090       else if (reg != 0 && partial == 0)
4091 	emit_move_insn (reg, val);
4092 
4093       NO_DEFER_POP;
4094     }
4095 
4096   /* Any regs containing parms remain in use through the call.  */
4097   for (count = 0; count < nargs; count++)
4098     {
4099       rtx reg = argvec[count].reg;
4100       if (reg != 0 && GET_CODE (reg) == PARALLEL)
4101 	use_group_regs (&call_fusage, reg);
4102       else if (reg != 0)
4103 	use_reg (&call_fusage, reg);
4104     }
4105 
4106   /* Pass the function the address in which to return a structure value.  */
4107   if (mem_value != 0 && struct_value_rtx != 0 && ! pcc_struct_value)
4108     {
4109       emit_move_insn (struct_value_rtx,
4110 		      force_reg (Pmode,
4111 				 force_operand (XEXP (mem_value, 0),
4112 						NULL_RTX)));
4113       if (GET_CODE (struct_value_rtx) == REG)
4114 	use_reg (&call_fusage, struct_value_rtx);
4115     }
4116 
4117   /* Don't allow popping to be deferred, since then
4118      cse'ing of library calls could delete a call and leave the pop.  */
4119   NO_DEFER_POP;
4120   valreg = (mem_value == 0 && outmode != VOIDmode
4121 	    ? hard_libcall_value (outmode) : NULL_RTX);
4122 
4123   /* Stack must be properly aligned now.  */
4124   if (stack_pointer_delta & (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1))
4125     abort ();
4126 
4127   before_call = get_last_insn ();
4128 
4129   /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
4130      will set inhibit_defer_pop to that value.  */
4131   /* The return type is needed to decide how many bytes the function pops.
4132      Signedness plays no role in that, so for simplicity, we pretend it's
4133      always signed.  We also assume that the list of arguments passed has
4134      no impact, so we pretend it is unknown.  */
4135 
4136   emit_call_1 (fun,
4137 	       get_identifier (XSTR (orgfun, 0)),
4138 	       build_function_type (tfom, NULL_TREE),
4139 	       original_args_size.constant, args_size.constant,
4140 	       struct_value_size,
4141 	       FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
4142 	       valreg,
4143 	       old_inhibit_defer_pop + 1, call_fusage, flags, & args_so_far);
4144 
4145   /* For calls to `setjmp', etc., inform flow.c it should complain
4146      if nonvolatile values are live.  For functions that cannot return,
4147      inform flow that control does not fall through.  */
4148 
4149   if (flags & (ECF_NORETURN | ECF_LONGJMP))
4150     {
4151       /* The barrier note must be emitted
4152 	 immediately after the CALL_INSN.  Some ports emit more than
4153 	 just a CALL_INSN above, so we must search for it here.  */
4154 
4155       rtx last = get_last_insn ();
4156       while (GET_CODE (last) != CALL_INSN)
4157 	{
4158 	  last = PREV_INSN (last);
4159 	  /* There was no CALL_INSN?  */
4160 	  if (last == before_call)
4161 	    abort ();
4162 	}
4163 
4164       emit_barrier_after (last);
4165     }
4166 
4167   /* Now restore inhibit_defer_pop to its actual original value.  */
4168   OK_DEFER_POP;
4169 
4170   /* If call is cse'able, make appropriate pair of reg-notes around it.
4171      Test valreg so we don't crash; may safely ignore `const'
4172      if return type is void.  Disable for PARALLEL return values, because
4173      we have no way to move such values into a pseudo register.  */
4174   if (flags & ECF_LIBCALL_BLOCK)
4175     {
4176       rtx insns;
4177 
4178       if (valreg == 0)
4179 	{
4180 	  insns = get_insns ();
4181 	  end_sequence ();
4182 	  emit_insn (insns);
4183 	}
4184       else
4185 	{
4186 	  rtx note = 0;
4187 	  rtx temp;
4188 	  int i;
4189 
4190 	  if (GET_CODE (valreg) == PARALLEL)
4191 	    {
4192 	      temp = gen_reg_rtx (outmode);
4193 	      emit_group_store (temp, valreg, outmode);
4194 	      valreg = temp;
4195 	    }
4196 
4197 	  temp = gen_reg_rtx (GET_MODE (valreg));
4198 
4199 	  /* Construct an "equal form" for the value which mentions all the
4200 	     arguments in order as well as the function name.  */
4201 	  for (i = 0; i < nargs; i++)
4202 	    note = gen_rtx_EXPR_LIST (VOIDmode, argvec[i].value, note);
4203 	  note = gen_rtx_EXPR_LIST (VOIDmode, fun, note);
4204 
4205 	  insns = get_insns ();
4206 	  end_sequence ();
4207 
4208 	  if (flags & ECF_PURE)
4209 	    note = gen_rtx_EXPR_LIST (VOIDmode,
4210 			gen_rtx_USE (VOIDmode,
4211 				     gen_rtx_MEM (BLKmode,
4212 						  gen_rtx_SCRATCH (VOIDmode))),
4213 			note);
4214 
4215 	  emit_libcall_block (insns, temp, valreg, note);
4216 
4217 	  valreg = temp;
4218 	}
4219     }
4220   pop_temp_slots ();
4221 
4222   /* Copy the value to the right place.  */
4223   if (outmode != VOIDmode && retval)
4224     {
4225       if (mem_value)
4226 	{
4227 	  if (value == 0)
4228 	    value = mem_value;
4229 	  if (value != mem_value)
4230 	    emit_move_insn (value, mem_value);
4231 	}
4232       else if (GET_CODE (valreg) == PARALLEL)
4233 	{
4234 	  if (value == 0)
4235 	    value = gen_reg_rtx (outmode);
4236 	  emit_group_store (value, valreg, outmode);
4237 	}
4238       else if (value != 0)
4239 	emit_move_insn (value, valreg);
4240       else
4241 	value = valreg;
4242     }
4243 
4244   if (ACCUMULATE_OUTGOING_ARGS)
4245     {
4246 #ifdef REG_PARM_STACK_SPACE
4247       if (save_area)
4248 	{
4249 	  enum machine_mode save_mode = GET_MODE (save_area);
4250 #ifdef ARGS_GROW_DOWNWARD
4251 	  rtx stack_area
4252 	    = gen_rtx_MEM (save_mode,
4253 			   memory_address (save_mode,
4254 					   plus_constant (argblock,
4255 							  - high_to_save)));
4256 #else
4257 	  rtx stack_area
4258 	    = gen_rtx_MEM (save_mode,
4259 			   memory_address (save_mode,
4260 					   plus_constant (argblock, low_to_save)));
4261 #endif
4262 
4263 	  set_mem_align (stack_area, PARM_BOUNDARY);
4264 	  if (save_mode != BLKmode)
4265 	    emit_move_insn (stack_area, save_area);
4266 	  else
4267 	    emit_block_move (stack_area, save_area,
4268 			     GEN_INT (high_to_save - low_to_save + 1),
4269 			     BLOCK_OP_CALL_PARM);
4270 	}
4271 #endif
4272 
4273       /* If we saved any argument areas, restore them.  */
4274       for (count = 0; count < nargs; count++)
4275 	if (argvec[count].save_area)
4276 	  {
4277 	    enum machine_mode save_mode = GET_MODE (argvec[count].save_area);
4278 	    rtx stack_area
4279 	      = gen_rtx_MEM (save_mode,
4280 			     memory_address
4281 			     (save_mode,
4282 			      plus_constant (argblock,
4283 					     argvec[count].offset.constant)));
4284 
4285 	    if (save_mode == BLKmode)
4286 	      emit_block_move (stack_area,
4287 		  	       validize_mem (argvec[count].save_area),
4288 			       GEN_INT (argvec[count].size.constant),
4289 			       BLOCK_OP_CALL_PARM);
4290 	    else
4291 	      emit_move_insn (stack_area, argvec[count].save_area);
4292 	  }
4293 
4294       highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4295       stack_usage_map = initial_stack_usage_map;
4296     }
4297 
4298   return value;
4299 
4300 }
4301 
4302 /* Output a library call to function FUN (a SYMBOL_REF rtx)
4303    (emitting the queue unless NO_QUEUE is nonzero),
4304    for a value of mode OUTMODE,
4305    with NARGS different arguments, passed as alternating rtx values
4306    and machine_modes to convert them to.
4307    The rtx values should have been passed through protect_from_queue already.
4308 
4309    FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for `const'
4310    calls, LCT_PURE for `pure' calls, LCT_CONST_MAKE_BLOCK for `const' calls
4311    which should be enclosed in REG_LIBCALL/REG_RETVAL notes,
4312    LCT_PURE_MAKE_BLOCK for `purep' calls which should be enclosed in
4313    REG_LIBCALL/REG_RETVAL notes with extra (use (memory (scratch)),
4314    or other LCT_ value for other types of library calls.  */
4315 
4316 void
emit_library_call(rtx orgfun,enum libcall_type fn_type,enum machine_mode outmode,int nargs,...)4317 emit_library_call VPARAMS((rtx orgfun, enum libcall_type fn_type,
4318 			   enum machine_mode outmode, int nargs, ...))
4319 {
4320   VA_OPEN (p, nargs);
4321   VA_FIXEDARG (p, rtx, orgfun);
4322   VA_FIXEDARG (p, int, fn_type);
4323   VA_FIXEDARG (p, enum machine_mode, outmode);
4324   VA_FIXEDARG (p, int, nargs);
4325 
4326   emit_library_call_value_1 (0, orgfun, NULL_RTX, fn_type, outmode, nargs, p);
4327 
4328   VA_CLOSE (p);
4329 }
4330 
4331 /* Like emit_library_call except that an extra argument, VALUE,
4332    comes second and says where to store the result.
4333    (If VALUE is zero, this function chooses a convenient way
4334    to return the value.
4335 
4336    This function returns an rtx for where the value is to be found.
4337    If VALUE is nonzero, VALUE is returned.  */
4338 
4339 rtx
emit_library_call_value(rtx orgfun,rtx value,enum libcall_type fn_type,enum machine_mode outmode,int nargs,...)4340 emit_library_call_value VPARAMS((rtx orgfun, rtx value,
4341 				 enum libcall_type fn_type,
4342 				 enum machine_mode outmode, int nargs, ...))
4343 {
4344   rtx result;
4345 
4346   VA_OPEN (p, nargs);
4347   VA_FIXEDARG (p, rtx, orgfun);
4348   VA_FIXEDARG (p, rtx, value);
4349   VA_FIXEDARG (p, int, fn_type);
4350   VA_FIXEDARG (p, enum machine_mode, outmode);
4351   VA_FIXEDARG (p, int, nargs);
4352 
4353   result = emit_library_call_value_1 (1, orgfun, value, fn_type, outmode,
4354 				      nargs, p);
4355 
4356   VA_CLOSE (p);
4357 
4358   return result;
4359 }
4360 
4361 /* Store a single argument for a function call
4362    into the register or memory area where it must be passed.
4363    *ARG describes the argument value and where to pass it.
4364 
4365    ARGBLOCK is the address of the stack-block for all the arguments,
4366    or 0 on a machine where arguments are pushed individually.
4367 
4368    MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
4369    so must be careful about how the stack is used.
4370 
4371    VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
4372    argument stack.  This is used if ACCUMULATE_OUTGOING_ARGS to indicate
4373    that we need not worry about saving and restoring the stack.
4374 
4375    FNDECL is the declaration of the function we are calling.
4376 
4377    Return nonzero if this arg should cause sibcall failure,
4378    zero otherwise.  */
4379 
4380 static int
store_one_arg(arg,argblock,flags,variable_size,reg_parm_stack_space)4381 store_one_arg (arg, argblock, flags, variable_size, reg_parm_stack_space)
4382      struct arg_data *arg;
4383      rtx argblock;
4384      int flags;
4385      int variable_size ATTRIBUTE_UNUSED;
4386      int reg_parm_stack_space;
4387 {
4388   tree pval = arg->tree_value;
4389   rtx reg = 0;
4390   int partial = 0;
4391   int used = 0;
4392   int i, lower_bound = 0, upper_bound = 0;
4393   int sibcall_failure = 0;
4394 
4395   if (TREE_CODE (pval) == ERROR_MARK)
4396     return 1;
4397 
4398   /* Push a new temporary level for any temporaries we make for
4399      this argument.  */
4400   push_temp_slots ();
4401 
4402   if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
4403     {
4404       /* If this is being stored into a pre-allocated, fixed-size, stack area,
4405 	 save any previous data at that location.  */
4406       if (argblock && ! variable_size && arg->stack)
4407 	{
4408 #ifdef ARGS_GROW_DOWNWARD
4409 	  /* stack_slot is negative, but we want to index stack_usage_map
4410 	     with positive values.  */
4411 	  if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4412 	    upper_bound = -INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1)) + 1;
4413 	  else
4414 	    upper_bound = 0;
4415 
4416 	  lower_bound = upper_bound - arg->size.constant;
4417 #else
4418 	  if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
4419 	    lower_bound = INTVAL (XEXP (XEXP (arg->stack_slot, 0), 1));
4420 	  else
4421 	    lower_bound = 0;
4422 
4423 	  upper_bound = lower_bound + arg->size.constant;
4424 #endif
4425 
4426 	  for (i = lower_bound; i < upper_bound; i++)
4427 	    if (stack_usage_map[i]
4428 		/* Don't store things in the fixed argument area at this point;
4429 		   it has already been saved.  */
4430 		&& i > reg_parm_stack_space)
4431 	      break;
4432 
4433 	  if (i != upper_bound)
4434 	    {
4435 	      /* We need to make a save area.  See what mode we can make it.  */
4436 	      enum machine_mode save_mode
4437 		= mode_for_size (arg->size.constant * BITS_PER_UNIT, MODE_INT, 1);
4438 	      rtx stack_area
4439 		= gen_rtx_MEM (save_mode,
4440 			       memory_address (save_mode,
4441 					       XEXP (arg->stack_slot, 0)));
4442 
4443 	      if (save_mode == BLKmode)
4444 		{
4445 		  tree ot = TREE_TYPE (arg->tree_value);
4446 		  tree nt = build_qualified_type (ot, (TYPE_QUALS (ot)
4447 						       | TYPE_QUAL_CONST));
4448 
4449 		  arg->save_area = assign_temp (nt, 0, 1, 1);
4450 		  preserve_temp_slots (arg->save_area);
4451 		  emit_block_move (validize_mem (arg->save_area), stack_area,
4452 				   expr_size (arg->tree_value),
4453 				   BLOCK_OP_CALL_PARM);
4454 		}
4455 	      else
4456 		{
4457 		  arg->save_area = gen_reg_rtx (save_mode);
4458 		  emit_move_insn (arg->save_area, stack_area);
4459 		}
4460 	    }
4461 	}
4462     }
4463 
4464   /* If this isn't going to be placed on both the stack and in registers,
4465      set up the register and number of words.  */
4466   if (! arg->pass_on_stack)
4467     {
4468       if (flags & ECF_SIBCALL)
4469 	reg = arg->tail_call_reg;
4470       else
4471 	reg = arg->reg;
4472       partial = arg->partial;
4473     }
4474 
4475   if (reg != 0 && partial == 0)
4476     /* Being passed entirely in a register.  We shouldn't be called in
4477        this case.  */
4478     abort ();
4479 
4480   /* If this arg needs special alignment, don't load the registers
4481      here.  */
4482   if (arg->n_aligned_regs != 0)
4483     reg = 0;
4484 
4485   /* If this is being passed partially in a register, we can't evaluate
4486      it directly into its stack slot.  Otherwise, we can.  */
4487   if (arg->value == 0)
4488     {
4489       /* stack_arg_under_construction is nonzero if a function argument is
4490 	 being evaluated directly into the outgoing argument list and
4491 	 expand_call must take special action to preserve the argument list
4492 	 if it is called recursively.
4493 
4494 	 For scalar function arguments stack_usage_map is sufficient to
4495 	 determine which stack slots must be saved and restored.  Scalar
4496 	 arguments in general have pass_on_stack == 0.
4497 
4498 	 If this argument is initialized by a function which takes the
4499 	 address of the argument (a C++ constructor or a C function
4500 	 returning a BLKmode structure), then stack_usage_map is
4501 	 insufficient and expand_call must push the stack around the
4502 	 function call.  Such arguments have pass_on_stack == 1.
4503 
4504 	 Note that it is always safe to set stack_arg_under_construction,
4505 	 but this generates suboptimal code if set when not needed.  */
4506 
4507       if (arg->pass_on_stack)
4508 	stack_arg_under_construction++;
4509 
4510       arg->value = expand_expr (pval,
4511 				(partial
4512 				 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
4513 				? NULL_RTX : arg->stack,
4514 				VOIDmode, EXPAND_STACK_PARM);
4515 
4516       /* If we are promoting object (or for any other reason) the mode
4517 	 doesn't agree, convert the mode.  */
4518 
4519       if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
4520 	arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
4521 				    arg->value, arg->unsignedp);
4522 
4523       if (arg->pass_on_stack)
4524 	stack_arg_under_construction--;
4525     }
4526 
4527   /* Don't allow anything left on stack from computation
4528      of argument to alloca.  */
4529   if (flags & ECF_MAY_BE_ALLOCA)
4530     do_pending_stack_adjust ();
4531 
4532   if (arg->value == arg->stack)
4533     /* If the value is already in the stack slot, we are done.  */
4534     ;
4535   else if (arg->mode != BLKmode)
4536     {
4537       int size;
4538 
4539       /* Argument is a scalar, not entirely passed in registers.
4540 	 (If part is passed in registers, arg->partial says how much
4541 	 and emit_push_insn will take care of putting it there.)
4542 
4543 	 Push it, and if its size is less than the
4544 	 amount of space allocated to it,
4545 	 also bump stack pointer by the additional space.
4546 	 Note that in C the default argument promotions
4547 	 will prevent such mismatches.  */
4548 
4549       size = GET_MODE_SIZE (arg->mode);
4550       /* Compute how much space the push instruction will push.
4551 	 On many machines, pushing a byte will advance the stack
4552 	 pointer by a halfword.  */
4553 #ifdef PUSH_ROUNDING
4554       size = PUSH_ROUNDING (size);
4555 #endif
4556       used = size;
4557 
4558       /* Compute how much space the argument should get:
4559 	 round up to a multiple of the alignment for arguments.  */
4560       if (none != FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)))
4561 	used = (((size + PARM_BOUNDARY / BITS_PER_UNIT - 1)
4562 		 / (PARM_BOUNDARY / BITS_PER_UNIT))
4563 		* (PARM_BOUNDARY / BITS_PER_UNIT));
4564 
4565       /* This isn't already where we want it on the stack, so put it there.
4566 	 This can either be done with push or copy insns.  */
4567       emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), NULL_RTX,
4568 		      PARM_BOUNDARY, partial, reg, used - size, argblock,
4569 		      ARGS_SIZE_RTX (arg->offset), reg_parm_stack_space,
4570 		      ARGS_SIZE_RTX (arg->alignment_pad));
4571 
4572       /* Unless this is a partially-in-register argument, the argument is now
4573 	 in the stack.  */
4574       if (partial == 0)
4575 	arg->value = arg->stack;
4576     }
4577   else
4578     {
4579       /* BLKmode, at least partly to be pushed.  */
4580 
4581       unsigned int parm_align;
4582       int excess;
4583       rtx size_rtx;
4584 
4585       /* Pushing a nonscalar.
4586 	 If part is passed in registers, PARTIAL says how much
4587 	 and emit_push_insn will take care of putting it there.  */
4588 
4589       /* Round its size up to a multiple
4590 	 of the allocation unit for arguments.  */
4591 
4592       if (arg->size.var != 0)
4593 	{
4594 	  excess = 0;
4595 	  size_rtx = ARGS_SIZE_RTX (arg->size);
4596 	}
4597       else
4598 	{
4599 	  /* PUSH_ROUNDING has no effect on us, because
4600 	     emit_push_insn for BLKmode is careful to avoid it.  */
4601 	  excess = (arg->size.constant - int_size_in_bytes (TREE_TYPE (pval))
4602 		    + partial * UNITS_PER_WORD);
4603 	  size_rtx = expand_expr (size_in_bytes (TREE_TYPE (pval)),
4604 				  NULL_RTX, TYPE_MODE (sizetype), 0);
4605 	}
4606 
4607       /* Some types will require stricter alignment, which will be
4608 	 provided for elsewhere in argument layout.  */
4609       parm_align = MAX (PARM_BOUNDARY, TYPE_ALIGN (TREE_TYPE (pval)));
4610 
4611       /* When an argument is padded down, the block is aligned to
4612 	 PARM_BOUNDARY, but the actual argument isn't.  */
4613       if (FUNCTION_ARG_PADDING (arg->mode, TREE_TYPE (pval)) == downward)
4614 	{
4615 	  if (arg->size.var)
4616 	    parm_align = BITS_PER_UNIT;
4617 	  else if (excess)
4618 	    {
4619 	      unsigned int excess_align = (excess & -excess) * BITS_PER_UNIT;
4620 	      parm_align = MIN (parm_align, excess_align);
4621 	    }
4622 	}
4623 
4624       if ((flags & ECF_SIBCALL) && GET_CODE (arg->value) == MEM)
4625 	{
4626 	  /* emit_push_insn might not work properly if arg->value and
4627 	     argblock + arg->offset areas overlap.  */
4628 	  rtx x = arg->value;
4629 	  int i = 0;
4630 
4631 	  if (XEXP (x, 0) == current_function_internal_arg_pointer
4632 	      || (GET_CODE (XEXP (x, 0)) == PLUS
4633 		  && XEXP (XEXP (x, 0), 0) ==
4634 		     current_function_internal_arg_pointer
4635 		  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT))
4636 	    {
4637 	      if (XEXP (x, 0) != current_function_internal_arg_pointer)
4638 		i = INTVAL (XEXP (XEXP (x, 0), 1));
4639 
4640 	      /* expand_call should ensure this */
4641 	      if (arg->offset.var || GET_CODE (size_rtx) != CONST_INT)
4642 		abort ();
4643 
4644 	      if (arg->offset.constant > i)
4645 		{
4646 		  if (arg->offset.constant < i + INTVAL (size_rtx))
4647 		    sibcall_failure = 1;
4648 		}
4649 	      else if (arg->offset.constant < i)
4650 		{
4651 		  if (i < arg->offset.constant + INTVAL (size_rtx))
4652 		    sibcall_failure = 1;
4653 		}
4654 	    }
4655 	}
4656 
4657       emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
4658 		      parm_align, partial, reg, excess, argblock,
4659 		      ARGS_SIZE_RTX (arg->offset), reg_parm_stack_space,
4660 		      ARGS_SIZE_RTX (arg->alignment_pad));
4661 
4662       /* Unless this is a partially-in-register argument, the argument is now
4663 	 in the stack.
4664 
4665 	 ??? Unlike the case above, in which we want the actual
4666 	 address of the data, so that we can load it directly into a
4667 	 register, here we want the address of the stack slot, so that
4668 	 it's properly aligned for word-by-word copying or something
4669 	 like that.  It's not clear that this is always correct.  */
4670       if (partial == 0)
4671 	arg->value = arg->stack_slot;
4672     }
4673 
4674   /* Mark all slots this store used.  */
4675   if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
4676       && argblock && ! variable_size && arg->stack)
4677     for (i = lower_bound; i < upper_bound; i++)
4678       stack_usage_map[i] = 1;
4679 
4680   /* Once we have pushed something, pops can't safely
4681      be deferred during the rest of the arguments.  */
4682   NO_DEFER_POP;
4683 
4684   /* ANSI doesn't require a sequence point here,
4685      but PCC has one, so this will avoid some problems.  */
4686   emit_queue ();
4687 
4688   /* Free any temporary slots made in processing this argument.  Show
4689      that we might have taken the address of something and pushed that
4690      as an operand.  */
4691   preserve_temp_slots (NULL_RTX);
4692   free_temp_slots ();
4693   pop_temp_slots ();
4694 
4695   return sibcall_failure;
4696 }
4697 
4698 /* Nonzero if we do not know how to pass TYPE solely in registers.
4699    We cannot do so in the following cases:
4700 
4701    - if the type has variable size
4702    - if the type is marked as addressable (it is required to be constructed
4703      into the stack)
4704    - if the padding and mode of the type is such that a copy into a register
4705      would put it into the wrong part of the register.
4706 
4707    Which padding can't be supported depends on the byte endianness.
4708 
4709    A value in a register is implicitly padded at the most significant end.
4710    On a big-endian machine, that is the lower end in memory.
4711    So a value padded in memory at the upper end can't go in a register.
4712    For a little-endian machine, the reverse is true.  */
4713 
4714 bool
default_must_pass_in_stack(enum machine_mode mode,tree type)4715 default_must_pass_in_stack (enum machine_mode mode, tree type)
4716 {
4717   if (!type)
4718     return false;
4719 
4720   /* If the type has variable size...  */
4721   if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4722     return true;
4723 
4724   /* If the type is marked as addressable (it is required
4725      to be constructed into the stack)...  */
4726   if (TREE_ADDRESSABLE (type))
4727     return true;
4728 
4729   /* If the padding and mode of the type is such that a copy into
4730      a register would put it into the wrong part of the register.  */
4731   if (mode == BLKmode
4732       && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
4733       && (FUNCTION_ARG_PADDING (mode, type)
4734 	  == (BYTES_BIG_ENDIAN ? upward : downward)))
4735     return true;
4736 
4737   return false;
4738 }
4739