xref: /dragonfly/contrib/gcc-8.0/gcc/calls.c (revision 106728aa)
1 /* Convert function calls to rtl insns, for GNU C compiler.
2    Copyright (C) 1989-2018 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "predict.h"
29 #include "memmodel.h"
30 #include "tm_p.h"
31 #include "stringpool.h"
32 #include "expmed.h"
33 #include "optabs.h"
34 #include "emit-rtl.h"
35 #include "cgraph.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "varasm.h"
40 #include "internal-fn.h"
41 #include "dojump.h"
42 #include "explow.h"
43 #include "calls.h"
44 #include "expr.h"
45 #include "output.h"
46 #include "langhooks.h"
47 #include "except.h"
48 #include "dbgcnt.h"
49 #include "rtl-iter.h"
50 #include "tree-chkp.h"
51 #include "tree-vrp.h"
52 #include "tree-ssanames.h"
53 #include "rtl-chkp.h"
54 #include "intl.h"
55 #include "stringpool.h"
56 #include "attribs.h"
57 #include "builtins.h"
58 
59 /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits.  */
60 #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
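/* Illustrative example only (hypothetical target parameters): with
   PREFERRED_STACK_BOUNDARY == 128 and BITS_PER_UNIT == 8, STACK_BYTES
   evaluates to 128 / 8 == 16, so outgoing argument block sizes are
   rounded to multiples of 16 bytes.  */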
61 
62 /* Data structure and subroutines used within expand_call.  */
63 
64 struct arg_data
65 {
66   /* Tree node for this argument.  */
67   tree tree_value;
68   /* Mode for value; TYPE_MODE unless promoted.  */
69   machine_mode mode;
70   /* Current RTL value for argument, or 0 if it isn't precomputed.  */
71   rtx value;
72   /* Initially-computed RTL value for argument; only for const functions.  */
73   rtx initial_value;
74   /* Register to pass this argument in, 0 if passed on stack, or a
75      PARALLEL if the arg is to be copied into multiple non-contiguous
76      registers.  */
77   rtx reg;
78   /* Register to pass this argument in when generating tail call sequence.
79      This is not the same register as for normal calls on machines with
80      register windows.  */
81   rtx tail_call_reg;
82   /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
83      form for emit_group_move.  */
84   rtx parallel_value;
85   /* If the value is passed in neither reg nor stack, this field holds
86      the number of the special slot to be used.  */
87   rtx special_slot;
88   /* For pointer bounds, holds the index of the parm the bounds are bound
89      to.  -1 if there is no such pointer.  */
90   int pointer_arg;
91   /* If pointer_arg refers to a structure, then pointer_offset holds the
92      offset of the pointer within that structure.  */
93   int pointer_offset;
94   /* If REG was promoted from the actual mode of the argument expression,
95      indicates whether the promotion is sign- or zero-extended.  */
96   int unsignedp;
97   /* Number of bytes to put in registers.  0 means put the whole arg
98      in registers.  Also 0 if not passed in registers.  */
99   int partial;
100   /* Nonzero if argument must be passed on stack.
101      Note that some arguments may be passed on the stack
102      even though pass_on_stack is zero, just because FUNCTION_ARG says so.
103      pass_on_stack identifies arguments that *cannot* go in registers.  */
104   int pass_on_stack;
105   /* Some fields packaged up for locate_and_pad_parm.  */
106   struct locate_and_pad_arg_data locate;
107   /* Location on the stack at which parameter should be stored.  The store
108      has already been done if STACK == VALUE.  */
109   rtx stack;
110   /* Location on the stack of the start of this argument slot.  This can
111      differ from STACK if this arg pads downward.  This location is known
112      to be aligned to TARGET_FUNCTION_ARG_BOUNDARY.  */
113   rtx stack_slot;
114   /* Place that this stack area has been saved, if needed.  */
115   rtx save_area;
116   /* If an argument's alignment does not permit direct copying into registers,
117      copy in smaller-sized pieces into pseudos.  These are stored in a
118      block pointed to by this field.  The next field says how many
119      word-sized pseudos we made.  */
120   rtx *aligned_regs;
121   int n_aligned_regs;
122 };
123 
124 /* A vector of one char per byte of stack space.  A byte is nonzero if
125    the corresponding stack location has been used.
126    This vector is used to prevent a function call within an argument from
127    clobbering any stack already set up.  */
128 static char *stack_usage_map;
129 
130 /* Size of STACK_USAGE_MAP.  */
131 static unsigned int highest_outgoing_arg_in_use;
132 
133 /* Assume that any stack location at this byte index is used,
134    without checking the contents of stack_usage_map.  */
135 static unsigned HOST_WIDE_INT stack_usage_watermark = HOST_WIDE_INT_M1U;
136 
137 /* A bitmap of virtual-incoming stack space.  A bit is set if the
138    corresponding stack location's tail call argument has already been
139    stored into the stack.  This bitmap is used to prevent sibling call
140    optimization if the function tries to use its parent's incoming argument
141    slots when they have already been overwritten with tail call arguments.  */
142 static sbitmap stored_args_map;
143 
144 /* Assume that any virtual-incoming location at this byte index has been
145    stored, without checking the contents of stored_args_map.  */
146 static unsigned HOST_WIDE_INT stored_args_watermark;
147 
148 /* stack_arg_under_construction is nonzero when an argument may be
149    initialized with a constructor call (including a C function that
150    returns a BLKmode struct) and expand_call must take special action
151    to make sure the object being constructed does not overlap the
152    argument list for the constructor call.  */
153 static int stack_arg_under_construction;
154 
155 static void precompute_register_parameters (int, struct arg_data *, int *);
156 static void store_bounds (struct arg_data *, struct arg_data *);
157 static int store_one_arg (struct arg_data *, rtx, int, int, int);
158 static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
159 static int finalize_must_preallocate (int, int, struct arg_data *,
160 				      struct args_size *);
161 static void precompute_arguments (int, struct arg_data *);
162 static void compute_argument_addresses (struct arg_data *, rtx, int);
163 static rtx rtx_for_function_call (tree, tree);
164 static void load_register_parameters (struct arg_data *, int, rtx *, int,
165 				      int, int *);
166 static int special_function_p (const_tree, int);
167 static int check_sibcall_argument_overlap_1 (rtx);
168 static int check_sibcall_argument_overlap (rtx_insn *, struct arg_data *, int);
169 
170 static tree split_complex_types (tree);
171 
172 #ifdef REG_PARM_STACK_SPACE
173 static rtx save_fixed_argument_area (int, rtx, int *, int *);
174 static void restore_fixed_argument_area (rtx, rtx, int, int);
175 #endif
176 
177 /* Return true if bytes [LOWER_BOUND, UPPER_BOUND) of the outgoing
178    stack region might already be in use.  */
179 
180 static bool
181 stack_region_maybe_used_p (poly_uint64 lower_bound, poly_uint64 upper_bound,
182 			   unsigned int reg_parm_stack_space)
183 {
184   unsigned HOST_WIDE_INT const_lower, const_upper;
185   const_lower = constant_lower_bound (lower_bound);
186   if (!upper_bound.is_constant (&const_upper))
187     const_upper = HOST_WIDE_INT_M1U;
188 
189   if (const_upper > stack_usage_watermark)
190     return true;
191 
192   /* Don't worry about things in the fixed argument area;
193      it has already been saved.  */
194   const_lower = MAX (const_lower, reg_parm_stack_space);
195   const_upper = MIN (const_upper, highest_outgoing_arg_in_use);
196   for (unsigned HOST_WIDE_INT i = const_lower; i < const_upper; ++i)
197     if (stack_usage_map[i])
198       return true;
199   return false;
200 }
201 
202 /* Record that bytes [LOWER_BOUND, UPPER_BOUND) of the outgoing
203    stack region are now in use.  */
204 
205 static void
206 mark_stack_region_used (poly_uint64 lower_bound, poly_uint64 upper_bound)
207 {
208   unsigned HOST_WIDE_INT const_lower, const_upper;
209   const_lower = constant_lower_bound (lower_bound);
210   if (upper_bound.is_constant (&const_upper))
211     for (unsigned HOST_WIDE_INT i = const_lower; i < const_upper; ++i)
212       stack_usage_map[i] = 1;
213   else
214     stack_usage_watermark = MIN (stack_usage_watermark, const_lower);
215 }
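/* Illustrative sketch only, not part of the call expansion proper: the two
   helpers above are intended to be used as a query/update pair when laying
   out one outgoing argument at bytes [LOW, HIGH).  The function below is a
   hypothetical caller showing the intended protocol.  */
#if 0
static void
example_lay_out_arg (poly_uint64 low, poly_uint64 high,
		     unsigned int reg_parm_stack_space)
{
  /* If any byte of the slot may already be occupied, the real code
     evaluates the argument into a temporary (or saves the area) first.  */
  if (stack_region_maybe_used_p (low, high, reg_parm_stack_space))
    {
      /* ... precompute the argument or save the overlapping region ...  */
    }

  /* Record the slot as in use so later arguments see the overlap.  */
  mark_stack_region_used (low, high);
}
#endif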
216 
217 /* Force FUNEXP into a form suitable for the address of a CALL,
218    and return that as an rtx.  Also load the static chain register
219    if FNDECL is a nested function.
220 
221    CALL_FUSAGE points to a variable holding the prospective
222    CALL_INSN_FUNCTION_USAGE information.  */
223 
224 rtx
225 prepare_call_address (tree fndecl_or_type, rtx funexp, rtx static_chain_value,
226 		      rtx *call_fusage, int reg_parm_seen, int flags)
227 {
228   /* Make a valid memory address and copy constants through pseudo-regs,
229      but not for a constant address if -fno-function-cse.  */
230   if (GET_CODE (funexp) != SYMBOL_REF)
231     {
232       /* If it's an indirect call by descriptor, generate code to perform
233 	 runtime identification of the pointer and load the descriptor.  */
234       if ((flags & ECF_BY_DESCRIPTOR) && !flag_trampolines)
235 	{
236 	  const int bit_val = targetm.calls.custom_function_descriptors;
237 	  rtx call_lab = gen_label_rtx ();
238 
239 	  gcc_assert (fndecl_or_type && TYPE_P (fndecl_or_type));
240 	  fndecl_or_type
241 	    = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
242 			  fndecl_or_type);
243 	  DECL_STATIC_CHAIN (fndecl_or_type) = 1;
244 	  rtx chain = targetm.calls.static_chain (fndecl_or_type, false);
245 
246 	  if (GET_MODE (funexp) != Pmode)
247 	    funexp = convert_memory_address (Pmode, funexp);
248 
249 	  /* Avoid long live ranges around function calls.  */
250 	  funexp = copy_to_mode_reg (Pmode, funexp);
251 
252 	  if (REG_P (chain))
253 	    emit_insn (gen_rtx_CLOBBER (VOIDmode, chain));
254 
255 	  /* Emit the runtime identification pattern.  */
256 	  rtx mask = gen_rtx_AND (Pmode, funexp, GEN_INT (bit_val));
257 	  emit_cmp_and_jump_insns (mask, const0_rtx, EQ, NULL_RTX, Pmode, 1,
258 				   call_lab);
259 
260 	  /* Statically predict the branch to very likely taken.  */
261 	  rtx_insn *insn = get_last_insn ();
262 	  if (JUMP_P (insn))
263 	    predict_insn_def (insn, PRED_BUILTIN_EXPECT, TAKEN);
264 
265 	  /* Load the descriptor.  */
266 	  rtx mem = gen_rtx_MEM (ptr_mode,
267 				 plus_constant (Pmode, funexp, - bit_val));
268 	  MEM_NOTRAP_P (mem) = 1;
269 	  mem = convert_memory_address (Pmode, mem);
270 	  emit_move_insn (chain, mem);
271 
272 	  mem = gen_rtx_MEM (ptr_mode,
273 			     plus_constant (Pmode, funexp,
274 					    POINTER_SIZE / BITS_PER_UNIT
275 					      - bit_val));
276 	  MEM_NOTRAP_P (mem) = 1;
277 	  mem = convert_memory_address (Pmode, mem);
278 	  emit_move_insn (funexp, mem);
279 
280 	  emit_label (call_lab);
281 
282 	  if (REG_P (chain))
283 	    {
284 	      use_reg (call_fusage, chain);
285 	      STATIC_CHAIN_REG_P (chain) = 1;
286 	    }
287 
288 	  /* Make sure we're not going to be overwritten below.  */
289 	  gcc_assert (!static_chain_value);
290 	}
291 
292       /* If we are using registers for parameters, force the
293 	 function address into a register now.  */
294       funexp = ((reg_parm_seen
295 		 && targetm.small_register_classes_for_mode_p (FUNCTION_MODE))
296 		 ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
297 		 : memory_address (FUNCTION_MODE, funexp));
298     }
299   else
300     {
301       /* funexp could be a SYMBOL_REF that represents a function pointer of
302	 ptr_mode.  In this case, it should be converted into address mode to
303	 be a valid address for a memory rtx pattern.  See PR 64971.  */
304       if (GET_MODE (funexp) != Pmode)
305 	funexp = convert_memory_address (Pmode, funexp);
306 
307       if (!(flags & ECF_SIBCALL))
308 	{
309 	  if (!NO_FUNCTION_CSE && optimize && ! flag_no_function_cse)
310 	    funexp = force_reg (Pmode, funexp);
311 	}
312     }
313 
314   if (static_chain_value != 0
315       && (TREE_CODE (fndecl_or_type) != FUNCTION_DECL
316 	  || DECL_STATIC_CHAIN (fndecl_or_type)))
317     {
318       rtx chain;
319 
320       chain = targetm.calls.static_chain (fndecl_or_type, false);
321       static_chain_value = convert_memory_address (Pmode, static_chain_value);
322 
323       emit_move_insn (chain, static_chain_value);
324       if (REG_P (chain))
325 	{
326 	  use_reg (call_fusage, chain);
327 	  STATIC_CHAIN_REG_P (chain) = 1;
328 	}
329     }
330 
331   return funexp;
332 }
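/* Worked example, illustrative only: with
   targetm.calls.custom_function_descriptors == 1, the descriptor sequence
   emitted above behaves roughly like the following C, where FP is the
   incoming function "pointer" (hypothetical variable names; the real
   sequence is emitted as RTL):

     if (FP & 1)                  // tagged: FP - 1 addresses a descriptor
       {
	 chain = ((void **) (FP - 1))[0];   // static chain word
	 FP    = ((void **) (FP - 1))[1];   // real code address
       }
     call_lab:
     // FP is now a plain code address; CHAIN, if set, goes into CALL_FUSAGE.
   */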
333 
334 /* Generate instructions to call function FUNEXP,
335    and optionally pop the results.
336    The CALL_INSN is the first insn generated.
337 
338    FNDECL is the declaration node of the function.  This is given to the
339    hook TARGET_RETURN_POPS_ARGS to determine whether this function pops
340    its own args.
341 
342    FUNTYPE is the data type of the function.  This is given to the hook
343    TARGET_RETURN_POPS_ARGS to determine whether this function pops its
344    own args.  We used to allow an identifier for library functions, but
345    that doesn't work when the return type is an aggregate type and the
346    calling convention says that the pointer to this aggregate is to be
347    popped by the callee.
348 
349    STACK_SIZE is the number of bytes of arguments on the stack,
350    ROUNDED_STACK_SIZE is that number rounded up to
351    PREFERRED_STACK_BOUNDARY; zero if the size is variable.  This is
352    both to put into the call insn and to generate explicit popping
353    code if necessary.
354 
355    STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
356    It is zero if this call doesn't want a structure value.
357 
358    NEXT_ARG_REG is the rtx that results from executing
359      targetm.calls.function_arg (&args_so_far, VOIDmode, void_type_node, true)
360    just after all the args have had their registers assigned.
361    This could be whatever you like, but normally it is the first
362    arg-register beyond those used for args in this call,
363    or 0 if all the arg-registers are used in this call.
364    It is passed on to `gen_call' so you can put this info in the call insn.
365 
366    VALREG is a hard register in which a value is returned,
367    or 0 if the call does not return a value.
368 
369    OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
370    the args to this call were processed.
371    We restore `inhibit_defer_pop' to that value.
372 
373    CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
374    denote registers used by the called function.  */
375 
376 static void
377 emit_call_1 (rtx funexp, tree fntree ATTRIBUTE_UNUSED, tree fndecl ATTRIBUTE_UNUSED,
378 	     tree funtype ATTRIBUTE_UNUSED,
379 	     poly_int64 stack_size ATTRIBUTE_UNUSED,
380 	     poly_int64 rounded_stack_size,
381 	     poly_int64 struct_value_size ATTRIBUTE_UNUSED,
382 	     rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
383 	     int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
384 	     cumulative_args_t args_so_far ATTRIBUTE_UNUSED)
385 {
386   rtx rounded_stack_size_rtx = gen_int_mode (rounded_stack_size, Pmode);
387   rtx call, funmem, pat;
388   int already_popped = 0;
389   poly_int64 n_popped = 0;
390 
391   /* Sibling call patterns never pop arguments (no sibcall(_value)_pop
392      patterns exist).  Any popping that the callee does on return will
393      be from our caller's frame rather than ours.  */
394   if (!(ecf_flags & ECF_SIBCALL))
395     {
396       n_popped += targetm.calls.return_pops_args (fndecl, funtype, stack_size);
397 
398 #ifdef CALL_POPS_ARGS
399       n_popped += CALL_POPS_ARGS (*get_cumulative_args (args_so_far));
400 #endif
401     }
402 
403   /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
404      and we don't want to load it into a register as an optimization,
405      because prepare_call_address already did it if it should be done.  */
406   if (GET_CODE (funexp) != SYMBOL_REF)
407     funexp = memory_address (FUNCTION_MODE, funexp);
408 
409   funmem = gen_rtx_MEM (FUNCTION_MODE, funexp);
410   if (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL)
411     {
412       tree t = fndecl;
413 
414       /* Although a built-in FUNCTION_DECL and its non-__builtin
415	 counterpart compare equal and get a shared mem_attrs, they
416	 produce different dump output in compare-debug compilations:
417	 one compilation may garbage collect an entry and then add a
418	 different (but equivalent) entry, while the other doesn't run
419	 the garbage collector at the same spot and then keeps sharing
420	 the mem_attr with the equivalent entry. */
421       if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
422 	{
423 	  tree t2 = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
424 	  if (t2)
425 	    t = t2;
426 	}
427 
428 	set_mem_expr (funmem, t);
429     }
430   else if (fntree)
431     set_mem_expr (funmem, build_simple_mem_ref (CALL_EXPR_FN (fntree)));
432 
433   if (ecf_flags & ECF_SIBCALL)
434     {
435       if (valreg)
436 	pat = targetm.gen_sibcall_value (valreg, funmem,
437 					 rounded_stack_size_rtx,
438 					 next_arg_reg, NULL_RTX);
439       else
440 	pat = targetm.gen_sibcall (funmem, rounded_stack_size_rtx,
441 				   next_arg_reg,
442 				   gen_int_mode (struct_value_size, Pmode));
443     }
444   /* If the target has "call" or "call_value" insns, then prefer them
445      if no arguments are actually popped.  If the target does not have
446      "call" or "call_value" insns, then we must use the popping versions
447      even if the call has no arguments to pop.  */
448   else if (maybe_ne (n_popped, 0)
449 	   || !(valreg
450 		? targetm.have_call_value ()
451 		: targetm.have_call ()))
452     {
453       rtx n_pop = gen_int_mode (n_popped, Pmode);
454 
455       /* If this subroutine pops its own args, record that in the call insn
456 	 if possible, for the sake of frame pointer elimination.  */
457 
458       if (valreg)
459 	pat = targetm.gen_call_value_pop (valreg, funmem,
460 					  rounded_stack_size_rtx,
461 					  next_arg_reg, n_pop);
462       else
463 	pat = targetm.gen_call_pop (funmem, rounded_stack_size_rtx,
464 				    next_arg_reg, n_pop);
465 
466       already_popped = 1;
467     }
468   else
469     {
470       if (valreg)
471 	pat = targetm.gen_call_value (valreg, funmem, rounded_stack_size_rtx,
472 				      next_arg_reg, NULL_RTX);
473       else
474 	pat = targetm.gen_call (funmem, rounded_stack_size_rtx, next_arg_reg,
475 				gen_int_mode (struct_value_size, Pmode));
476     }
477   emit_insn (pat);
478 
479   /* Find the call we just emitted.  */
480   rtx_call_insn *call_insn = last_call_insn ();
481 
482   /* Some targets create a fresh MEM instead of reusing the one provided
483      above.  Set its MEM_EXPR.  */
484   call = get_call_rtx_from (call_insn);
485   if (call
486       && MEM_EXPR (XEXP (call, 0)) == NULL_TREE
487       && MEM_EXPR (funmem) != NULL_TREE)
488     set_mem_expr (XEXP (call, 0), MEM_EXPR (funmem));
489 
490   /* Mark instrumented calls.  */
491   if (call && fntree)
492     CALL_EXPR_WITH_BOUNDS_P (call) = CALL_WITH_BOUNDS_P (fntree);
493 
494   /* Put the register usage information there.  */
495   add_function_usage_to (call_insn, call_fusage);
496 
497   /* If this is a const call, then set the insn's unchanging bit.  */
498   if (ecf_flags & ECF_CONST)
499     RTL_CONST_CALL_P (call_insn) = 1;
500 
501   /* If this is a pure call, then set the insn's unchanging bit.  */
502   if (ecf_flags & ECF_PURE)
503     RTL_PURE_CALL_P (call_insn) = 1;
504 
505   /* If this is a looping const or pure call, then set the corresponding bit.  */
506   if (ecf_flags & ECF_LOOPING_CONST_OR_PURE)
507     RTL_LOOPING_CONST_OR_PURE_CALL_P (call_insn) = 1;
508 
509   /* Create a nothrow REG_EH_REGION note, if needed.  */
510   make_reg_eh_region_note (call_insn, ecf_flags, 0);
511 
512   if (ecf_flags & ECF_NORETURN)
513     add_reg_note (call_insn, REG_NORETURN, const0_rtx);
514 
515   if (ecf_flags & ECF_RETURNS_TWICE)
516     {
517       add_reg_note (call_insn, REG_SETJMP, const0_rtx);
518       cfun->calls_setjmp = 1;
519     }
520 
521   SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);
522 
523   /* Restore this now, so that we do defer pops for this call's args
524      if the context of the call as a whole permits.  */
525   inhibit_defer_pop = old_inhibit_defer_pop;
526 
527   if (maybe_ne (n_popped, 0))
528     {
529       if (!already_popped)
530 	CALL_INSN_FUNCTION_USAGE (call_insn)
531 	  = gen_rtx_EXPR_LIST (VOIDmode,
532 			       gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
533 			       CALL_INSN_FUNCTION_USAGE (call_insn));
534       rounded_stack_size -= n_popped;
535       rounded_stack_size_rtx = gen_int_mode (rounded_stack_size, Pmode);
536       stack_pointer_delta -= n_popped;
537 
538       add_args_size_note (call_insn, stack_pointer_delta);
539 
540       /* If popping is needed, stack realignment must use DRAP.  */
541       if (SUPPORTS_STACK_ALIGNMENT)
542         crtl->need_drap = true;
543     }
544   /* For noreturn calls when not accumulating outgoing args force
545      REG_ARGS_SIZE note to prevent crossjumping of calls with different
546      args sizes.  */
547   else if (!ACCUMULATE_OUTGOING_ARGS && (ecf_flags & ECF_NORETURN) != 0)
548     add_args_size_note (call_insn, stack_pointer_delta);
549 
550   if (!ACCUMULATE_OUTGOING_ARGS)
551     {
552       /* If returning from the subroutine does not automatically pop the args,
553 	 we need an instruction to pop them sooner or later.
554 	 Perhaps do it now; perhaps just record how much space to pop later.
555 
556 	 If returning from the subroutine does pop the args, indicate that the
557 	 stack pointer will be changed.  */
558 
559       if (maybe_ne (rounded_stack_size, 0))
560 	{
561 	  if (ecf_flags & ECF_NORETURN)
562 	    /* Just pretend we did the pop.  */
563 	    stack_pointer_delta -= rounded_stack_size;
564 	  else if (flag_defer_pop && inhibit_defer_pop == 0
565 	      && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
566 	    pending_stack_adjust += rounded_stack_size;
567 	  else
568 	    adjust_stack (rounded_stack_size_rtx);
569 	}
570     }
571   /* When we accumulate outgoing args, we must avoid any stack manipulations.
572      Restore the stack pointer to its original value now.  Usually
573      ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
574      On  i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
575      popping variants of functions exist as well.
576 
577      ??? We may optimize similar to defer_pop above, but it is
578      probably not worthwhile.
579 
580      ??? It will be worthwhile to enable combine_stack_adjustments even for
581      such machines.  */
582   else if (maybe_ne (n_popped, 0))
583     anti_adjust_stack (gen_int_mode (n_popped, Pmode));
584 }
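/* Worked example, illustrative only: for a callee that pops 12 bytes of its
   own arguments (e.g. a stdcall-style function), N_POPPED is 12, so a
   call_pop/call_value_pop pattern is chosen above, ROUNDED_STACK_SIZE and
   stack_pointer_delta are both reduced by 12, and when the callee popped
   everything no further stack adjustment needs to be emitted here.  */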
585 
586 /* Determine if the function identified by FNDECL is one with
587    special properties we wish to know about.  Modify FLAGS accordingly.
588 
589    For example, if the function might return more than one time (setjmp), then
590    set ECF_RETURNS_TWICE.
591 
592    Set ECF_MAY_BE_ALLOCA for any memory allocation function that might allocate
593    space from the stack such as alloca.  */
594 
595 static int
596 special_function_p (const_tree fndecl, int flags)
597 {
598   tree name_decl = DECL_NAME (fndecl);
599 
600   /* For instrumentation clones we want to derive flags
601      from the original name.  */
602   if (cgraph_node::get (fndecl)
603       && cgraph_node::get (fndecl)->instrumentation_clone)
604     name_decl = DECL_NAME (cgraph_node::get (fndecl)->orig_decl);
605 
606   if (fndecl && name_decl
607       && IDENTIFIER_LENGTH (name_decl) <= 11
608       /* Exclude functions not at the file scope, or not `extern',
609 	 since they are not the magic functions we would otherwise
610 	 think they are.
611 	 FIXME: this should be handled with attributes, not with this
612 	 hacky imitation of DECL_ASSEMBLER_NAME.  It's (also) wrong
613 	 because you can declare fork() inside a function if you
614 	 wish.  */
615       && (DECL_CONTEXT (fndecl) == NULL_TREE
616 	  || TREE_CODE (DECL_CONTEXT (fndecl)) == TRANSLATION_UNIT_DECL)
617       && TREE_PUBLIC (fndecl))
618     {
619       const char *name = IDENTIFIER_POINTER (name_decl);
620       const char *tname = name;
621 
622       /* We assume that alloca will always be called by name.  It
623 	 makes no sense to pass it as a pointer-to-function to
624 	 anything that does not understand its behavior.  */
625       if (IDENTIFIER_LENGTH (name_decl) == 6
626 	  && name[0] == 'a'
627 	  && ! strcmp (name, "alloca"))
628 	flags |= ECF_MAY_BE_ALLOCA;
629 
630       /* Disregard prefix _ or __.  */
631       if (name[0] == '_')
632 	{
633 	  if (name[1] == '_')
634 	    tname += 2;
635 	  else
636 	    tname += 1;
637 	}
638 
639       /* ECF_RETURNS_TWICE is safe even for -ffreestanding.  */
640       if (! strcmp (tname, "setjmp")
641 	  || ! strcmp (tname, "sigsetjmp")
642 	  || ! strcmp (name, "savectx")
643 	  || ! strcmp (name, "vfork")
644 	  || ! strcmp (name, "getcontext"))
645 	flags |= ECF_RETURNS_TWICE;
646     }
647 
648   if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
649       && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (fndecl)))
650     flags |= ECF_MAY_BE_ALLOCA;
651 
652   return flags;
653 }
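/* Worked example, illustrative only: for a file-scope extern declaration of
   "__sigsetjmp", IDENTIFIER_LENGTH is 11, the "__" prefix is stripped so
   TNAME is "sigsetjmp", and ECF_RETURNS_TWICE is added.  A declaration that
   is not at file scope (or not TREE_PUBLIC) is deliberately not matched by
   the name-based check above.  */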
654 
655 /* Similar to special_function_p; return a set of ERF_ flags for the
656    function FNDECL.  */
657 static int
658 decl_return_flags (tree fndecl)
659 {
660   tree attr;
661   tree type = TREE_TYPE (fndecl);
662   if (!type)
663     return 0;
664 
665   attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
666   if (!attr)
667     return 0;
668 
669   attr = TREE_VALUE (TREE_VALUE (attr));
670   if (!attr || TREE_STRING_LENGTH (attr) < 1)
671     return 0;
672 
673   switch (TREE_STRING_POINTER (attr)[0])
674     {
675     case '1':
676     case '2':
677     case '3':
678     case '4':
679       return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');
680 
681     case 'm':
682       return ERF_NOALIAS;
683 
684     case '.':
685     default:
686       return 0;
687     }
688 }
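/* Illustrative example: for a decl whose type carries the internal "fn spec"
   attribute with a string starting with "1", the function is recorded as
   returning its first argument (ERF_RETURNS_ARG with offset 0); a string
   starting with "m" marks a malloc-like return (ERF_NOALIAS); a leading "."
   or anything else yields no ERF_ flags.  */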
689 
690 /* Return nonzero when FNDECL represents a call to setjmp.  */
691 
692 int
693 setjmp_call_p (const_tree fndecl)
694 {
695   if (DECL_IS_RETURNS_TWICE (fndecl))
696     return ECF_RETURNS_TWICE;
697   return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
698 }
699 
700 
701 /* Return true if STMT may be an alloca call.  */
702 
703 bool
704 gimple_maybe_alloca_call_p (const gimple *stmt)
705 {
706   tree fndecl;
707 
708   if (!is_gimple_call (stmt))
709     return false;
710 
711   fndecl = gimple_call_fndecl (stmt);
712   if (fndecl && (special_function_p (fndecl, 0) & ECF_MAY_BE_ALLOCA))
713     return true;
714 
715   return false;
716 }
717 
718 /* Return true if STMT is a builtin alloca call.  */
719 
720 bool
721 gimple_alloca_call_p (const gimple *stmt)
722 {
723   tree fndecl;
724 
725   if (!is_gimple_call (stmt))
726     return false;
727 
728   fndecl = gimple_call_fndecl (stmt);
729   if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
730     switch (DECL_FUNCTION_CODE (fndecl))
731       {
732       CASE_BUILT_IN_ALLOCA:
733 	return gimple_call_num_args (stmt) > 0;
734       default:
735 	break;
736       }
737 
738   return false;
739 }
740 
741 /* Return true when exp contains a builtin alloca call.  */
742 
743 bool
744 alloca_call_p (const_tree exp)
745 {
746   tree fndecl;
747   if (TREE_CODE (exp) == CALL_EXPR
748       && (fndecl = get_callee_fndecl (exp))
749       && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
750     switch (DECL_FUNCTION_CODE (fndecl))
751       {
752       CASE_BUILT_IN_ALLOCA:
753         return true;
754       default:
755 	break;
756       }
757 
758   return false;
759 }
760 
761 /* Return TRUE if FNDECL is either a TM builtin or a TM cloned
762    function.  Return FALSE otherwise.  */
763 
764 static bool
765 is_tm_builtin (const_tree fndecl)
766 {
767   if (fndecl == NULL)
768     return false;
769 
770   if (decl_is_tm_clone (fndecl))
771     return true;
772 
773   if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
774     {
775       switch (DECL_FUNCTION_CODE (fndecl))
776 	{
777 	case BUILT_IN_TM_COMMIT:
778 	case BUILT_IN_TM_COMMIT_EH:
779 	case BUILT_IN_TM_ABORT:
780 	case BUILT_IN_TM_IRREVOCABLE:
781 	case BUILT_IN_TM_GETTMCLONE_IRR:
782 	case BUILT_IN_TM_MEMCPY:
783 	case BUILT_IN_TM_MEMMOVE:
784 	case BUILT_IN_TM_MEMSET:
785 	CASE_BUILT_IN_TM_STORE (1):
786 	CASE_BUILT_IN_TM_STORE (2):
787 	CASE_BUILT_IN_TM_STORE (4):
788 	CASE_BUILT_IN_TM_STORE (8):
789 	CASE_BUILT_IN_TM_STORE (FLOAT):
790 	CASE_BUILT_IN_TM_STORE (DOUBLE):
791 	CASE_BUILT_IN_TM_STORE (LDOUBLE):
792 	CASE_BUILT_IN_TM_STORE (M64):
793 	CASE_BUILT_IN_TM_STORE (M128):
794 	CASE_BUILT_IN_TM_STORE (M256):
795 	CASE_BUILT_IN_TM_LOAD (1):
796 	CASE_BUILT_IN_TM_LOAD (2):
797 	CASE_BUILT_IN_TM_LOAD (4):
798 	CASE_BUILT_IN_TM_LOAD (8):
799 	CASE_BUILT_IN_TM_LOAD (FLOAT):
800 	CASE_BUILT_IN_TM_LOAD (DOUBLE):
801 	CASE_BUILT_IN_TM_LOAD (LDOUBLE):
802 	CASE_BUILT_IN_TM_LOAD (M64):
803 	CASE_BUILT_IN_TM_LOAD (M128):
804 	CASE_BUILT_IN_TM_LOAD (M256):
805 	case BUILT_IN_TM_LOG:
806 	case BUILT_IN_TM_LOG_1:
807 	case BUILT_IN_TM_LOG_2:
808 	case BUILT_IN_TM_LOG_4:
809 	case BUILT_IN_TM_LOG_8:
810 	case BUILT_IN_TM_LOG_FLOAT:
811 	case BUILT_IN_TM_LOG_DOUBLE:
812 	case BUILT_IN_TM_LOG_LDOUBLE:
813 	case BUILT_IN_TM_LOG_M64:
814 	case BUILT_IN_TM_LOG_M128:
815 	case BUILT_IN_TM_LOG_M256:
816 	  return true;
817 	default:
818 	  break;
819 	}
820     }
821   return false;
822 }
823 
824 /* Detect flags (function attributes) from the function decl or type node.  */
825 
826 int
827 flags_from_decl_or_type (const_tree exp)
828 {
829   int flags = 0;
830 
831   if (DECL_P (exp))
832     {
833       /* The function exp may have the `malloc' attribute.  */
834       if (DECL_IS_MALLOC (exp))
835 	flags |= ECF_MALLOC;
836 
837       /* The function exp may have the `returns_twice' attribute.  */
838       if (DECL_IS_RETURNS_TWICE (exp))
839 	flags |= ECF_RETURNS_TWICE;
840 
841       /* Process the pure and const attributes.  */
842       if (TREE_READONLY (exp))
843 	flags |= ECF_CONST;
844       if (DECL_PURE_P (exp))
845 	flags |= ECF_PURE;
846       if (DECL_LOOPING_CONST_OR_PURE_P (exp))
847 	flags |= ECF_LOOPING_CONST_OR_PURE;
848 
849       if (DECL_IS_NOVOPS (exp))
850 	flags |= ECF_NOVOPS;
851       if (lookup_attribute ("leaf", DECL_ATTRIBUTES (exp)))
852 	flags |= ECF_LEAF;
853       if (lookup_attribute ("cold", DECL_ATTRIBUTES (exp)))
854 	flags |= ECF_COLD;
855 
856       if (TREE_NOTHROW (exp))
857 	flags |= ECF_NOTHROW;
858 
859       if (flag_tm)
860 	{
861 	  if (is_tm_builtin (exp))
862 	    flags |= ECF_TM_BUILTIN;
863 	  else if ((flags & (ECF_CONST|ECF_NOVOPS)) != 0
864 		   || lookup_attribute ("transaction_pure",
865 					TYPE_ATTRIBUTES (TREE_TYPE (exp))))
866 	    flags |= ECF_TM_PURE;
867 	}
868 
869       flags = special_function_p (exp, flags);
870     }
871   else if (TYPE_P (exp))
872     {
873       if (TYPE_READONLY (exp))
874 	flags |= ECF_CONST;
875 
876       if (flag_tm
877 	  && ((flags & ECF_CONST) != 0
878 	      || lookup_attribute ("transaction_pure", TYPE_ATTRIBUTES (exp))))
879 	flags |= ECF_TM_PURE;
880     }
881   else
882     gcc_unreachable ();
883 
884   if (TREE_THIS_VOLATILE (exp))
885     {
886       flags |= ECF_NORETURN;
887       if (flags & (ECF_CONST|ECF_PURE))
888 	flags |= ECF_LOOPING_CONST_OR_PURE;
889     }
890 
891   return flags;
892 }
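/* Worked example, illustrative only (hypothetical declaration): for

     extern int fatal_error_fn (int) __attribute__ ((const, noreturn));

   TREE_READONLY yields ECF_CONST, TREE_THIS_VOLATILE yields ECF_NORETURN,
   and since ECF_CONST is set the call is additionally marked
   ECF_LOOPING_CONST_OR_PURE, because a const function that never returns
   cannot be treated like an ordinary const call.  */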
893 
894 /* Detect flags from a CALL_EXPR.  */
895 
896 int
897 call_expr_flags (const_tree t)
898 {
899   int flags;
900   tree decl = get_callee_fndecl (t);
901 
902   if (decl)
903     flags = flags_from_decl_or_type (decl);
904   else if (CALL_EXPR_FN (t) == NULL_TREE)
905     flags = internal_fn_flags (CALL_EXPR_IFN (t));
906   else
907     {
908       tree type = TREE_TYPE (CALL_EXPR_FN (t));
909       if (type && TREE_CODE (type) == POINTER_TYPE)
910 	flags = flags_from_decl_or_type (TREE_TYPE (type));
911       else
912 	flags = 0;
913       if (CALL_EXPR_BY_DESCRIPTOR (t))
914 	flags |= ECF_BY_DESCRIPTOR;
915     }
916 
917   return flags;
918 }
919 
920 /* Return true if TYPE should be passed by invisible reference.  */
921 
922 bool
923 pass_by_reference (CUMULATIVE_ARGS *ca, machine_mode mode,
924 		   tree type, bool named_arg)
925 {
926   if (type)
927     {
928       /* If this type contains non-trivial constructors, then it is
929 	 forbidden for the middle-end to create any new copies.  */
930       if (TREE_ADDRESSABLE (type))
931 	return true;
932 
933       /* GCC post 3.4 passes *all* variable sized types by reference.  */
934       if (!TYPE_SIZE (type) || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
935 	return true;
936 
937       /* If a record type should be passed the same as its first (and only)
938 	 member, use the type and mode of that member.  */
939       if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
940 	{
941 	  type = TREE_TYPE (first_field (type));
942 	  mode = TYPE_MODE (type);
943 	}
944     }
945 
946   return targetm.calls.pass_by_reference (pack_cumulative_args (ca), mode,
947 					  type, named_arg);
948 }
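/* Illustrative example: a parameter whose type has a non-trivial copy
   constructor in C++ (TREE_ADDRESSABLE), or a variably-sized type such as a
   C99 VLA whose TYPE_SIZE is not an INTEGER_CST, is forced to be passed by
   invisible reference here regardless of what the target hook would
   otherwise decide.  */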
949 
950 /* Return true if TYPE, which is passed by reference, should be callee
951    copied instead of caller copied.  */
952 
953 bool
954 reference_callee_copied (CUMULATIVE_ARGS *ca, machine_mode mode,
955 			 tree type, bool named_arg)
956 {
957   if (type && TREE_ADDRESSABLE (type))
958     return false;
959   return targetm.calls.callee_copies (pack_cumulative_args (ca), mode, type,
960 				      named_arg);
961 }
962 
963 
964 /* Precompute all register parameters as described by ARGS, storing values
965    into fields within the ARGS array.
966 
967    NUM_ACTUALS indicates the total number of elements in the ARGS array.
968 
969    Set REG_PARM_SEEN if we encounter a register parameter.  */
970 
971 static void
972 precompute_register_parameters (int num_actuals, struct arg_data *args,
973 				int *reg_parm_seen)
974 {
975   int i;
976 
977   *reg_parm_seen = 0;
978 
979   for (i = 0; i < num_actuals; i++)
980     if (args[i].reg != 0 && ! args[i].pass_on_stack)
981       {
982 	*reg_parm_seen = 1;
983 
984 	if (args[i].value == 0)
985 	  {
986 	    push_temp_slots ();
987 	    args[i].value = expand_normal (args[i].tree_value);
988 	    preserve_temp_slots (args[i].value);
989 	    pop_temp_slots ();
990 	  }
991 
992 	/* If we are to promote the function arg to a wider mode,
993 	   do it now.  */
994 
995 	if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
996 	  args[i].value
997 	    = convert_modes (args[i].mode,
998 			     TYPE_MODE (TREE_TYPE (args[i].tree_value)),
999 			     args[i].value, args[i].unsignedp);
1000 
1001 	/* If the value is a non-legitimate constant, force it into a
1002 	   pseudo now.  TLS symbols sometimes need a call to resolve.  */
1003 	if (CONSTANT_P (args[i].value)
1004 	    && !targetm.legitimate_constant_p (args[i].mode, args[i].value))
1005 	  args[i].value = force_reg (args[i].mode, args[i].value);
1006 
1007 	/* If we're going to have to load the value by parts, pull the
1008 	   parts into pseudos.  The part extraction process can involve
1009 	   non-trivial computation.  */
1010 	if (GET_CODE (args[i].reg) == PARALLEL)
1011 	  {
1012 	    tree type = TREE_TYPE (args[i].tree_value);
1013 	    args[i].parallel_value
1014 	      = emit_group_load_into_temps (args[i].reg, args[i].value,
1015 					    type, int_size_in_bytes (type));
1016 	  }
1017 
1018 	/* If the value is expensive, and we are inside an appropriately
1019 	   short loop, put the value into a pseudo and then put the pseudo
1020 	   into the hard reg.
1021 
1022 	   For small register classes, also do this if this call uses
1023 	   register parameters.  This is to avoid reload conflicts while
1024	   loading the parameter registers.  */
1025 
1026 	else if ((! (REG_P (args[i].value)
1027 		     || (GET_CODE (args[i].value) == SUBREG
1028 			 && REG_P (SUBREG_REG (args[i].value)))))
1029 		 && args[i].mode != BLKmode
1030 		 && (set_src_cost (args[i].value, args[i].mode,
1031 				   optimize_insn_for_speed_p ())
1032 		     > COSTS_N_INSNS (1))
1033 		 && ((*reg_parm_seen
1034 		      && targetm.small_register_classes_for_mode_p (args[i].mode))
1035 		     || optimize))
1036 	  args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
1037       }
1038 }
1039 
1040 #ifdef REG_PARM_STACK_SPACE
1041 
1042   /* The argument list is the property of the called routine and it
1043      may clobber it.  If the fixed area has been used for previous
1044      parameters, we must save and restore it.  */
1045 
1046 static rtx
1047 save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
1048 {
1049   unsigned int low;
1050   unsigned int high;
1051 
1052   /* Compute the boundary of the area that needs to be saved, if any.  */
1053   high = reg_parm_stack_space;
1054   if (ARGS_GROW_DOWNWARD)
1055     high += 1;
1056 
1057   if (high > highest_outgoing_arg_in_use)
1058     high = highest_outgoing_arg_in_use;
1059 
1060   for (low = 0; low < high; low++)
1061     if (stack_usage_map[low] != 0 || low >= stack_usage_watermark)
1062       {
1063 	int num_to_save;
1064 	machine_mode save_mode;
1065 	int delta;
1066 	rtx addr;
1067 	rtx stack_area;
1068 	rtx save_area;
1069 
1070 	while (stack_usage_map[--high] == 0)
1071 	  ;
1072 
1073 	*low_to_save = low;
1074 	*high_to_save = high;
1075 
1076 	num_to_save = high - low + 1;
1077 
1078 	/* If we don't have the required alignment, must do this
1079 	   in BLKmode.  */
1080 	scalar_int_mode imode;
1081 	if (int_mode_for_size (num_to_save * BITS_PER_UNIT, 1).exists (&imode)
1082 	    && (low & (MIN (GET_MODE_SIZE (imode),
1083 			    BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)) == 0)
1084 	  save_mode = imode;
1085 	else
1086 	  save_mode = BLKmode;
1087 
1088 	if (ARGS_GROW_DOWNWARD)
1089 	  delta = -high;
1090 	else
1091 	  delta = low;
1092 
1093 	addr = plus_constant (Pmode, argblock, delta);
1094 	stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));
1095 
1096 	set_mem_align (stack_area, PARM_BOUNDARY);
1097 	if (save_mode == BLKmode)
1098 	  {
1099 	    save_area = assign_stack_temp (BLKmode, num_to_save);
1100 	    emit_block_move (validize_mem (save_area), stack_area,
1101 			     GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
1102 	  }
1103 	else
1104 	  {
1105 	    save_area = gen_reg_rtx (save_mode);
1106 	    emit_move_insn (save_area, stack_area);
1107 	  }
1108 
1109 	return save_area;
1110       }
1111 
1112   return NULL_RTX;
1113 }
1114 
1115 static void
1116 restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
1117 {
1118   machine_mode save_mode = GET_MODE (save_area);
1119   int delta;
1120   rtx addr, stack_area;
1121 
1122   if (ARGS_GROW_DOWNWARD)
1123     delta = -high_to_save;
1124   else
1125     delta = low_to_save;
1126 
1127   addr = plus_constant (Pmode, argblock, delta);
1128   stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));
1129   set_mem_align (stack_area, PARM_BOUNDARY);
1130 
1131   if (save_mode != BLKmode)
1132     emit_move_insn (stack_area, save_area);
1133   else
1134     emit_block_move (stack_area, validize_mem (save_area),
1135 		     GEN_INT (high_to_save - low_to_save + 1),
1136 		     BLOCK_OP_CALL_PARM);
1137 }
1138 #endif /* REG_PARM_STACK_SPACE */
1139 
1140 /* If any elements in ARGS refer to parameters that are to be passed in
1141    registers, but not in memory, and whose alignment does not permit a
1142    direct copy into registers, copy the values into a group of pseudos
1143    which we will later copy into the appropriate hard registers.
1144 
1145    Pseudos for each unaligned argument will be stored into the array
1146    args[argnum].aligned_regs.  The caller is responsible for deallocating
1147    the aligned_regs array if it is nonzero.  */
1148 
1149 static void
1150 store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
1151 {
1152   int i, j;
1153 
1154   for (i = 0; i < num_actuals; i++)
1155     if (args[i].reg != 0 && ! args[i].pass_on_stack
1156 	&& GET_CODE (args[i].reg) != PARALLEL
1157 	&& args[i].mode == BLKmode
1158 	&& MEM_P (args[i].value)
1159 	&& (MEM_ALIGN (args[i].value)
1160 	    < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
1161       {
1162 	int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
1163 	int endian_correction = 0;
1164 
1165 	if (args[i].partial)
1166 	  {
1167 	    gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
1168 	    args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
1169 	  }
1170 	else
1171 	  {
1172 	    args[i].n_aligned_regs
1173 	      = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
1174 	  }
1175 
1176 	args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);
1177 
1178 	/* Structures smaller than a word are normally aligned to the
1179 	   least significant byte.  On a BYTES_BIG_ENDIAN machine,
1180 	   this means we must skip the empty high order bytes when
1181 	   calculating the bit offset.  */
1182 	if (bytes < UNITS_PER_WORD
1183 #ifdef BLOCK_REG_PADDING
1184 	    && (BLOCK_REG_PADDING (args[i].mode,
1185 				   TREE_TYPE (args[i].tree_value), 1)
1186 		== PAD_DOWNWARD)
1187 #else
1188 	    && BYTES_BIG_ENDIAN
1189 #endif
1190 	    )
1191 	  endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;
1192 
1193 	for (j = 0; j < args[i].n_aligned_regs; j++)
1194 	  {
1195 	    rtx reg = gen_reg_rtx (word_mode);
1196 	    rtx word = operand_subword_force (args[i].value, j, BLKmode);
1197 	    int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);
1198 
1199 	    args[i].aligned_regs[j] = reg;
1200 	    word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
1201 				      word_mode, word_mode, false, NULL);
1202 
1203 	    /* There is no need to restrict this code to loading items
1204 	       in TYPE_ALIGN sized hunks.  The bitfield instructions can
1205 	       load up entire word sized registers efficiently.
1206 
1207 	       ??? This may not be needed anymore.
1208	       We used to emit a clobber here, but that doesn't let later
1209 	       passes optimize the instructions we emit.  By storing 0 into
1210 	       the register later passes know the first AND to zero out the
1211 	       bitfield being set in the register is unnecessary.  The store
1212 	       of 0 will be deleted as will at least the first AND.  */
1213 
1214 	    emit_move_insn (reg, const0_rtx);
1215 
1216 	    bytes -= bitsize / BITS_PER_UNIT;
1217 	    store_bit_field (reg, bitsize, endian_correction, 0, 0,
1218 			     word_mode, word, false);
1219 	  }
1220       }
1221 }
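/* Worked example, illustrative only: for a 3-byte BLKmode structure passed
   in a register on a little-endian target with 64-bit words, N_ALIGNED_REGS
   is 1, BITSIZE is 24 and ENDIAN_CORRECTION is 0, so the loop above clears
   one fresh word_mode pseudo and stores the 24 useful bits into its low
   end; the hard register is loaded from that pseudo later.  */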
1222 
1223 /* The limit set by -Walloc-size-larger-than=.  */
1224 static GTY(()) tree alloc_object_size_limit;
1225 
1226 /* Initialize ALLOC_OBJECT_SIZE_LIMIT based on the -Walloc-size-larger-than=
1227    setting if the option is specified, or to the maximum object size if it
1228    is not.  Return the initialized value.  */
1229 
1230 static tree
1231 alloc_max_size (void)
1232 {
1233   if (!alloc_object_size_limit)
1234     {
1235       alloc_object_size_limit = max_object_size ();
1236 
1237       if (warn_alloc_size_limit)
1238 	{
1239 	  char *end = NULL;
1240 	  errno = 0;
1241 	  unsigned HOST_WIDE_INT unit = 1;
1242 	  unsigned HOST_WIDE_INT limit
1243 	    = strtoull (warn_alloc_size_limit, &end, 10);
1244 
1245 	  if (!errno)
1246 	    {
1247 	      if (end && *end)
1248 		{
1249 		  /* Numeric option arguments are at most INT_MAX.  Make it
1250 		     possible to specify a larger value by accepting common
1251 		     suffixes.  */
1252 		  if (!strcmp (end, "kB"))
1253 		    unit = 1000;
1254		  else if (!strcasecmp (end, "KiB") || !strcmp (end, "KB"))
1255 		    unit = 1024;
1256 		  else if (!strcmp (end, "MB"))
1257 		    unit = HOST_WIDE_INT_UC (1000) * 1000;
1258 		  else if (!strcasecmp (end, "MiB"))
1259 		    unit = HOST_WIDE_INT_UC (1024) * 1024;
1260 		  else if (!strcasecmp (end, "GB"))
1261 		    unit = HOST_WIDE_INT_UC (1000) * 1000 * 1000;
1262 		  else if (!strcasecmp (end, "GiB"))
1263 		    unit = HOST_WIDE_INT_UC (1024) * 1024 * 1024;
1264 		  else if (!strcasecmp (end, "TB"))
1265 		    unit = HOST_WIDE_INT_UC (1000) * 1000 * 1000 * 1000;
1266 		  else if (!strcasecmp (end, "TiB"))
1267 		    unit = HOST_WIDE_INT_UC (1024) * 1024 * 1024 * 1024;
1268 		  else if (!strcasecmp (end, "PB"))
1269 		    unit = HOST_WIDE_INT_UC (1000) * 1000 * 1000 * 1000 * 1000;
1270 		  else if (!strcasecmp (end, "PiB"))
1271 		    unit = HOST_WIDE_INT_UC (1024) * 1024 * 1024 * 1024 * 1024;
1272 		  else if (!strcasecmp (end, "EB"))
1273 		    unit = HOST_WIDE_INT_UC (1000) * 1000 * 1000 * 1000 * 1000
1274 			   * 1000;
1275 		  else if (!strcasecmp (end, "EiB"))
1276 		    unit = HOST_WIDE_INT_UC (1024) * 1024 * 1024 * 1024 * 1024
1277 			   * 1024;
1278 		  else
1279 		    unit = 0;
1280 		}
1281 
1282 	      if (unit)
1283 		{
1284 		  widest_int w = wi::mul (limit, unit);
1285 		  if (w < wi::to_widest (alloc_object_size_limit))
1286 		    alloc_object_size_limit
1287 		      = wide_int_to_tree (ptrdiff_type_node, w);
1288 		}
1289 	    }
1290 	}
1291     }
1292   return alloc_object_size_limit;
1293 }
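/* Worked example, illustrative only: with -Walloc-size-larger-than=2MiB the
   numeric part parses as 2 and END points at "MiB", so UNIT becomes
   1024 * 1024 and the limit is lowered to 2 * 1048576 == 2097152 bytes,
   replacing the default of max_object_size ().  */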
1294 
1295 /* Return true when EXP's range can be determined and set RANGE[] to it
1296    after adjusting it if necessary to make it represent a valid size
1297    of an object, or a valid size argument to an allocation function declared
1298    with attribute alloc_size (whose argument may be signed), or to a string
1299    manipulation function like memset.  When ALLOW_ZERO is true, allow
1300    returning a range of [0, 0] for a size in an anti-range [1, N] where
1301    N > PTRDIFF_MAX.  A zero range is a (nearly) invalid argument to
1302    allocation functions like malloc but it is a valid argument to
1303    functions like memset.  */
1304 
1305 bool
1306 get_size_range (tree exp, tree range[2], bool allow_zero /* = false */)
1307 {
1308   if (tree_fits_uhwi_p (exp))
1309     {
1310       /* EXP is a constant.  */
1311       range[0] = range[1] = exp;
1312       return true;
1313     }
1314 
1315   tree exptype = TREE_TYPE (exp);
1316   bool integral = INTEGRAL_TYPE_P (exptype);
1317 
1318   wide_int min, max;
1319   enum value_range_type range_type;
1320 
1321   if (TREE_CODE (exp) == SSA_NAME && integral)
1322     range_type = get_range_info (exp, &min, &max);
1323   else
1324     range_type = VR_VARYING;
1325 
1326   if (range_type == VR_VARYING)
1327     {
1328       if (integral)
1329 	{
1330 	  /* Use the full range of the type of the expression when
1331 	     no value range information is available.  */
1332 	  range[0] = TYPE_MIN_VALUE (exptype);
1333 	  range[1] = TYPE_MAX_VALUE (exptype);
1334 	  return true;
1335 	}
1336 
1337       range[0] = NULL_TREE;
1338       range[1] = NULL_TREE;
1339       return false;
1340     }
1341 
1342   unsigned expprec = TYPE_PRECISION (exptype);
1343 
1344   bool signed_p = !TYPE_UNSIGNED (exptype);
1345 
1346   if (range_type == VR_ANTI_RANGE)
1347     {
1348       if (signed_p)
1349 	{
1350 	  if (wi::les_p (max, 0))
1351 	    {
1352 	      /* EXP is not in a strictly negative range.  That means
1353 		 it must be in some (not necessarily strictly) positive
1354 		 range which includes zero.  Since in signed to unsigned
1355 		 conversions negative values end up converted to large
1356 		 positive values, and otherwise they are not valid sizes,
1357 		 the resulting range is in both cases [0, TYPE_MAX].  */
1358 	      min = wi::zero (expprec);
1359 	      max = wi::to_wide (TYPE_MAX_VALUE (exptype));
1360 	    }
1361 	  else if (wi::les_p (min - 1, 0))
1362 	    {
1363 	      /* EXP is not in a negative-positive range.  That means EXP
1364 		 is either negative, or greater than max.  Since negative
1365 		 sizes are invalid make the range [MAX + 1, TYPE_MAX].  */
1366 	      min = max + 1;
1367 	      max = wi::to_wide (TYPE_MAX_VALUE (exptype));
1368 	    }
1369 	  else
1370 	    {
1371 	      max = min - 1;
1372 	      min = wi::zero (expprec);
1373 	    }
1374 	}
1375       else if (wi::eq_p (0, min - 1))
1376 	{
1377 	  /* EXP is unsigned and not in the range [1, MAX].  That means
1378 	     it's either zero or greater than MAX.  Even though 0 would
1379 	     normally be detected by -Walloc-zero, unless ALLOW_ZERO
1380 	     is true, set the range to [MAX, TYPE_MAX] so that when MAX
1381 	     is greater than the limit the whole range is diagnosed.  */
1382 	  if (allow_zero)
1383 	    min = max = wi::zero (expprec);
1384 	  else
1385 	    {
1386 	      min = max + 1;
1387 	      max = wi::to_wide (TYPE_MAX_VALUE (exptype));
1388 	    }
1389 	}
1390       else
1391 	{
1392 	  max = min - 1;
1393 	  min = wi::zero (expprec);
1394 	}
1395     }
1396 
1397   range[0] = wide_int_to_tree (exptype, min);
1398   range[1] = wide_int_to_tree (exptype, max);
1399 
1400   return true;
1401 }
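/* Worked example, illustrative only: for a signed EXP whose value range is
   the anti-range ~[0, 7] (any value except 0..7), MIN is 0 and MAX is 7; the
   first test above (MAX <= 0) does not fire but the second (MIN - 1 <= 0)
   does, so the returned range is [MAX + 1, TYPE_MAX] == [8, TYPE_MAX]:
   negative values convert to huge unsigned sizes and are diagnosed together
   with values above the limit.  */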
1402 
1403 /* Diagnose a call EXP to function FN decorated with attribute alloc_size
1404    whose argument numbers given by IDX with values given by ARGS exceed
1405    the maximum object size or cause an unsigned overflow (wrapping) when
1406    multiplied.  When ARGS[0] is null the function does nothing.  ARGS[1]
1407    may be null for functions like malloc, and non-null for those like
1408    calloc that are decorated with a two-argument attribute alloc_size.  */
1409 
1410 void
1411 maybe_warn_alloc_args_overflow (tree fn, tree exp, tree args[2], int idx[2])
1412 {
1413   /* The range each of the (up to) two arguments is known to be in.  */
1414   tree argrange[2][2] = { { NULL_TREE, NULL_TREE }, { NULL_TREE, NULL_TREE } };
1415 
1416   /* Maximum object size set by -Walloc-size-larger-than= or SIZE_MAX / 2.  */
1417   tree maxobjsize = alloc_max_size ();
1418 
1419   location_t loc = EXPR_LOCATION (exp);
1420 
1421   bool warned = false;
1422 
1423   /* Validate each argument individually.  */
1424   for (unsigned i = 0; i != 2 && args[i]; ++i)
1425     {
1426       if (TREE_CODE (args[i]) == INTEGER_CST)
1427 	{
1428 	  argrange[i][0] = args[i];
1429 	  argrange[i][1] = args[i];
1430 
1431 	  if (tree_int_cst_lt (args[i], integer_zero_node))
1432 	    {
1433 	      warned = warning_at (loc, OPT_Walloc_size_larger_than_,
1434 				   "%Kargument %i value %qE is negative",
1435 				   exp, idx[i] + 1, args[i]);
1436 	    }
1437 	  else if (integer_zerop (args[i]))
1438 	    {
1439 	      /* Avoid issuing -Walloc-zero for allocation functions other
1440 		 than __builtin_alloca that are declared with attribute
1441 		 returns_nonnull because there's no portability risk.  This
1442 		 avoids warning for such calls to libiberty's xmalloc and
1443 		 friends.
1444 		 Also avoid issuing the warning for calls to function named
1445 		 "alloca".  */
1446 	      if ((DECL_FUNCTION_CODE (fn) == BUILT_IN_ALLOCA
1447 		   && IDENTIFIER_LENGTH (DECL_NAME (fn)) != 6)
1448 		  || (DECL_FUNCTION_CODE (fn) != BUILT_IN_ALLOCA
1449 		      && !lookup_attribute ("returns_nonnull",
1450 					    TYPE_ATTRIBUTES (TREE_TYPE (fn)))))
1451 		warned = warning_at (loc, OPT_Walloc_zero,
1452 				     "%Kargument %i value is zero",
1453 				     exp, idx[i] + 1);
1454 	    }
1455 	  else if (tree_int_cst_lt (maxobjsize, args[i]))
1456 	    {
1457 	      /* G++ emits calls to ::operator new[](SIZE_MAX) in C++98
1458 		 mode and with -fno-exceptions as a way to indicate array
1459 		 size overflow.  There's no good way to detect C++98 here
1460 		 so avoid diagnosing these calls for all C++ modes.  */
1461 	      if (i == 0
1462 		  && !args[1]
1463 		  && lang_GNU_CXX ()
1464 		  && DECL_IS_OPERATOR_NEW (fn)
1465 		  && integer_all_onesp (args[i]))
1466 		continue;
1467 
1468 	      warned = warning_at (loc, OPT_Walloc_size_larger_than_,
1469 				   "%Kargument %i value %qE exceeds "
1470 				   "maximum object size %E",
1471 				   exp, idx[i] + 1, args[i], maxobjsize);
1472 	    }
1473 	}
1474       else if (TREE_CODE (args[i]) == SSA_NAME
1475 	       && get_size_range (args[i], argrange[i]))
1476 	{
1477 	  /* Verify that the argument's range is not negative (including
1478 	     upper bound of zero).  */
1479 	  if (tree_int_cst_lt (argrange[i][0], integer_zero_node)
1480 	      && tree_int_cst_le (argrange[i][1], integer_zero_node))
1481 	    {
1482 	      warned = warning_at (loc, OPT_Walloc_size_larger_than_,
1483 				   "%Kargument %i range [%E, %E] is negative",
1484 				   exp, idx[i] + 1,
1485 				   argrange[i][0], argrange[i][1]);
1486 	    }
1487 	  else if (tree_int_cst_lt (maxobjsize, argrange[i][0]))
1488 	    {
1489 	      warned = warning_at (loc, OPT_Walloc_size_larger_than_,
1490 				   "%Kargument %i range [%E, %E] exceeds "
1491 				   "maximum object size %E",
1492 				   exp, idx[i] + 1,
1493 				   argrange[i][0], argrange[i][1],
1494 				   maxobjsize);
1495 	    }
1496 	}
1497     }
1498 
1499   if (!argrange[0])
1500     return;
1501 
1502   /* For a two-argument alloc_size, validate the product of the two
1503      arguments if both of their values or ranges are known.  */
1504   if (!warned && tree_fits_uhwi_p (argrange[0][0])
1505       && argrange[1][0] && tree_fits_uhwi_p (argrange[1][0])
1506       && !integer_onep (argrange[0][0])
1507       && !integer_onep (argrange[1][0]))
1508     {
1509       /* Check for overflow in the product of a function decorated with
1510 	 attribute alloc_size (X, Y).  */
1511       unsigned szprec = TYPE_PRECISION (size_type_node);
1512       wide_int x = wi::to_wide (argrange[0][0], szprec);
1513       wide_int y = wi::to_wide (argrange[1][0], szprec);
1514 
1515       bool vflow;
1516       wide_int prod = wi::umul (x, y, &vflow);
1517 
1518       if (vflow)
1519 	warned = warning_at (loc, OPT_Walloc_size_larger_than_,
1520 			     "%Kproduct %<%E * %E%> of arguments %i and %i "
1521 			     "exceeds %<SIZE_MAX%>",
1522 			     exp, argrange[0][0], argrange[1][0],
1523 			     idx[0] + 1, idx[1] + 1);
1524       else if (wi::ltu_p (wi::to_wide (maxobjsize, szprec), prod))
1525 	warned = warning_at (loc, OPT_Walloc_size_larger_than_,
1526 			     "%Kproduct %<%E * %E%> of arguments %i and %i "
1527 			     "exceeds maximum object size %E",
1528 			     exp, argrange[0][0], argrange[1][0],
1529 			     idx[0] + 1, idx[1] + 1,
1530 			     maxobjsize);
1531 
1532       if (warned)
1533 	{
1534 	  /* Print the full range of each of the two arguments to make
1535 	     it clear when it is, in fact, in a range and not constant.  */
1536 	  if (argrange[0][0] != argrange [0][1])
1537 	    inform (loc, "argument %i in the range [%E, %E]",
1538 		    idx[0] + 1, argrange[0][0], argrange[0][1]);
1539 	  if (argrange[1][0] != argrange [1][1])
1540 	    inform (loc, "argument %i in the range [%E, %E]",
1541 		    idx[1] + 1, argrange[1][0], argrange[1][1]);
1542 	}
1543     }
1544 
1545   if (warned)
1546     {
1547       location_t fnloc = DECL_SOURCE_LOCATION (fn);
1548 
1549       if (DECL_IS_BUILTIN (fn))
1550 	inform (loc,
1551 		"in a call to built-in allocation function %qD", fn);
1552       else
1553 	inform (fnloc,
1554 		"in a call to allocation function %qD declared here", fn);
1555     }
1556 }
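/* Worked example, illustrative only: for a call such as calloc (n, 8) where
   n is only known to lie in [0x2000000000000000, SIZE_MAX] on a target with
   a 64-bit size_t, the product check above multiplies the two lower bounds
   with wi::umul, detects the unsigned wraparound, and issues the
   -Walloc-size-larger-than= "product ... exceeds SIZE_MAX" warning, naming
   both argument positions.  */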
1557 
1558 /* If EXPR refers to a character array or pointer declared with attribute
1559    nonstring, return a decl for that array or pointer and set *REF to
1560    the referenced enclosing object or pointer.  Otherwise return
1561    null.  */
1562 
1563 tree
1564 get_attr_nonstring_decl (tree expr, tree *ref)
1565 {
1566   tree decl = expr;
1567   if (TREE_CODE (decl) == SSA_NAME)
1568     {
1569       gimple *def = SSA_NAME_DEF_STMT (decl);
1570 
1571       if (is_gimple_assign (def))
1572 	{
1573 	  tree_code code = gimple_assign_rhs_code (def);
1574 	  if (code == ADDR_EXPR
1575 	      || code == COMPONENT_REF
1576 	      || code == VAR_DECL)
1577 	    decl = gimple_assign_rhs1 (def);
1578 	}
1579       else if (tree var = SSA_NAME_VAR (decl))
1580 	decl = var;
1581     }
1582 
1583   if (TREE_CODE (decl) == ADDR_EXPR)
1584     decl = TREE_OPERAND (decl, 0);
1585 
1586   if (ref)
1587     *ref = decl;
1588 
1589   if (TREE_CODE (decl) == COMPONENT_REF)
1590     decl = TREE_OPERAND (decl, 1);
1591 
1592   if (DECL_P (decl)
1593       && lookup_attribute ("nonstring", DECL_ATTRIBUTES (decl)))
1594     return decl;
1595 
1596   return NULL_TREE;
1597 }
1598 
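/* Illustrative example only (not part of GCC): given declarations such as
   the following (the names are made up)

     extern char buf[8] __attribute__ ((nonstring));
     struct S { char a[4] __attribute__ ((nonstring)); } s;

   get_attr_nonstring_decl returns the VAR_DECL for buf when EXPR is buf
   or an ADDR_EXPR of buf, and the FIELD_DECL for a when EXPR is the
   COMPONENT_REF s.a; *REF is set to the referenced object (buf itself,
   or the COMPONENT_REF for s.a).  */
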
1599 /* Warn about passing a non-string array/pointer to a function that
1600    expects a nul-terminated string argument.  */
1601 
1602 void
1603 maybe_warn_nonstring_arg (tree fndecl, tree exp)
1604 {
1605   if (!fndecl || DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_NORMAL)
1606     return;
1607 
1608   bool with_bounds = CALL_WITH_BOUNDS_P (exp);
1609 
1610   unsigned nargs = call_expr_nargs (exp);
1611 
1612   /* The bound argument to a bounded string function like strncpy.  */
1613   tree bound = NULL_TREE;
1614 
1615   /* It's safe to call "bounded" string functions with a non-string
1616      argument since the functions provide an explicit bound for this
1617      purpose.  */
1618   switch (DECL_FUNCTION_CODE (fndecl))
1619     {
1620     case BUILT_IN_STPNCPY:
1621     case BUILT_IN_STPNCPY_CHK:
1622     case BUILT_IN_STRNCMP:
1623     case BUILT_IN_STRNCASECMP:
1624     case BUILT_IN_STRNCPY:
1625     case BUILT_IN_STRNCPY_CHK:
1626       {
1627 	unsigned argno = with_bounds ? 4 : 2;
1628 	if (argno < nargs)
1629 	  bound = CALL_EXPR_ARG (exp, argno);
1630 	break;
1631       }
1632 
1633     case BUILT_IN_STRNDUP:
1634       {
1635 	unsigned argno = with_bounds ? 2 : 1;
1636 	if (argno < nargs)
1637 	  bound = CALL_EXPR_ARG (exp, argno);
1638 	break;
1639       }
1640 
1641     default:
1642       break;
1643     }
1644 
1645   /* Determine the range of the bound argument (if specified).  */
1646   tree bndrng[2] = { NULL_TREE, NULL_TREE };
1647   if (bound)
1648     get_size_range (bound, bndrng);
1649 
1650   /* Iterate over the built-in function's formal arguments and check
1651      each const char* against the actual argument.  If the actual
1652      argument is declared attribute nonstring, issue a warning unless
1653      the argument's maximum length is bounded.  */
1654   function_args_iterator it;
1655   function_args_iter_init (&it, TREE_TYPE (fndecl));
1656 
1657   for (unsigned argno = 0; ; ++argno, function_args_iter_next (&it))
1658     {
1659       /* Avoid iterating past the declared argument in a call
1660 	 to a function declared without a prototype.  */
1661       if (argno >= nargs)
1662 	break;
1663 
1664       tree argtype = function_args_iter_cond (&it);
1665       if (!argtype)
1666 	break;
1667 
1668       if (TREE_CODE (argtype) != POINTER_TYPE)
1669 	continue;
1670 
1671       argtype = TREE_TYPE (argtype);
1672 
1673       if (TREE_CODE (argtype) != INTEGER_TYPE
1674 	  || !TYPE_READONLY (argtype))
1675 	continue;
1676 
1677       argtype = TYPE_MAIN_VARIANT (argtype);
1678       if (argtype != char_type_node)
1679 	continue;
1680 
1681       tree callarg = CALL_EXPR_ARG (exp, argno);
1682       if (TREE_CODE (callarg) == ADDR_EXPR)
1683 	callarg = TREE_OPERAND (callarg, 0);
1684 
1685       /* See if the destination is declared with attribute "nonstring".  */
1686       tree decl = get_attr_nonstring_decl (callarg);
1687       if (!decl)
1688 	continue;
1689 
1690       tree type = TREE_TYPE (decl);
1691 
1692       offset_int wibnd = 0;
1693       if (bndrng[0])
1694 	wibnd = wi::to_offset (bndrng[0]);
1695 
1696       offset_int asize = wibnd;
1697 
1698       if (TREE_CODE (type) == ARRAY_TYPE)
1699 	if (tree arrbnd = TYPE_DOMAIN (type))
1700 	  {
1701 	    if ((arrbnd = TYPE_MAX_VALUE (arrbnd)))
1702 	      asize = wi::to_offset (arrbnd) + 1;
1703 	  }
1704 
1705       location_t loc = EXPR_LOCATION (exp);
1706 
1707       bool warned = false;
1708 
1709       if (wi::ltu_p (asize, wibnd))
1710 	warned = warning_at (loc, OPT_Wstringop_overflow_,
1711 			     "%qD argument %i declared attribute %<nonstring%> "
1712 			     "is smaller than the specified bound %E",
1713 			     fndecl, argno + 1, bndrng[0]);
1714       else if (!bound)
1715 	warned = warning_at (loc, OPT_Wstringop_overflow_,
1716 			     "%qD argument %i declared attribute %<nonstring%>",
1717 			     fndecl, argno + 1);
1718 
1719       if (warned)
1720 	inform (DECL_SOURCE_LOCATION (decl),
1721 		"argument %qD declared here", decl);
1722     }
1723 }
1724 
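/* Illustrative example only (not part of GCC): with a declaration like

     char arr[3] __attribute__ ((nonstring));

   a call such as strlen (arr) is flagged by the checks above because
   strlen takes no explicit bound, strncpy (d, arr, 4) is flagged because
   the bound 4 exceeds the size of arr, and strncpy (d, arr, 3) is
   accepted because the bound covers the array (d is assumed to be a
   sufficiently large char array).  Each warning is followed by a note
   pointing at the declaration of arr.  */
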
1725 /* Issue an error if CALL_EXPR was flagged as requiring
1726    tail-call optimization.  */
1727 
1728 static void
1729 maybe_complain_about_tail_call (tree call_expr, const char *reason)
1730 {
1731   gcc_assert (TREE_CODE (call_expr) == CALL_EXPR);
1732   if (!CALL_EXPR_MUST_TAIL_CALL (call_expr))
1733     return;
1734 
1735   error_at (EXPR_LOCATION (call_expr), "cannot tail-call: %s", reason);
1736 }
1737 
1738 /* Fill in ARGS_SIZE and ARGS array based on the parameters found in
1739    CALL_EXPR EXP.
1740 
1741    NUM_ACTUALS is the total number of parameters.
1742 
1743    N_NAMED_ARGS is the total number of named arguments.
1744 
1745    STRUCT_VALUE_ADDR_VALUE is the implicit argument for a struct return
1746    value, or null.
1747 
1748    FNDECL is the tree code for the target of this call (if known)
1749 
1750    ARGS_SO_FAR holds state needed by the target to know where to place
1751    the next argument.
1752 
1753    REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
1754    for arguments which are passed in registers.
1755 
1756    OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
1757    and may be modified by this routine.
1758 
1759    OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
1760    flags which may be modified by this routine.
1761 
1762    MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
1763    that requires allocation of stack space.
1764 
1765    CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
1766    the thunked-to function.  */
1767 
1768 static void
1769 initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
1770 				 struct arg_data *args,
1771 				 struct args_size *args_size,
1772 				 int n_named_args ATTRIBUTE_UNUSED,
1773 				 tree exp, tree struct_value_addr_value,
1774 				 tree fndecl, tree fntype,
1775 				 cumulative_args_t args_so_far,
1776 				 int reg_parm_stack_space,
1777 				 rtx *old_stack_level,
1778 				 poly_int64_pod *old_pending_adj,
1779 				 int *must_preallocate, int *ecf_flags,
1780 				 bool *may_tailcall, bool call_from_thunk_p)
1781 {
1782   CUMULATIVE_ARGS *args_so_far_pnt = get_cumulative_args (args_so_far);
1783   location_t loc = EXPR_LOCATION (exp);
1784 
1785   /* Count arg position in order args appear.  */
1786   int argpos;
1787 
1788   int i;
1789 
1790   args_size->constant = 0;
1791   args_size->var = 0;
1792 
1793   bitmap_obstack_initialize (NULL);
1794 
1795   /* In this loop, we consider args in the order they are written.
1796      We fill up ARGS from the back.  */
1797 
1798   i = num_actuals - 1;
1799   {
1800     int j = i, ptr_arg = -1;
1801     call_expr_arg_iterator iter;
1802     tree arg;
1803     bitmap slots = NULL;
1804 
1805     if (struct_value_addr_value)
1806       {
1807 	args[j].tree_value = struct_value_addr_value;
1808 	j--;
1809 
1810 	/* If we pass a structure address then we need to
1811 	   create bounds for it.  Since creating the bounds is
1812 	   a call statement, we expand it right here to avoid
1813 	   fixing up all other places where it might otherwise be expanded.  */
1814 	if (CALL_WITH_BOUNDS_P (exp))
1815 	  {
1816 	    args[j].value = gen_reg_rtx (targetm.chkp_bound_mode ());
1817 	    args[j].tree_value
1818 	      = chkp_make_bounds_for_struct_addr (struct_value_addr_value);
1819 	    expand_expr_real (args[j].tree_value, args[j].value, VOIDmode,
1820 			      EXPAND_NORMAL, 0, false);
1821 	    args[j].pointer_arg = j + 1;
1822 	    j--;
1823 	  }
1824       }
1825     argpos = 0;
1826     FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
1827       {
1828 	tree argtype = TREE_TYPE (arg);
1829 
1830 	/* Remember last param with pointer and associate it
1831 	   with following pointer bounds.  */
1832 	if (CALL_WITH_BOUNDS_P (exp)
1833 	    && chkp_type_has_pointer (argtype))
1834 	  {
1835 	    if (slots)
1836 	      BITMAP_FREE (slots);
1837 	    ptr_arg = j;
1838 	    if (!BOUNDED_TYPE_P (argtype))
1839 	      {
1840 		slots = BITMAP_ALLOC (NULL);
1841 		chkp_find_bound_slots (argtype, slots);
1842 	      }
1843 	  }
1844 	else if (CALL_WITH_BOUNDS_P (exp)
1845 		 && pass_by_reference (NULL, TYPE_MODE (argtype), argtype,
1846 				       argpos < n_named_args))
1847 	  {
1848 	    if (slots)
1849 	      BITMAP_FREE (slots);
1850 	    ptr_arg = j;
1851 	  }
1852 	else if (POINTER_BOUNDS_TYPE_P (argtype))
1853 	  {
1854 	    /* We expect bounds in instrumented calls only.
1855 	       Otherwise it is a sign that we lost the flag due to some
1856 	       optimization and may emit call args incorrectly.  */
1857 	    gcc_assert (CALL_WITH_BOUNDS_P (exp));
1858 
1859 	    /* For structures look for the next available pointer.  */
1860 	    if (ptr_arg != -1 && slots)
1861 	      {
1862 		unsigned bnd_no = bitmap_first_set_bit (slots);
1863 		args[j].pointer_offset =
1864 		  bnd_no * POINTER_SIZE / BITS_PER_UNIT;
1865 
1866 		bitmap_clear_bit (slots, bnd_no);
1867 
1868 		/* Check we have no more pointers in the structure.  */
1869 		if (bitmap_empty_p (slots))
1870 		  BITMAP_FREE (slots);
1871 	      }
1872 	    args[j].pointer_arg = ptr_arg;
1873 
1874 	    /* Check that we covered all pointers in the previous
1875 	       non-bounds arg.  */
1876 	    if (!slots)
1877 	      ptr_arg = -1;
1878 	  }
1879 	else
1880 	  ptr_arg = -1;
1881 
1882 	if (targetm.calls.split_complex_arg
1883 	    && argtype
1884 	    && TREE_CODE (argtype) == COMPLEX_TYPE
1885 	    && targetm.calls.split_complex_arg (argtype))
1886 	  {
1887 	    tree subtype = TREE_TYPE (argtype);
1888 	    args[j].tree_value = build1 (REALPART_EXPR, subtype, arg);
1889 	    j--;
1890 	    args[j].tree_value = build1 (IMAGPART_EXPR, subtype, arg);
1891 	  }
1892 	else
1893 	  args[j].tree_value = arg;
1894 	j--;
1895 	argpos++;
1896       }
1897 
1898     if (slots)
1899       BITMAP_FREE (slots);
1900   }
1901 
1902   bitmap_obstack_release (NULL);
1903 
1904   /* Extract attribute alloc_size and if set, store the indices of
1905      the corresponding arguments in ALLOC_IDX, and then the actual
1906      argument(s) at those indices in ALLOC_ARGS.  */
1907   int alloc_idx[2] = { -1, -1 };
1908   if (tree alloc_size
1909       = (fndecl ? lookup_attribute ("alloc_size",
1910 				    TYPE_ATTRIBUTES (TREE_TYPE (fndecl)))
1911 	 : NULL_TREE))
1912     {
1913       tree args = TREE_VALUE (alloc_size);
1914       alloc_idx[0] = TREE_INT_CST_LOW (TREE_VALUE (args)) - 1;
1915       if (TREE_CHAIN (args))
1916 	alloc_idx[1] = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (args))) - 1;
1917     }
1918 
1919   /* Array for up to the two attribute alloc_size arguments.  */
1920   tree alloc_args[] = { NULL_TREE, NULL_TREE };
1921 
1922   /* I counts args in order (to be) pushed; ARGPOS counts in order written.  */
1923   for (argpos = 0; argpos < num_actuals; i--, argpos++)
1924     {
1925       tree type = TREE_TYPE (args[i].tree_value);
1926       int unsignedp;
1927       machine_mode mode;
1928 
1929       /* Replace erroneous argument with constant zero.  */
1930       if (type == error_mark_node || !COMPLETE_TYPE_P (type))
1931 	args[i].tree_value = integer_zero_node, type = integer_type_node;
1932 
1933       /* If TYPE is a transparent union or record, pass things the way
1934 	 we would pass the first field of the union or record.  We have
1935 	 already verified that the modes are the same.  */
1936       if ((TREE_CODE (type) == UNION_TYPE || TREE_CODE (type) == RECORD_TYPE)
1937 	   && TYPE_TRANSPARENT_AGGR (type))
1938 	type = TREE_TYPE (first_field (type));
1939 
1940       /* Decide where to pass this arg.
1941 
1942 	 args[i].reg is nonzero if all or part is passed in registers.
1943 
1944 	 args[i].partial is nonzero if part but not all is passed in registers,
1945 	 and the exact value says how many bytes are passed in registers.
1946 
1947 	 args[i].pass_on_stack is nonzero if the argument must at least be
1948 	 computed on the stack.  It may then be loaded back into registers
1949 	 if args[i].reg is nonzero.
1950 
1951 	 These decisions are driven by the FUNCTION_... macros and must agree
1952 	 with those made by function.c.  */
1953 
1954       /* See if this argument should be passed by invisible reference.  */
1955       if (pass_by_reference (args_so_far_pnt, TYPE_MODE (type),
1956 			     type, argpos < n_named_args))
1957 	{
1958 	  bool callee_copies;
1959 	  tree base = NULL_TREE;
1960 
1961 	  callee_copies
1962 	    = reference_callee_copied (args_so_far_pnt, TYPE_MODE (type),
1963 				       type, argpos < n_named_args);
1964 
1965 	  /* If we're compiling a thunk, pass through invisible references
1966 	     instead of making a copy.  */
1967 	  if (call_from_thunk_p
1968 	      || (callee_copies
1969 		  && !TREE_ADDRESSABLE (type)
1970 		  && (base = get_base_address (args[i].tree_value))
1971 		  && TREE_CODE (base) != SSA_NAME
1972 		  && (!DECL_P (base) || MEM_P (DECL_RTL (base)))))
1973 	    {
1974 	      /* We may have turned the parameter value into an SSA name.
1975 		 Go back to the original parameter so we can take the
1976 		 address.  */
1977 	      if (TREE_CODE (args[i].tree_value) == SSA_NAME)
1978 		{
1979 		  gcc_assert (SSA_NAME_IS_DEFAULT_DEF (args[i].tree_value));
1980 		  args[i].tree_value = SSA_NAME_VAR (args[i].tree_value);
1981 		  gcc_assert (TREE_CODE (args[i].tree_value) == PARM_DECL);
1982 		}
1983 	      /* Argument setup code may have copied the value to a register.  We
1984 		 revert that optimization now because the tail call code must
1985 		 use the original location.  */
1986 	      if (TREE_CODE (args[i].tree_value) == PARM_DECL
1987 		  && !MEM_P (DECL_RTL (args[i].tree_value))
1988 		  && DECL_INCOMING_RTL (args[i].tree_value)
1989 		  && MEM_P (DECL_INCOMING_RTL (args[i].tree_value)))
1990 		set_decl_rtl (args[i].tree_value,
1991 			      DECL_INCOMING_RTL (args[i].tree_value));
1992 
1993 	      mark_addressable (args[i].tree_value);
1994 
1995 	      /* We can't use sibcalls if a callee-copied argument is
1996 		 stored in the current function's frame.  */
1997 	      if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
1998 		{
1999 		  *may_tailcall = false;
2000 		  maybe_complain_about_tail_call (exp,
2001 						  "a callee-copied argument is"
2002 						  " stored in the current"
2003 						  " function's frame");
2004 		}
2005 
2006 	      args[i].tree_value = build_fold_addr_expr_loc (loc,
2007 							 args[i].tree_value);
2008 	      type = TREE_TYPE (args[i].tree_value);
2009 
2010 	      if (*ecf_flags & ECF_CONST)
2011 		*ecf_flags &= ~(ECF_CONST | ECF_LOOPING_CONST_OR_PURE);
2012 	    }
2013 	  else
2014 	    {
2015 	      /* We make a copy of the object and pass the address to the
2016 		 function being called.  */
2017 	      rtx copy;
2018 
2019 	      if (!COMPLETE_TYPE_P (type)
2020 		  || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
2021 		  || (flag_stack_check == GENERIC_STACK_CHECK
2022 		      && compare_tree_int (TYPE_SIZE_UNIT (type),
2023 					   STACK_CHECK_MAX_VAR_SIZE) > 0))
2024 		{
2025 		  /* This is a variable-sized object.  Make space on the stack
2026 		     for it.  */
2027 		  rtx size_rtx = expr_size (args[i].tree_value);
2028 
2029 		  if (*old_stack_level == 0)
2030 		    {
2031 		      emit_stack_save (SAVE_BLOCK, old_stack_level);
2032 		      *old_pending_adj = pending_stack_adjust;
2033 		      pending_stack_adjust = 0;
2034 		    }
2035 
2036 		  /* We can pass TRUE as the 4th argument because we just
2037 		     saved the stack pointer and will restore it right after
2038 		     the call.  */
2039 		  copy = allocate_dynamic_stack_space (size_rtx,
2040 						       TYPE_ALIGN (type),
2041 						       TYPE_ALIGN (type),
2042 						       max_int_size_in_bytes
2043 						       (type),
2044 						       true);
2045 		  copy = gen_rtx_MEM (BLKmode, copy);
2046 		  set_mem_attributes (copy, type, 1);
2047 		}
2048 	      else
2049 		copy = assign_temp (type, 1, 0);
2050 
2051 	      store_expr (args[i].tree_value, copy, 0, false, false);
2052 
2053 	      /* Just change the const function to pure and then let
2054 		 the next test clear the pure based on
2055 		 callee_copies.  */
2056 	      if (*ecf_flags & ECF_CONST)
2057 		{
2058 		  *ecf_flags &= ~ECF_CONST;
2059 		  *ecf_flags |= ECF_PURE;
2060 		}
2061 
2062 	      if (!callee_copies && *ecf_flags & ECF_PURE)
2063 		*ecf_flags &= ~(ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
2064 
2065 	      args[i].tree_value
2066 		= build_fold_addr_expr_loc (loc, make_tree (type, copy));
2067 	      type = TREE_TYPE (args[i].tree_value);
2068 	      *may_tailcall = false;
2069 	      maybe_complain_about_tail_call (exp,
2070 					      "argument must be passed"
2071 					      " by copying");
2072 	    }
2073 	}
2074 
2075       unsignedp = TYPE_UNSIGNED (type);
2076       mode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
2077 				    fndecl ? TREE_TYPE (fndecl) : fntype, 0);
2078 
2079       args[i].unsignedp = unsignedp;
2080       args[i].mode = mode;
2081 
2082       targetm.calls.warn_parameter_passing_abi (args_so_far, type);
2083 
2084       args[i].reg = targetm.calls.function_arg (args_so_far, mode, type,
2085 						argpos < n_named_args);
2086 
2087       if (args[i].reg && CONST_INT_P (args[i].reg))
2088 	{
2089 	  args[i].special_slot = args[i].reg;
2090 	  args[i].reg = NULL;
2091 	}
2092 
2093       /* If this is a sibling call and the machine has register windows, the
2094 	 register window has to be unwound before calling the routine, so
2095 	 arguments have to go into the incoming registers.  */
2096       if (targetm.calls.function_incoming_arg != targetm.calls.function_arg)
2097 	args[i].tail_call_reg
2098 	  = targetm.calls.function_incoming_arg (args_so_far, mode, type,
2099 						 argpos < n_named_args);
2100       else
2101 	args[i].tail_call_reg = args[i].reg;
2102 
2103       if (args[i].reg)
2104 	args[i].partial
2105 	  = targetm.calls.arg_partial_bytes (args_so_far, mode, type,
2106 					     argpos < n_named_args);
2107 
2108       args[i].pass_on_stack = targetm.calls.must_pass_in_stack (mode, type);
2109 
2110       /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
2111 	 it means that we are to pass this arg in the register(s) designated
2112 	 by the PARALLEL, but also to pass it in the stack.  */
2113       if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
2114 	  && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
2115 	args[i].pass_on_stack = 1;
2116 
2117       /* If this is an addressable type, we must preallocate the stack
2118 	 since we must evaluate the object into its final location.
2119 
2120 	 If this is to be passed in both registers and the stack, it is simpler
2121 	 to preallocate.  */
2122       if (TREE_ADDRESSABLE (type)
2123 	  || (args[i].pass_on_stack && args[i].reg != 0))
2124 	*must_preallocate = 1;
2125 
2126       /* No stack allocation and padding for bounds.  */
2127       if (POINTER_BOUNDS_P (args[i].tree_value))
2128 	;
2129       /* Compute the stack-size of this argument.  */
2130       else if (args[i].reg == 0 || args[i].partial != 0
2131 	       || reg_parm_stack_space > 0
2132 	       || args[i].pass_on_stack)
2133 	locate_and_pad_parm (mode, type,
2134 #ifdef STACK_PARMS_IN_REG_PARM_AREA
2135 			     1,
2136 #else
2137 			     args[i].reg != 0,
2138 #endif
2139 			     reg_parm_stack_space,
2140 			     args[i].pass_on_stack ? 0 : args[i].partial,
2141 			     fndecl, args_size, &args[i].locate);
2142 #ifdef BLOCK_REG_PADDING
2143       else
2144 	/* The argument is passed entirely in registers.  See at which
2145 	   end it should be padded.  */
2146 	args[i].locate.where_pad =
2147 	  BLOCK_REG_PADDING (mode, type,
2148 			     int_size_in_bytes (type) <= UNITS_PER_WORD);
2149 #endif
2150 
2151       /* Update ARGS_SIZE, the total stack space for args so far.  */
2152 
2153       args_size->constant += args[i].locate.size.constant;
2154       if (args[i].locate.size.var)
2155 	ADD_PARM_SIZE (*args_size, args[i].locate.size.var);
2156 
2157       /* Increment ARGS_SO_FAR, which has info about which arg-registers
2158 	 have been used, etc.  */
2159 
2160       targetm.calls.function_arg_advance (args_so_far, TYPE_MODE (type),
2161 					  type, argpos < n_named_args);
2162 
2163       /* Store argument values for functions decorated with attribute
2164 	 alloc_size.  */
2165       if (argpos == alloc_idx[0])
2166 	alloc_args[0] = args[i].tree_value;
2167       else if (argpos == alloc_idx[1])
2168 	alloc_args[1] = args[i].tree_value;
2169     }
2170 
2171   if (alloc_args[0])
2172     {
2173       /* Check the arguments of functions decorated with attribute
2174 	 alloc_size.  */
2175       maybe_warn_alloc_args_overflow (fndecl, exp, alloc_args, alloc_idx);
2176     }
2177 
2178   /* Detect passing non-string arguments to functions expecting
2179      nul-terminated strings.  */
2180   maybe_warn_nonstring_arg (fndecl, exp);
2181 }
2182 
2183 /* Update ARGS_SIZE to contain the total size for the argument block.
2184    Return the original constant component of the argument block's size.
2185 
2186    REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
2187    for arguments passed in registers.  */
2188 
2189 static poly_int64
2190 compute_argument_block_size (int reg_parm_stack_space,
2191 			     struct args_size *args_size,
2192 			     tree fndecl ATTRIBUTE_UNUSED,
2193 			     tree fntype ATTRIBUTE_UNUSED,
2194 			     int preferred_stack_boundary ATTRIBUTE_UNUSED)
2195 {
2196   poly_int64 unadjusted_args_size = args_size->constant;
2197 
2198   /* For accumulate outgoing args mode we don't need to align, since the frame
2199      will already be aligned.  Align to STACK_BOUNDARY in order to prevent
2200      backends from generating misaligned frame sizes.  */
2201   if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
2202     preferred_stack_boundary = STACK_BOUNDARY;
2203 
2204   /* Compute the actual size of the argument block required.  The variable
2205      and constant sizes must be combined, the size may have to be rounded,
2206      and there may be a minimum required size.  */
2207 
2208   if (args_size->var)
2209     {
2210       args_size->var = ARGS_SIZE_TREE (*args_size);
2211       args_size->constant = 0;
2212 
2213       preferred_stack_boundary /= BITS_PER_UNIT;
2214       if (preferred_stack_boundary > 1)
2215 	{
2216 	  /* We don't handle this case yet.  To handle it correctly we have
2217 	     to add the delta, round and subtract the delta.
2218 	     Currently no machine description requires this support.  */
2219 	  gcc_assert (multiple_p (stack_pointer_delta,
2220 				  preferred_stack_boundary));
2221 	  args_size->var = round_up (args_size->var, preferred_stack_boundary);
2222 	}
2223 
2224       if (reg_parm_stack_space > 0)
2225 	{
2226 	  args_size->var
2227 	    = size_binop (MAX_EXPR, args_size->var,
2228 			  ssize_int (reg_parm_stack_space));
2229 
2230 	  /* The area corresponding to register parameters does not count toward
2231 	     the size of the block we need, so make the adjustment.  */
2232 	  if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
2233 	    args_size->var
2234 	      = size_binop (MINUS_EXPR, args_size->var,
2235 			    ssize_int (reg_parm_stack_space));
2236 	}
2237     }
2238   else
2239     {
2240       preferred_stack_boundary /= BITS_PER_UNIT;
2241       if (preferred_stack_boundary < 1)
2242 	preferred_stack_boundary = 1;
2243       args_size->constant = (aligned_upper_bound (args_size->constant
2244 						  + stack_pointer_delta,
2245 						  preferred_stack_boundary)
2246 			     - stack_pointer_delta);
2247 
2248       args_size->constant = upper_bound (args_size->constant,
2249 					 reg_parm_stack_space);
2250 
2251       if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
2252 	args_size->constant -= reg_parm_stack_space;
2253     }
2254   return unadjusted_args_size;
2255 }
2256 
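/* Worked example (assumed values, for illustration only): with a
   PREFERRED_STACK_BOUNDARY of 128 bits (16 bytes), a zero
   stack_pointer_delta, no variable-sized component, no required
   reg_parm_stack_space, and a constant argument size of 20 bytes, the
   constant component is rounded up to 32 bytes while the returned
   unadjusted size stays 20.  */
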
2257 /* Precompute parameters as needed for a function call.
2258 
2259    FLAGS is mask of ECF_* constants.
2260 
2261    NUM_ACTUALS is the number of arguments.
2262 
2263    ARGS is an array containing information for each argument; this
2264    routine fills in the INITIAL_VALUE and VALUE fields for each
2265    precomputed argument.  */
2266 
2267 static void
2268 precompute_arguments (int num_actuals, struct arg_data *args)
2269 {
2270   int i;
2271 
2272   /* If this is a libcall, then precompute all arguments so that we do not
2273      get extraneous instructions emitted as part of the libcall sequence.  */
2274 
2275   /* If we preallocated the stack space, and some arguments must be passed
2276      on the stack, then we must precompute any parameter which contains a
2277      function call which will store arguments on the stack.
2278      Otherwise, evaluating the parameter may clobber previous parameters
2279      which have already been stored into the stack.  (We have code to avoid
2280      such a case by saving the outgoing stack arguments, but it results in
2281      worse code.)  */
2282   if (!ACCUMULATE_OUTGOING_ARGS)
2283     return;
2284 
2285   for (i = 0; i < num_actuals; i++)
2286     {
2287       tree type;
2288       machine_mode mode;
2289 
2290       if (TREE_CODE (args[i].tree_value) != CALL_EXPR)
2291 	continue;
2292 
2293       /* If this is an addressable type, we cannot pre-evaluate it.  */
2294       type = TREE_TYPE (args[i].tree_value);
2295       gcc_assert (!TREE_ADDRESSABLE (type));
2296 
2297       args[i].initial_value = args[i].value
2298 	= expand_normal (args[i].tree_value);
2299 
2300       mode = TYPE_MODE (type);
2301       if (mode != args[i].mode)
2302 	{
2303 	  int unsignedp = args[i].unsignedp;
2304 	  args[i].value
2305 	    = convert_modes (args[i].mode, mode,
2306 			     args[i].value, args[i].unsignedp);
2307 
2308 	  /* CSE will replace this only if it contains args[i].value
2309 	     pseudo, so convert it down to the declared mode using
2310 	     a SUBREG.  */
2311 	  if (REG_P (args[i].value)
2312 	      && GET_MODE_CLASS (args[i].mode) == MODE_INT
2313 	      && promote_mode (type, mode, &unsignedp) != args[i].mode)
2314 	    {
2315 	      args[i].initial_value
2316 		= gen_lowpart_SUBREG (mode, args[i].value);
2317 	      SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
2318 	      SUBREG_PROMOTED_SET (args[i].initial_value, args[i].unsignedp);
2319 	    }
2320 	}
2321     }
2322 }
2323 
2324 /* Given the current state of MUST_PREALLOCATE and information about
2325    arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
2326    compute and return the final value for MUST_PREALLOCATE.  */
2327 
2328 static int
2329 finalize_must_preallocate (int must_preallocate, int num_actuals,
2330 			   struct arg_data *args, struct args_size *args_size)
2331 {
2332   /* See if we have or want to preallocate stack space.
2333 
2334      If we would have to push a partially-in-regs parm
2335      before other stack parms, preallocate stack space instead.
2336 
2337      If the size of some parm is not a multiple of the required stack
2338      alignment, we must preallocate.
2339 
2340      If the total size of arguments that would otherwise create a copy in
2341      a temporary (such as a CALL) is more than half the total argument list
2342      size, preallocation is faster.
2343 
2344      Another reason to preallocate is if we have a machine (like the m88k)
2345      where stack alignment is required to be maintained between every
2346      pair of insns, not just when the call is made.  However, we assume here
2347      that such machines either do not have push insns (and hence preallocation
2348      would occur anyway) or the problem is taken care of with
2349      PUSH_ROUNDING.  */
2350 
2351   if (! must_preallocate)
2352     {
2353       int partial_seen = 0;
2354       poly_int64 copy_to_evaluate_size = 0;
2355       int i;
2356 
2357       for (i = 0; i < num_actuals && ! must_preallocate; i++)
2358 	{
2359 	  if (args[i].partial > 0 && ! args[i].pass_on_stack)
2360 	    partial_seen = 1;
2361 	  else if (partial_seen && args[i].reg == 0)
2362 	    must_preallocate = 1;
2363 	  /* We preallocate when bounds are passed in the bounds
2364 	     table so that we have a precomputed address for
2365 	     bounds association.  */
2366 	  else if (POINTER_BOUNDS_P (args[i].tree_value)
2367 		   && !args[i].reg)
2368 	    must_preallocate = 1;
2369 
2370 	  if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
2371 	      && (TREE_CODE (args[i].tree_value) == CALL_EXPR
2372 		  || TREE_CODE (args[i].tree_value) == TARGET_EXPR
2373 		  || TREE_CODE (args[i].tree_value) == COND_EXPR
2374 		  || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
2375 	    copy_to_evaluate_size
2376 	      += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
2377 	}
2378 
2379       if (maybe_ne (args_size->constant, 0)
2380 	  && maybe_ge (copy_to_evaluate_size * 2, args_size->constant))
2381 	must_preallocate = 1;
2382     }
2383   return must_preallocate;
2384 }
2385 
2386 /* If we preallocated stack space, compute the address of each argument
2387    and store it into the ARGS array.
2388 
2389    We need not ensure it is a valid memory address here; it will be
2390    validized when it is used.
2391 
2392    ARGBLOCK is an rtx for the address of the outgoing arguments.  */
2393 
2394 static void
2395 compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
2396 {
2397   if (argblock)
2398     {
2399       rtx arg_reg = argblock;
2400       int i;
2401       poly_int64 arg_offset = 0;
2402 
2403       if (GET_CODE (argblock) == PLUS)
2404 	{
2405 	  arg_reg = XEXP (argblock, 0);
2406 	  arg_offset = rtx_to_poly_int64 (XEXP (argblock, 1));
2407 	}
2408 
2409       for (i = 0; i < num_actuals; i++)
2410 	{
2411 	  rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
2412 	  rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
2413 	  rtx addr;
2414 	  unsigned int align, boundary;
2415 	  poly_uint64 units_on_stack = 0;
2416 	  machine_mode partial_mode = VOIDmode;
2417 
2418 	  /* Skip this parm if it will not be passed on the stack.  */
2419 	  if (! args[i].pass_on_stack
2420 	      && args[i].reg != 0
2421 	      && args[i].partial == 0)
2422 	    continue;
2423 
2424 	  if (TYPE_EMPTY_P (TREE_TYPE (args[i].tree_value)))
2425 	    continue;
2426 
2427 	  /* Pointer Bounds are never passed on the stack.  */
2428 	  if (POINTER_BOUNDS_P (args[i].tree_value))
2429 	    continue;
2430 
2431 	  addr = simplify_gen_binary (PLUS, Pmode, arg_reg, offset);
2432 	  addr = plus_constant (Pmode, addr, arg_offset);
2433 
2434 	  if (args[i].partial != 0)
2435 	    {
2436 	      /* Only part of the parameter is being passed on the stack.
2437 		 Generate a simple memory reference of the correct size.  */
2438 	      units_on_stack = args[i].locate.size.constant;
2439 	      poly_uint64 bits_on_stack = units_on_stack * BITS_PER_UNIT;
2440 	      partial_mode = int_mode_for_size (bits_on_stack, 1).else_blk ();
2441 	      args[i].stack = gen_rtx_MEM (partial_mode, addr);
2442 	      set_mem_size (args[i].stack, units_on_stack);
2443 	    }
2444 	  else
2445 	    {
2446 	      args[i].stack = gen_rtx_MEM (args[i].mode, addr);
2447 	      set_mem_attributes (args[i].stack,
2448 				  TREE_TYPE (args[i].tree_value), 1);
2449 	    }
2450 	  align = BITS_PER_UNIT;
2451 	  boundary = args[i].locate.boundary;
2452 	  poly_int64 offset_val;
2453 	  if (args[i].locate.where_pad != PAD_DOWNWARD)
2454 	    align = boundary;
2455 	  else if (poly_int_rtx_p (offset, &offset_val))
2456 	    {
2457 	      align = least_bit_hwi (boundary);
2458 	      unsigned int offset_align
2459 		= known_alignment (offset_val) * BITS_PER_UNIT;
2460 	      if (offset_align != 0)
2461 		align = MIN (align, offset_align);
2462 	    }
2463 	  set_mem_align (args[i].stack, align);
2464 
2465 	  addr = simplify_gen_binary (PLUS, Pmode, arg_reg, slot_offset);
2466 	  addr = plus_constant (Pmode, addr, arg_offset);
2467 
2468 	  if (args[i].partial != 0)
2469 	    {
2470 	      /* Only part of the parameter is being passed on the stack.
2471 		 Generate a simple memory reference of the correct size.
2472 	       */
2473 	      args[i].stack_slot = gen_rtx_MEM (partial_mode, addr);
2474 	      set_mem_size (args[i].stack_slot, units_on_stack);
2475 	    }
2476 	  else
2477 	    {
2478 	      args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
2479 	      set_mem_attributes (args[i].stack_slot,
2480 				  TREE_TYPE (args[i].tree_value), 1);
2481 	    }
2482 	  set_mem_align (args[i].stack_slot, args[i].locate.boundary);
2483 
2484 	  /* Function incoming arguments may overlap with sibling call
2485 	     outgoing arguments and we cannot allow reordering of reads
2486 	     from function arguments with stores to outgoing arguments
2487 	     of sibling calls.  */
2488 	  set_mem_alias_set (args[i].stack, 0);
2489 	  set_mem_alias_set (args[i].stack_slot, 0);
2490 	}
2491     }
2492 }
2493 
2494 /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
2495    in a call instruction.
2496 
2497    FNDECL is the tree node for the target function.  For an indirect call
2498    FNDECL will be NULL_TREE.
2499 
2500    ADDR is the operand 0 of CALL_EXPR for this call.  */
2501 
2502 static rtx
2503 rtx_for_function_call (tree fndecl, tree addr)
2504 {
2505   rtx funexp;
2506 
2507   /* Get the function to call, in the form of RTL.  */
2508   if (fndecl)
2509     {
2510       if (!TREE_USED (fndecl) && fndecl != current_function_decl)
2511 	TREE_USED (fndecl) = 1;
2512 
2513       /* Get a SYMBOL_REF rtx for the function address.  */
2514       funexp = XEXP (DECL_RTL (fndecl), 0);
2515     }
2516   else
2517     /* Generate an rtx (probably a pseudo-register) for the address.  */
2518     {
2519       push_temp_slots ();
2520       funexp = expand_normal (addr);
2521       pop_temp_slots ();	/* FUNEXP can't be BLKmode.  */
2522     }
2523   return funexp;
2524 }
2525 
2526 /* Return the static chain for this function, if any.  */
2527 
2528 rtx
2529 rtx_for_static_chain (const_tree fndecl_or_type, bool incoming_p)
2530 {
2531   if (DECL_P (fndecl_or_type) && !DECL_STATIC_CHAIN (fndecl_or_type))
2532     return NULL;
2533 
2534   return targetm.calls.static_chain (fndecl_or_type, incoming_p);
2535 }
2536 
2537 /* Internal state for internal_arg_pointer_based_exp and its helpers.  */
2538 static struct
2539 {
2540   /* Last insn that has been scanned by internal_arg_pointer_based_exp_scan,
2541      or NULL_RTX if none has been scanned yet.  */
2542   rtx_insn *scan_start;
2543   /* Vector indexed by REGNO - FIRST_PSEUDO_REGISTER, recording if a pseudo is
2544      based on crtl->args.internal_arg_pointer.  The element is NULL_RTX if the
2545      pseudo isn't based on it, a CONST_INT offset if the pseudo is based on it
2546      with fixed offset, or PC if this is with variable or unknown offset.  */
2547   vec<rtx> cache;
2548 } internal_arg_pointer_exp_state;
2549 
2550 static rtx internal_arg_pointer_based_exp (const_rtx, bool);
2551 
2552 /* Helper function for internal_arg_pointer_based_exp.  Scan insns in
2553    the tail call sequence, starting with the first insn that hasn't been
2554    scanned yet, and note for each pseudo on the LHS whether it is based
2555    on crtl->args.internal_arg_pointer or not, and what offset from
2556    that pointer it has.  */
2557 
2558 static void
2559 internal_arg_pointer_based_exp_scan (void)
2560 {
2561   rtx_insn *insn, *scan_start = internal_arg_pointer_exp_state.scan_start;
2562 
2563   if (scan_start == NULL_RTX)
2564     insn = get_insns ();
2565   else
2566     insn = NEXT_INSN (scan_start);
2567 
2568   while (insn)
2569     {
2570       rtx set = single_set (insn);
2571       if (set && REG_P (SET_DEST (set)) && !HARD_REGISTER_P (SET_DEST (set)))
2572 	{
2573 	  rtx val = NULL_RTX;
2574 	  unsigned int idx = REGNO (SET_DEST (set)) - FIRST_PSEUDO_REGISTER;
2575 	  /* Punt on pseudos set multiple times.  */
2576 	  if (idx < internal_arg_pointer_exp_state.cache.length ()
2577 	      && (internal_arg_pointer_exp_state.cache[idx]
2578 		  != NULL_RTX))
2579 	    val = pc_rtx;
2580 	  else
2581 	    val = internal_arg_pointer_based_exp (SET_SRC (set), false);
2582 	  if (val != NULL_RTX)
2583 	    {
2584 	      if (idx >= internal_arg_pointer_exp_state.cache.length ())
2585 		internal_arg_pointer_exp_state.cache
2586 		  .safe_grow_cleared (idx + 1);
2587 	      internal_arg_pointer_exp_state.cache[idx] = val;
2588 	    }
2589 	}
2590       if (NEXT_INSN (insn) == NULL_RTX)
2591 	scan_start = insn;
2592       insn = NEXT_INSN (insn);
2593     }
2594 
2595   internal_arg_pointer_exp_state.scan_start = scan_start;
2596 }
2597 
2598 /* Compute whether RTL is based on crtl->args.internal_arg_pointer.  Return
2599    NULL_RTX if RTL isn't based on it, a CONST_INT offset if RTL is based on
2600    it with fixed offset, or PC if this is with variable or unknown offset.
2601    TOPLEVEL is true if the function is invoked at the topmost level.  */
2602 
2603 static rtx
2604 internal_arg_pointer_based_exp (const_rtx rtl, bool toplevel)
2605 {
2606   if (CONSTANT_P (rtl))
2607     return NULL_RTX;
2608 
2609   if (rtl == crtl->args.internal_arg_pointer)
2610     return const0_rtx;
2611 
2612   if (REG_P (rtl) && HARD_REGISTER_P (rtl))
2613     return NULL_RTX;
2614 
2615   poly_int64 offset;
2616   if (GET_CODE (rtl) == PLUS && poly_int_rtx_p (XEXP (rtl, 1), &offset))
2617     {
2618       rtx val = internal_arg_pointer_based_exp (XEXP (rtl, 0), toplevel);
2619       if (val == NULL_RTX || val == pc_rtx)
2620 	return val;
2621       return plus_constant (Pmode, val, offset);
2622     }
2623 
2624   /* When called at the topmost level, scan pseudo assignments in between the
2625      last scanned instruction in the tail call sequence and the latest insn
2626      in that sequence.  */
2627   if (toplevel)
2628     internal_arg_pointer_based_exp_scan ();
2629 
2630   if (REG_P (rtl))
2631     {
2632       unsigned int idx = REGNO (rtl) - FIRST_PSEUDO_REGISTER;
2633       if (idx < internal_arg_pointer_exp_state.cache.length ())
2634 	return internal_arg_pointer_exp_state.cache[idx];
2635 
2636       return NULL_RTX;
2637     }
2638 
2639   subrtx_iterator::array_type array;
2640   FOR_EACH_SUBRTX (iter, array, rtl, NONCONST)
2641     {
2642       const_rtx x = *iter;
2643       if (REG_P (x) && internal_arg_pointer_based_exp (x, false) != NULL_RTX)
2644 	return pc_rtx;
2645       if (MEM_P (x))
2646 	iter.skip_subrtxes ();
2647     }
2648 
2649   return NULL_RTX;
2650 }
2651 
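/* Illustrative example only: suppose pseudo R1 was set from
   crtl->args.internal_arg_pointer and pseudo R2 was set to R1 plus 16.
   After the scan above, the cache records (const_int 0) for R1 and
   (const_int 16) for R2, so internal_arg_pointer_based_exp applied to
   (plus R2 (const_int 8)) returns (const_int 24), a pseudo set from
   something unrelated yields NULL_RTX, and one whose offset cannot be
   determined yields pc_rtx.  */
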
2652 /* Return true if SIZE bytes starting from address ADDR might overlap an
2653    already-clobbered argument area.  This function is used to determine
2654    if we should give up a sibcall.  */
2655 
2656 static bool
2657 mem_might_overlap_already_clobbered_arg_p (rtx addr, poly_uint64 size)
2658 {
2659   poly_int64 i;
2660   unsigned HOST_WIDE_INT start, end;
2661   rtx val;
2662 
2663   if (bitmap_empty_p (stored_args_map)
2664       && stored_args_watermark == HOST_WIDE_INT_M1U)
2665     return false;
2666   val = internal_arg_pointer_based_exp (addr, true);
2667   if (val == NULL_RTX)
2668     return false;
2669   else if (!poly_int_rtx_p (val, &i))
2670     return true;
2671 
2672   if (known_eq (size, 0U))
2673     return false;
2674 
2675   if (STACK_GROWS_DOWNWARD)
2676     i -= crtl->args.pretend_args_size;
2677   else
2678     i += crtl->args.pretend_args_size;
2679 
2680   if (ARGS_GROW_DOWNWARD)
2681     i = -i - size;
2682 
2683   /* We can ignore any references to the function's pretend args,
2684      which at this point would manifest as negative values of I.  */
2685   if (known_le (i, 0) && known_le (size, poly_uint64 (-i)))
2686     return false;
2687 
2688   start = maybe_lt (i, 0) ? 0 : constant_lower_bound (i);
2689   if (!(i + size).is_constant (&end))
2690     end = HOST_WIDE_INT_M1U;
2691 
2692   if (end > stored_args_watermark)
2693     return true;
2694 
2695   end = MIN (end, SBITMAP_SIZE (stored_args_map));
2696   for (unsigned HOST_WIDE_INT k = start; k < end; ++k)
2697     if (bitmap_bit_p (stored_args_map, k))
2698       return true;
2699 
2700   return false;
2701 }
2702 
2703 /* Do the register loads required for any wholly-register parms or any
2704    parms which are passed both on the stack and in a register.  Their
2705    expressions were already evaluated.
2706 
2707    Mark all register-parms as living through the call, putting these USE
2708    insns in the CALL_INSN_FUNCTION_USAGE field.
2709 
2710    When IS_SIBCALL, perform the check_sibcall_argument_overlap
2711    checking, setting *SIBCALL_FAILURE if appropriate.  */
2712 
2713 static void
2714 load_register_parameters (struct arg_data *args, int num_actuals,
2715 			  rtx *call_fusage, int flags, int is_sibcall,
2716 			  int *sibcall_failure)
2717 {
2718   int i, j;
2719 
2720   for (i = 0; i < num_actuals; i++)
2721     {
2722       rtx reg = ((flags & ECF_SIBCALL)
2723 		 ? args[i].tail_call_reg : args[i].reg);
2724       if (reg)
2725 	{
2726 	  int partial = args[i].partial;
2727 	  int nregs;
2728 	  poly_int64 size = 0;
2729 	  HOST_WIDE_INT const_size = 0;
2730 	  rtx_insn *before_arg = get_last_insn ();
2731 	  /* Set non-negative if we must move a word at a time, even if
2732 	     just one word (e.g. partial == 4 && mode == DFmode).  Set
2733 	     to -1 if we just use a normal move insn.  This value can be
2734 	     zero if the argument is a zero size structure.  */
2735 	  nregs = -1;
2736 	  if (GET_CODE (reg) == PARALLEL)
2737 	    ;
2738 	  else if (partial)
2739 	    {
2740 	      gcc_assert (partial % UNITS_PER_WORD == 0);
2741 	      nregs = partial / UNITS_PER_WORD;
2742 	    }
2743 	  else if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode)
2744 	    {
2745 	      /* Variable-sized parameters should be described by a
2746 		 PARALLEL instead.  */
2747 	      const_size = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
2748 	      gcc_assert (const_size >= 0);
2749 	      nregs = (const_size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2750 	      size = const_size;
2751 	    }
2752 	  else
2753 	    size = GET_MODE_SIZE (args[i].mode);
2754 
2755 	  /* Handle calls that pass values in multiple non-contiguous
2756 	     locations.  The Irix 6 ABI has examples of this.  */
2757 
2758 	  if (GET_CODE (reg) == PARALLEL)
2759 	    emit_group_move (reg, args[i].parallel_value);
2760 
2761 	  /* If simple case, just do move.  If normal partial, store_one_arg
2762 	     has already loaded the register for us.  In all other cases,
2763 	     load the register(s) from memory.  */
2764 
2765 	  else if (nregs == -1)
2766 	    {
2767 	      emit_move_insn (reg, args[i].value);
2768 #ifdef BLOCK_REG_PADDING
2769 	      /* Handle the case where we have a value that needs shifting
2770 		 up to the msb, e.g. a QImode value when we are padding
2771 		 upward on a BYTES_BIG_ENDIAN machine.  */
2772 	      if (args[i].locate.where_pad
2773 		  == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD))
2774 		{
2775 		  gcc_checking_assert (ordered_p (size, UNITS_PER_WORD));
2776 		  if (maybe_lt (size, UNITS_PER_WORD))
2777 		    {
2778 		      rtx x;
2779 		      poly_int64 shift
2780 			= (UNITS_PER_WORD - size) * BITS_PER_UNIT;
2781 
2782 		      /* Assigning REG here rather than a temp makes
2783 			 CALL_FUSAGE report the whole reg as used.
2784 			 Strictly speaking, the call only uses SIZE
2785 			 bytes at the msb end, but it doesn't seem worth
2786 			 generating rtl to say that.  */
2787 		      reg = gen_rtx_REG (word_mode, REGNO (reg));
2788 		      x = expand_shift (LSHIFT_EXPR, word_mode,
2789 					reg, shift, reg, 1);
2790 		      if (x != reg)
2791 			emit_move_insn (reg, x);
2792 		    }
2793 		}
2794 #endif
2795 	    }
2796 
2797 	  /* If we have pre-computed the values to put in the registers in
2798 	     the case of non-aligned structures, copy them in now.  */
2799 
2800 	  else if (args[i].n_aligned_regs != 0)
2801 	    for (j = 0; j < args[i].n_aligned_regs; j++)
2802 	      emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
2803 			      args[i].aligned_regs[j]);
2804 
2805 	  else if (partial == 0 || args[i].pass_on_stack)
2806 	    {
2807 	      /* SIZE and CONST_SIZE are 0 for partial arguments and
2808 		 the size of a BLKmode type otherwise.  */
2809 	      gcc_checking_assert (known_eq (size, const_size));
2810 	      rtx mem = validize_mem (copy_rtx (args[i].value));
2811 
2812 	      /* Check for overlap with already clobbered argument area,
2813 	         provided that this has non-zero size.  */
2814 	      if (is_sibcall
2815 		  && const_size != 0
2816 		  && (mem_might_overlap_already_clobbered_arg_p
2817 		      (XEXP (args[i].value, 0), const_size)))
2818 		*sibcall_failure = 1;
2819 
2820 	      if (const_size % UNITS_PER_WORD == 0
2821 		  || MEM_ALIGN (mem) % BITS_PER_WORD == 0)
2822 		move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
2823 	      else
2824 		{
2825 		  if (nregs > 1)
2826 		    move_block_to_reg (REGNO (reg), mem, nregs - 1,
2827 				       args[i].mode);
2828 		  rtx dest = gen_rtx_REG (word_mode, REGNO (reg) + nregs - 1);
2829 		  unsigned int bitoff = (nregs - 1) * BITS_PER_WORD;
2830 		  unsigned int bitsize = const_size * BITS_PER_UNIT - bitoff;
2831 		  rtx x = extract_bit_field (mem, bitsize, bitoff, 1, dest,
2832 					     word_mode, word_mode, false,
2833 					     NULL);
2834 		  if (BYTES_BIG_ENDIAN)
2835 		    x = expand_shift (LSHIFT_EXPR, word_mode, x,
2836 				      BITS_PER_WORD - bitsize, dest, 1);
2837 		  if (x != dest)
2838 		    emit_move_insn (dest, x);
2839 		}
2840 
2841 	      /* Handle a BLKmode that needs shifting.  */
2842 	      if (nregs == 1 && const_size < UNITS_PER_WORD
2843 #ifdef BLOCK_REG_PADDING
2844 		  && args[i].locate.where_pad == PAD_DOWNWARD
2845 #else
2846 		  && BYTES_BIG_ENDIAN
2847 #endif
2848 		  )
2849 		{
2850 		  rtx dest = gen_rtx_REG (word_mode, REGNO (reg));
2851 		  int shift = (UNITS_PER_WORD - const_size) * BITS_PER_UNIT;
2852 		  enum tree_code dir = (BYTES_BIG_ENDIAN
2853 					? RSHIFT_EXPR : LSHIFT_EXPR);
2854 		  rtx x;
2855 
2856 		  x = expand_shift (dir, word_mode, dest, shift, dest, 1);
2857 		  if (x != dest)
2858 		    emit_move_insn (dest, x);
2859 		}
2860 	    }
2861 
2862 	  /* When a parameter is a block, and perhaps in other cases, it is
2863 	     possible that it did a load from an argument slot that was
2864 	     already clobbered.  */
2865 	  if (is_sibcall
2866 	      && check_sibcall_argument_overlap (before_arg, &args[i], 0))
2867 	    *sibcall_failure = 1;
2868 
2869 	  /* Handle calls that pass values in multiple non-contiguous
2870 	     locations.  The Irix 6 ABI has examples of this.  */
2871 	  if (GET_CODE (reg) == PARALLEL)
2872 	    use_group_regs (call_fusage, reg);
2873 	  else if (nregs == -1)
2874 	    use_reg_mode (call_fusage, reg,
2875 			  TYPE_MODE (TREE_TYPE (args[i].tree_value)));
2876 	  else if (nregs > 0)
2877 	    use_regs (call_fusage, REGNO (reg), nregs);
2878 	}
2879     }
2880 }
2881 
2882 /* We need to pop PENDING_STACK_ADJUST bytes.  But, if the arguments
2883    wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
2884    bytes, then we would need to push some additional bytes to pad the
2885    arguments.  So, we try to compute an adjustment to the stack pointer for
2886    amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
2887    bytes.  Then, when the arguments are pushed the stack will be perfectly
2888    aligned.
2889 
2890    Return true if this optimization is possible, storing the adjustment
2891    in ADJUSTMENT_OUT and setting ARGS_SIZE->CONSTANT to the number of
2892    bytes that should be popped after the call.  */
2893 
2894 static bool
2895 combine_pending_stack_adjustment_and_call (poly_int64_pod *adjustment_out,
2896 					   poly_int64 unadjusted_args_size,
2897 					   struct args_size *args_size,
2898 					   unsigned int preferred_unit_stack_boundary)
2899 {
2900   /* The number of bytes to pop so that the stack will be
2901      under-aligned by UNADJUSTED_ARGS_SIZE bytes.  */
2902   poly_int64 adjustment;
2903   /* The alignment of the stack after the arguments are pushed, if we
2904      just pushed the arguments without adjusting the stack here.  */
2905   unsigned HOST_WIDE_INT unadjusted_alignment;
2906 
2907   if (!known_misalignment (stack_pointer_delta + unadjusted_args_size,
2908 			   preferred_unit_stack_boundary,
2909 			   &unadjusted_alignment))
2910     return false;
2911 
2912   /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
2913      as possible -- leaving just enough left to cancel out the
2914      UNADJUSTED_ALIGNMENT.  In other words, we want to ensure that the
2915      PENDING_STACK_ADJUST is non-negative, and congruent to
2916      -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY.  */
2917 
2918   /* Begin by trying to pop all the bytes.  */
2919   unsigned HOST_WIDE_INT tmp_misalignment;
2920   if (!known_misalignment (pending_stack_adjust,
2921 			   preferred_unit_stack_boundary,
2922 			   &tmp_misalignment))
2923     return false;
2924   unadjusted_alignment -= tmp_misalignment;
2925   adjustment = pending_stack_adjust;
2926   /* Push enough additional bytes that the stack will be aligned
2927      after the arguments are pushed.  */
2928   if (preferred_unit_stack_boundary > 1 && unadjusted_alignment)
2929     adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;
2930 
2931   /* We need to know whether the adjusted argument size
2932      (UNADJUSTED_ARGS_SIZE - ADJUSTMENT) constitutes an allocation
2933      or a deallocation.  */
2934   if (!ordered_p (adjustment, unadjusted_args_size))
2935     return false;
2936 
2937   /* Now, set ARGS_SIZE->CONSTANT so that we pop the right number of
2938      bytes after the call.  The right number is the entire
2939      PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
2940      by the arguments in the first place.  */
2941   args_size->constant
2942     = pending_stack_adjust - adjustment + unadjusted_args_size;
2943 
2944   *adjustment_out = adjustment;
2945   return true;
2946 }
2947 
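/* Worked example (assumed values, for illustration only): with a
   preferred unit stack boundary of 16 bytes, a zero stack_pointer_delta,
   UNADJUSTED_ARGS_SIZE of 12 and a pending_stack_adjust of 20, the
   function above chooses an adjustment of 12: popping 12 bytes now and
   then pushing the 12 bytes of arguments leaves the stack 16-byte
   aligned at the call, and ARGS_SIZE->CONSTANT becomes 20, so the
   remaining 8 pending bytes plus the 12 argument bytes are popped
   after the call.  */
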
2948 /* Scan expression X to see whether it dereferences any argument slots
2949    that we have already clobbered with tail call arguments (as noted
2950    in the stored_args_map bitmap).
2951    Return nonzero if X dereferences such an argument slot,
2952    zero otherwise.  */
2953 
2954 static int
2955 check_sibcall_argument_overlap_1 (rtx x)
2956 {
2957   RTX_CODE code;
2958   int i, j;
2959   const char *fmt;
2960 
2961   if (x == NULL_RTX)
2962     return 0;
2963 
2964   code = GET_CODE (x);
2965 
2966   /* We need not check the operands of the CALL expression itself.  */
2967   if (code == CALL)
2968     return 0;
2969 
2970   if (code == MEM)
2971     return (mem_might_overlap_already_clobbered_arg_p
2972 	    (XEXP (x, 0), GET_MODE_SIZE (GET_MODE (x))));
2973 
2974   /* Scan all subexpressions.  */
2975   fmt = GET_RTX_FORMAT (code);
2976   for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
2977     {
2978       if (*fmt == 'e')
2979 	{
2980 	  if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
2981 	    return 1;
2982 	}
2983       else if (*fmt == 'E')
2984 	{
2985 	  for (j = 0; j < XVECLEN (x, i); j++)
2986 	    if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
2987 	      return 1;
2988 	}
2989     }
2990   return 0;
2991 }
2992 
2993 /* Scan the sequence after INSN to see whether it dereferences any argument
2994    slots that we have already clobbered with tail call arguments (as noted
2995    in the stored_args_map bitmap).  If MARK_STORED_ARGS_MAP, afterwards add
2996    the stack slots for ARG to the stored_args_map bitmap (when ARG is a
2997    register, MARK_STORED_ARGS_MAP should be 0).  Return nonzero if the
2998    sequence after INSN dereferences such an argument slot, zero otherwise.  */
2999 
3000 static int
3001 check_sibcall_argument_overlap (rtx_insn *insn, struct arg_data *arg,
3002 				int mark_stored_args_map)
3003 {
3004   poly_uint64 low, high;
3005   unsigned HOST_WIDE_INT const_low, const_high;
3006 
3007   if (insn == NULL_RTX)
3008     insn = get_insns ();
3009   else
3010     insn = NEXT_INSN (insn);
3011 
3012   for (; insn; insn = NEXT_INSN (insn))
3013     if (INSN_P (insn)
3014 	&& check_sibcall_argument_overlap_1 (PATTERN (insn)))
3015       break;
3016 
3017   if (mark_stored_args_map)
3018     {
3019       if (ARGS_GROW_DOWNWARD)
3020 	low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
3021       else
3022 	low = arg->locate.slot_offset.constant;
3023       high = low + arg->locate.size.constant;
3024 
3025       const_low = constant_lower_bound (low);
3026       if (high.is_constant (&const_high))
3027 	for (unsigned HOST_WIDE_INT i = const_low; i < const_high; ++i)
3028 	  bitmap_set_bit (stored_args_map, i);
3029       else
3030 	stored_args_watermark = MIN (stored_args_watermark, const_low);
3031     }
3032   return insn != NULL_RTX;
3033 }
3034 
3035 /* Given that a function returns a value of mode MODE at the most
3036    significant end of hard register VALUE, shift VALUE left or right
3037    as specified by LEFT_P.  Return true if some action was needed.  */
3038 
3039 bool
3040 shift_return_value (machine_mode mode, bool left_p, rtx value)
3041 {
3042   gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
3043   machine_mode value_mode = GET_MODE (value);
3044   poly_int64 shift = GET_MODE_BITSIZE (value_mode) - GET_MODE_BITSIZE (mode);
3045 
3046   if (known_eq (shift, 0))
3047     return false;
3048 
3049   /* Use ashr rather than lshr for right shifts.  This is for the benefit
3050      of the MIPS port, which requires SImode values to be sign-extended
3051      when stored in 64-bit registers.  */
3052   if (!force_expand_binop (value_mode, left_p ? ashl_optab : ashr_optab,
3053 			   value, gen_int_shift_amount (value_mode, shift),
3054 			   value, 1, OPTAB_WIDEN))
3055     gcc_unreachable ();
3056   return true;
3057 }
3058 
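/* Illustrative example only: for an SImode value held at the most
   significant end of a 64-bit (DImode) hard register, the shift amount
   is 64 - 32 = 32 bits; with LEFT_P false an arithmetic right shift by
   32 moves the value to the least significant end.  */
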
3059 /* If X is a likely-spilled register value, copy it to a pseudo
3060    register and return that register.  Return X otherwise.  */
3061 
3062 static rtx
3063 avoid_likely_spilled_reg (rtx x)
3064 {
3065   rtx new_rtx;
3066 
3067   if (REG_P (x)
3068       && HARD_REGISTER_P (x)
3069       && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (x))))
3070     {
3071       /* Make sure that we generate a REG rather than a CONCAT.
3072 	 Moves into CONCATs can need nontrivial instructions,
3073 	 and the whole point of this function is to avoid
3074 	 using the hard register directly in such a situation.  */
3075       generating_concat_p = 0;
3076       new_rtx = gen_reg_rtx (GET_MODE (x));
3077       generating_concat_p = 1;
3078       emit_move_insn (new_rtx, x);
3079       return new_rtx;
3080     }
3081   return x;
3082 }
3083 
3084 /* Helper function for expand_call.
3085    Return false if EXP is not implementable as a sibling call.  */
3086 
3087 static bool
3088 can_implement_as_sibling_call_p (tree exp,
3089 				 rtx structure_value_addr,
3090 				 tree funtype,
3091 				 int reg_parm_stack_space ATTRIBUTE_UNUSED,
3092 				 tree fndecl,
3093 				 int flags,
3094 				 tree addr,
3095 				 const args_size &args_size)
3096 {
3097   if (!targetm.have_sibcall_epilogue ())
3098     {
3099       maybe_complain_about_tail_call
3100 	(exp,
3101 	 "machine description does not have"
3102 	 " a sibcall_epilogue instruction pattern");
3103       return false;
3104     }
3105 
3106   /* Doing sibling call optimization needs some work, since
3107      structure_value_addr can be allocated on the stack.
3108      It does not seem worth the effort since few optimizable
3109      sibling calls will return a structure.  */
3110   if (structure_value_addr != NULL_RTX)
3111     {
3112       maybe_complain_about_tail_call (exp, "callee returns a structure");
3113       return false;
3114     }
3115 
3116 #ifdef REG_PARM_STACK_SPACE
3117   /* If outgoing reg parm stack space changes, we cannot do a sibcall.  */
3118   if (OUTGOING_REG_PARM_STACK_SPACE (funtype)
3119       != OUTGOING_REG_PARM_STACK_SPACE (TREE_TYPE (current_function_decl))
3120       || (reg_parm_stack_space != REG_PARM_STACK_SPACE (current_function_decl)))
3121     {
3122       maybe_complain_about_tail_call (exp,
3123 				      "inconsistent size of stack space"
3124 				      " allocated for arguments which are"
3125 				      " passed in registers");
3126       return false;
3127     }
3128 #endif
3129 
3130   /* Check whether the target is able to optimize the call
3131      into a sibcall.  */
3132   if (!targetm.function_ok_for_sibcall (fndecl, exp))
3133     {
3134       maybe_complain_about_tail_call (exp,
3135 				      "target is not able to optimize the"
3136 				      " call into a sibling call");
3137       return false;
3138     }
3139 
3140   /* Functions that do not return exactly once may not be sibcall
3141      optimized.  */
3142   if (flags & ECF_RETURNS_TWICE)
3143     {
3144       maybe_complain_about_tail_call (exp, "callee returns twice");
3145       return false;
3146     }
3147   if (flags & ECF_NORETURN)
3148     {
3149       maybe_complain_about_tail_call (exp, "callee does not return");
3150       return false;
3151     }
3152 
3153   if (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr))))
3154     {
3155       maybe_complain_about_tail_call (exp, "volatile function type");
3156       return false;
3157     }
3158 
3159   /* If the called function is nested in the current one, it might access
3160      some of the caller's arguments, but could clobber them beforehand if
3161      the argument areas are shared.  */
3162   if (fndecl && decl_function_context (fndecl) == current_function_decl)
3163     {
3164       maybe_complain_about_tail_call (exp, "nested function");
3165       return false;
3166     }
3167 
3168   /* If this function requires more stack slots than the current
3169      function, we cannot change it into a sibling call.
3170      crtl->args.pretend_args_size is not part of the
3171      stack allocated by our caller.  */
3172   if (maybe_gt (args_size.constant,
3173 		crtl->args.size - crtl->args.pretend_args_size))
3174     {
3175       maybe_complain_about_tail_call (exp,
3176 				      "callee required more stack slots"
3177 				      " than the caller");
3178       return false;
3179     }
3180 
3181   /* If the callee pops its own arguments, then it must pop exactly
3182      the same number of arguments as the current function.  */
3183   if (maybe_ne (targetm.calls.return_pops_args (fndecl, funtype,
3184 						args_size.constant),
3185 		targetm.calls.return_pops_args (current_function_decl,
3186 						TREE_TYPE
3187 						(current_function_decl),
3188 						crtl->args.size)))
3189     {
3190       maybe_complain_about_tail_call (exp,
3191 				      "inconsistent number of"
3192 				      " popped arguments");
3193       return false;
3194     }
3195 
3196   if (!lang_hooks.decls.ok_for_sibcall (fndecl))
3197     {
3198       maybe_complain_about_tail_call (exp, "frontend does not support"
3199 					    " sibling call");
3200       return false;
3201     }
3202 
3203   /* All checks passed.  */
3204   return true;
3205 }
3206 
3207 /* Generate all the code for a CALL_EXPR exp
3208    and return an rtx for its value.
3209    Store the value in TARGET (specified as an rtx) if convenient.
3210    If the value is stored in TARGET then TARGET is returned.
3211    If IGNORE is nonzero, then we ignore the value of the function call.  */
3212 
3213 rtx
3214 expand_call (tree exp, rtx target, int ignore)
3215 {
3216   /* Nonzero if we are currently expanding a call.  */
3217   static int currently_expanding_call = 0;
3218 
3219   /* RTX for the function to be called.  */
3220   rtx funexp;
3221   /* Sequence of insns to perform a normal "call".  */
3222   rtx_insn *normal_call_insns = NULL;
3223   /* Sequence of insns to perform a tail "call".  */
3224   rtx_insn *tail_call_insns = NULL;
3225   /* Data type of the function.  */
3226   tree funtype;
3227   tree type_arg_types;
3228   tree rettype;
3229   /* Declaration of the function being called,
3230      or 0 if the function is computed (not known by name).  */
3231   tree fndecl = 0;
3232   /* The type of the function being called.  */
3233   tree fntype;
3234   bool try_tail_call = CALL_EXPR_TAILCALL (exp);
3235   bool must_tail_call = CALL_EXPR_MUST_TAIL_CALL (exp);
3236   int pass;
3237 
3238   /* Register in which non-BLKmode value will be returned,
3239      or 0 if no value or if value is BLKmode.  */
3240   rtx valreg;
3241   /* Register(s) in which bounds are returned.  */
3242   rtx valbnd = NULL;
3243   /* Address where we should return a BLKmode value;
3244      0 if value not BLKmode.  */
3245   rtx structure_value_addr = 0;
3246   /* Nonzero if that address is being passed by treating it as
3247      an extra, implicit first parameter.  Otherwise,
3248      it is passed by being copied directly into struct_value_rtx.  */
3249   int structure_value_addr_parm = 0;
3250   /* Holds the value of implicit argument for the struct value.  */
3251   tree structure_value_addr_value = NULL_TREE;
3252   /* Size of aggregate value wanted, or zero if none wanted
3253      or if we are using the non-reentrant PCC calling convention
3254      or expecting the value in registers.  */
3255   poly_int64 struct_value_size = 0;
3256   /* Nonzero if called function returns an aggregate in memory PCC style,
3257      by returning the address of where to find it.  */
3258   int pcc_struct_value = 0;
3259   rtx struct_value = 0;
3260 
3261   /* Number of actual parameters in this call, including struct value addr.  */
3262   int num_actuals;
3263   /* Number of named args.  Args after this are anonymous ones
3264      and they must all go on the stack.  */
3265   int n_named_args;
3266   /* Number of complex actual arguments that need to be split.  */
3267   int num_complex_actuals = 0;
3268 
3269   /* Vector of information about each argument.
3270      Arguments are numbered in the order they will be pushed,
3271      not the order they are written.  */
3272   struct arg_data *args;
3273 
3274   /* Total size in bytes of all the stack-parms scanned so far.  */
3275   struct args_size args_size;
3276   struct args_size adjusted_args_size;
3277   /* Size of arguments before any adjustments (such as rounding).  */
3278   poly_int64 unadjusted_args_size;
3279   /* Data on reg parms scanned so far.  */
3280   CUMULATIVE_ARGS args_so_far_v;
3281   cumulative_args_t args_so_far;
3282   /* Nonzero if a reg parm has been scanned.  */
3283   int reg_parm_seen;
3284   /* Nonzero if this is an indirect function call.  */
3285 
3286   /* Nonzero if we must avoid push-insns in the args for this call.
3287      If stack space is allocated for register parameters, but not by the
3288      caller, then it is preallocated in the fixed part of the stack frame.
3289      So the entire argument block must then be preallocated (i.e., we
3290      ignore PUSH_ROUNDING in that case).  */
3291 
3292   int must_preallocate = !PUSH_ARGS;
3293 
3294   /* Size of the stack reserved for parameter registers.  */
3295   int reg_parm_stack_space = 0;
3296 
3297   /* Address of space preallocated for stack parms
3298      (on machines that lack push insns), or 0 if space not preallocated.  */
3299   rtx argblock = 0;
3300 
3301   /* Mask of ECF_ and ERF_ flags.  */
3302   int flags = 0;
3303   int return_flags = 0;
3304 #ifdef REG_PARM_STACK_SPACE
3305   /* Define the boundary of the register parm stack space that needs to be
3306      saved, if any.  */
3307   int low_to_save, high_to_save;
3308   rtx save_area = 0;		/* Place that it is saved */
3309 #endif
3310 
3311   unsigned int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
3312   char *initial_stack_usage_map = stack_usage_map;
3313   unsigned HOST_WIDE_INT initial_stack_usage_watermark = stack_usage_watermark;
3314   char *stack_usage_map_buf = NULL;
3315 
3316   poly_int64 old_stack_allocated;
3317 
3318   /* State variables to track stack modifications.  */
3319   rtx old_stack_level = 0;
3320   int old_stack_arg_under_construction = 0;
3321   poly_int64 old_pending_adj = 0;
3322   int old_inhibit_defer_pop = inhibit_defer_pop;
3323 
3324   /* Some stack pointer alterations we make are performed via
3325      allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
3326      which we then also need to save/restore along the way.  */
3327   poly_int64 old_stack_pointer_delta = 0;
3328 
3329   rtx call_fusage;
3330   tree addr = CALL_EXPR_FN (exp);
3331   int i;
3332   /* The alignment of the stack, in bits.  */
3333   unsigned HOST_WIDE_INT preferred_stack_boundary;
3334   /* The alignment of the stack, in bytes.  */
3335   unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
3336   /* The static chain value to use for this call.  */
3337   rtx static_chain_value;
3338   /* See if this is "nothrow" function call.  */
3339   if (TREE_NOTHROW (exp))
3340     flags |= ECF_NOTHROW;
3341 
3342   /* See if we can find a DECL-node for the actual function, and get the
3343      function attributes (flags) from the function decl or type node.  */
3344   fndecl = get_callee_fndecl (exp);
3345   if (fndecl)
3346     {
3347       fntype = TREE_TYPE (fndecl);
3348       flags |= flags_from_decl_or_type (fndecl);
3349       return_flags |= decl_return_flags (fndecl);
3350     }
3351   else
3352     {
3353       fntype = TREE_TYPE (TREE_TYPE (addr));
3354       flags |= flags_from_decl_or_type (fntype);
3355       if (CALL_EXPR_BY_DESCRIPTOR (exp))
3356 	flags |= ECF_BY_DESCRIPTOR;
3357     }
3358   rettype = TREE_TYPE (exp);
3359 
3360   struct_value = targetm.calls.struct_value_rtx (fntype, 0);
3361 
3362   /* Warn if this value is an aggregate type,
3363      regardless of which calling convention we are using for it.  */
3364   if (AGGREGATE_TYPE_P (rettype))
3365     warning (OPT_Waggregate_return, "function call has aggregate value");
3366 
3367   /* If the result of a non-looping pure or const function call is
3368      ignored (or void), and none of its arguments are volatile, we can
3369      avoid expanding the call and just evaluate the arguments for
3370      side-effects.  */
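  /* For example, for "(void) pure_fn (x++)" only the increment of x need be
     expanded; the call itself can be dropped.  */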
3371   if ((flags & (ECF_CONST | ECF_PURE))
3372       && (!(flags & ECF_LOOPING_CONST_OR_PURE))
3373       && (ignore || target == const0_rtx
3374 	  || TYPE_MODE (rettype) == VOIDmode))
3375     {
3376       bool volatilep = false;
3377       tree arg;
3378       call_expr_arg_iterator iter;
3379 
3380       FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
3381 	if (TREE_THIS_VOLATILE (arg))
3382 	  {
3383 	    volatilep = true;
3384 	    break;
3385 	  }
3386 
3387       if (! volatilep)
3388 	{
3389 	  FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
3390 	    expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
3391 	  return const0_rtx;
3392 	}
3393     }
3394 
3395 #ifdef REG_PARM_STACK_SPACE
3396   reg_parm_stack_space = REG_PARM_STACK_SPACE (!fndecl ? fntype : fndecl);
3397 #endif
3398 
3399   if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
3400       && reg_parm_stack_space > 0 && PUSH_ARGS)
3401     must_preallocate = 1;
3402 
3403   /* Set up a place to return a structure.  */
3404 
3405   /* Cater to broken compilers.  */
3406   if (aggregate_value_p (exp, fntype))
3407     {
3408       /* This call returns a big structure.  */
3409       flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
3410 
3411 #ifdef PCC_STATIC_STRUCT_RETURN
3412       {
3413 	pcc_struct_value = 1;
3414       }
3415 #else /* not PCC_STATIC_STRUCT_RETURN */
3416       {
3417 	if (!poly_int_tree_p (TYPE_SIZE_UNIT (rettype), &struct_value_size))
3418 	  struct_value_size = -1;
3419 
3420 	/* Even if it is semantically safe to use the target as the return
3421 	   slot, it may not be sufficiently aligned for the return type.  */
3422 	if (CALL_EXPR_RETURN_SLOT_OPT (exp)
3423 	    && target
3424 	    && MEM_P (target)
3425 	    /* If rettype is addressable, we may not create a temporary.
3426 	       If target is properly aligned at runtime and the compiler
3427 	       just doesn't know about it, it will work fine, otherwise it
3428 	       will be UB.  */
3429 	    && (TREE_ADDRESSABLE (rettype)
3430 		|| !(MEM_ALIGN (target) < TYPE_ALIGN (rettype)
3431 		     && targetm.slow_unaligned_access (TYPE_MODE (rettype),
3432 						       MEM_ALIGN (target)))))
3433 	  structure_value_addr = XEXP (target, 0);
3434 	else
3435 	  {
3436 	    /* For variable-sized objects, we must be called with a target
3437 	       specified.  If we were to allocate space on the stack here,
3438 	       we would have no way of knowing when to free it.  */
3439 	    rtx d = assign_temp (rettype, 1, 1);
3440 	    structure_value_addr = XEXP (d, 0);
3441 	    target = 0;
3442 	  }
3443       }
3444 #endif /* not PCC_STATIC_STRUCT_RETURN */
3445     }
3446 
3447   /* Figure out the amount to which the stack should be aligned.  */
3448   preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
3449   if (fndecl)
3450     {
3451       struct cgraph_rtl_info *i = cgraph_node::rtl_info (fndecl);
3452       /* Without automatic stack alignment, we can't increase preferred
3453 	 stack boundary.  With automatic stack alignment, it is
3454 	 unnecessary, since unless we can guarantee that all callers will
3455 	 align the outgoing stack properly, the callee has to align its
3456 	 stack anyway.  */
3457       if (i
3458 	  && i->preferred_incoming_stack_boundary
3459 	  && i->preferred_incoming_stack_boundary < preferred_stack_boundary)
3460 	preferred_stack_boundary = i->preferred_incoming_stack_boundary;
3461     }
3462 
3463   /* Operand 0 is a pointer-to-function; get the type of the function.  */
3464   funtype = TREE_TYPE (addr);
3465   gcc_assert (POINTER_TYPE_P (funtype));
3466   funtype = TREE_TYPE (funtype);
3467 
3468   /* Count whether there are actual complex arguments that need to be split
3469      into their real and imaginary parts.  Munge the type_arg_types
3470      appropriately here as well.  */
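  /* For instance, when the target splits complex arguments, a parameter
     list of (_Complex double, int) gives num_complex_actuals == 1 and a
     type_arg_types chain of (double, double, int).  */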
3471   if (targetm.calls.split_complex_arg)
3472     {
3473       call_expr_arg_iterator iter;
3474       tree arg;
3475       FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
3476 	{
3477 	  tree type = TREE_TYPE (arg);
3478 	  if (type && TREE_CODE (type) == COMPLEX_TYPE
3479 	      && targetm.calls.split_complex_arg (type))
3480 	    num_complex_actuals++;
3481 	}
3482       type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
3483     }
3484   else
3485     type_arg_types = TYPE_ARG_TYPES (funtype);
3486 
3487   if (flags & ECF_MAY_BE_ALLOCA)
3488     cfun->calls_alloca = 1;
3489 
3490   /* If struct_value_rtx is 0, it means pass the address
3491      as if it were an extra parameter.  Put the argument expression
3492      in structure_value_addr_value.  */
3493   if (structure_value_addr && struct_value == 0)
3494     {
3495       /* If structure_value_addr is a REG other than
3496 	 virtual_outgoing_args_rtx, we can always use it.  If it
3497 	 is not a REG, we must always copy it into a register.
3498 	 If it is virtual_outgoing_args_rtx, we must copy it to another
3499 	 register in some cases.  */
3500       rtx temp = (!REG_P (structure_value_addr)
3501 		  || (ACCUMULATE_OUTGOING_ARGS
3502 		      && stack_arg_under_construction
3503 		      && structure_value_addr == virtual_outgoing_args_rtx)
3504 		  ? copy_addr_to_reg (convert_memory_address
3505 				      (Pmode, structure_value_addr))
3506 		  : structure_value_addr);
3507 
3508       structure_value_addr_value =
3509 	make_tree (build_pointer_type (TREE_TYPE (funtype)), temp);
3510       structure_value_addr_parm = CALL_WITH_BOUNDS_P (exp) ? 2 : 1;
3511     }
3512 
3513   /* Count the arguments and set NUM_ACTUALS.  */
3514   num_actuals =
3515     call_expr_nargs (exp) + num_complex_actuals + structure_value_addr_parm;
3516 
3517   /* Compute number of named args.
3518      First, do a raw count of the args for INIT_CUMULATIVE_ARGS.  */
3519 
3520   if (type_arg_types != 0)
3521     n_named_args
3522       = (list_length (type_arg_types)
3523 	 /* Count the struct value address, if it is passed as a parm.  */
3524 	 + structure_value_addr_parm);
3525   else
3526     /* If we know nothing, treat all args as named.  */
3527     n_named_args = num_actuals;
3528 
3529   /* Start updating where the next arg would go.
3530 
3531      On some machines (such as the PA) indirect calls have a different
3532      calling convention than normal calls.  The fourth argument in
3533      INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
3534      or not.  */
3535   INIT_CUMULATIVE_ARGS (args_so_far_v, funtype, NULL_RTX, fndecl, n_named_args);
3536   args_so_far = pack_cumulative_args (&args_so_far_v);
3537 
3538   /* Now possibly adjust the number of named args.
3539      Normally, don't include the last named arg if anonymous args follow.
3540      We do include the last named arg if
3541      targetm.calls.strict_argument_naming() returns nonzero.
3542      (If no anonymous args follow, the result of list_length is actually
3543      one too large.  This is harmless.)
3544 
3545      If targetm.calls.pretend_outgoing_varargs_named() returns
3546      nonzero, and targetm.calls.strict_argument_naming() returns zero,
3547      this machine will be able to place unnamed args that were passed
3548      in registers into the stack.  So treat all args as named.  This
3549      allows the insns emitted for a specific argument list to be
3550      independent of the function declaration.
3551 
3552      If targetm.calls.pretend_outgoing_varargs_named() returns zero,
3553      we do not have any reliable way to pass unnamed args in
3554      registers, so we must force them into memory.  */
3555 
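  /* For example, for "int f (int a, ...)" called with three actuals the raw
     count above gives n_named_args == 1; it stays 1 under strict argument
     naming, becomes 3 when unnamed args may be pretended named, and
     otherwise drops to 0.  */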
3556   if (type_arg_types != 0
3557       && targetm.calls.strict_argument_naming (args_so_far))
3558     ;
3559   else if (type_arg_types != 0
3560 	   && ! targetm.calls.pretend_outgoing_varargs_named (args_so_far))
3561     /* Don't include the last named arg.  */
3562     --n_named_args;
3563   else
3564     /* Treat all args as named.  */
3565     n_named_args = num_actuals;
3566 
3567   /* Make a vector to hold all the information about each arg.  */
3568   args = XCNEWVEC (struct arg_data, num_actuals);
3569 
3570   /* Build up entries in the ARGS array, compute the size of the
3571      arguments into ARGS_SIZE, etc.  */
3572   initialize_argument_information (num_actuals, args, &args_size,
3573 				   n_named_args, exp,
3574 				   structure_value_addr_value, fndecl, fntype,
3575 				   args_so_far, reg_parm_stack_space,
3576 				   &old_stack_level, &old_pending_adj,
3577 				   &must_preallocate, &flags,
3578 				   &try_tail_call, CALL_FROM_THUNK_P (exp));
3579 
3580   if (args_size.var)
3581     must_preallocate = 1;
3582 
3583   /* Now make final decision about preallocating stack space.  */
3584   must_preallocate = finalize_must_preallocate (must_preallocate,
3585 						num_actuals, args,
3586 						&args_size);
3587 
3588   /* If the structure value address will reference the stack pointer, we
3589      must stabilize it.  We don't need to do this if we know that we are
3590      not going to adjust the stack pointer in processing this call.  */
3591 
3592   if (structure_value_addr
3593       && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
3594 	  || reg_mentioned_p (virtual_outgoing_args_rtx,
3595 			      structure_value_addr))
3596       && (args_size.var
3597 	  || (!ACCUMULATE_OUTGOING_ARGS
3598 	      && maybe_ne (args_size.constant, 0))))
3599     structure_value_addr = copy_to_reg (structure_value_addr);
3600 
3601   /* Tail calls can make things harder to debug, and we've traditionally
3602      pushed these optimizations into -O2.  Don't try if we're already
3603      expanding a call, as that means we're an argument.  Don't try if
3604      there are cleanups, as we know there's code to follow the call.  */
3605 
3606   if (currently_expanding_call++ != 0
3607       || !flag_optimize_sibling_calls
3608       || args_size.var
3609       || dbg_cnt (tail_call) == false)
3610     try_tail_call = 0;
3611 
3612   /* If the user has marked the function as requiring tail-call
3613      optimization, attempt it.  */
3614   if (must_tail_call)
3615     try_tail_call = 1;
3616 
3617   /* Check the remaining reasons for which tail call optimization can fail.  */
3618   if (try_tail_call)
3619     try_tail_call = can_implement_as_sibling_call_p (exp,
3620 						     structure_value_addr,
3621 						     funtype,
3622 						     reg_parm_stack_space,
3623 						     fndecl,
3624 						     flags, addr, args_size);
3625 
3626   /* Check if caller and callee disagree in promotion of function
3627      return value.  */
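  /* (In a sibling call the callee's return value is passed straight through
     to our own caller, so it must already be promoted the way that caller
     expects it.)  */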
3628   if (try_tail_call)
3629     {
3630       machine_mode caller_mode, caller_promoted_mode;
3631       machine_mode callee_mode, callee_promoted_mode;
3632       int caller_unsignedp, callee_unsignedp;
3633       tree caller_res = DECL_RESULT (current_function_decl);
3634 
3635       caller_unsignedp = TYPE_UNSIGNED (TREE_TYPE (caller_res));
3636       caller_mode = DECL_MODE (caller_res);
3637       callee_unsignedp = TYPE_UNSIGNED (TREE_TYPE (funtype));
3638       callee_mode = TYPE_MODE (TREE_TYPE (funtype));
3639       caller_promoted_mode
3640 	= promote_function_mode (TREE_TYPE (caller_res), caller_mode,
3641 				 &caller_unsignedp,
3642 				 TREE_TYPE (current_function_decl), 1);
3643       callee_promoted_mode
3644 	= promote_function_mode (TREE_TYPE (funtype), callee_mode,
3645 				 &callee_unsignedp,
3646 				 funtype, 1);
3647       if (caller_mode != VOIDmode
3648 	  && (caller_promoted_mode != callee_promoted_mode
3649 	      || ((caller_mode != caller_promoted_mode
3650 		   || callee_mode != callee_promoted_mode)
3651 		  && (caller_unsignedp != callee_unsignedp
3652 		      || partial_subreg_p (caller_mode, callee_mode)))))
3653 	{
3654 	  try_tail_call = 0;
3655 	  maybe_complain_about_tail_call (exp,
3656 					  "caller and callee disagree in"
3657 					  " promotion of function"
3658 					  " return value");
3659 	}
3660     }
3661 
3662   /* Ensure current function's preferred stack boundary is at least
3663      what we need.  Stack alignment may also increase preferred stack
3664      boundary.  */
3665   if (crtl->preferred_stack_boundary < preferred_stack_boundary)
3666     crtl->preferred_stack_boundary = preferred_stack_boundary;
3667   else
3668     preferred_stack_boundary = crtl->preferred_stack_boundary;
3669 
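  /* E.g. a preferred boundary of 128 bits corresponds to 16 bytes here.  */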
3670   preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
3671 
3672   /* We want to make two insn chains; one for a sibling call, the other
3673      for a normal call.  We will select one of the two chains after
3674      initial RTL generation is complete.  */
3675   for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
3676     {
3677       int sibcall_failure = 0;
3678       /* We want to emit any pending stack adjustments before the tail
3679 	 recursion "call".  That way we know any adjustment after the tail
3680 	 recursion call can be ignored if we indeed use the tail
3681 	 call expansion.  */
3682       saved_pending_stack_adjust save;
3683       rtx_insn *insns, *before_call, *after_args;
3684       rtx next_arg_reg;
3685 
3686       if (pass == 0)
3687 	{
3688 	  /* State variables we need to save and restore between
3689 	     iterations.  */
3690 	  save_pending_stack_adjust (&save);
3691 	}
3692       if (pass)
3693 	flags &= ~ECF_SIBCALL;
3694       else
3695 	flags |= ECF_SIBCALL;
3696 
3697       /* Other state variables that we must reinitialize each time
3698 	 through the loop (that are not initialized by the loop itself).  */
3699       argblock = 0;
3700       call_fusage = 0;
3701 
3702       /* Start a new sequence for the normal call case.
3703 
3704 	 From this point on, if the sibling call fails, we want to set
3705 	 sibcall_failure instead of continuing the loop.  */
3706       start_sequence ();
3707 
3708       /* Don't let pending stack adjusts add up to too much.
3709 	 Also, do all pending adjustments now if there is any chance
3710 	 this might be a call to alloca or if we are expanding a sibling
3711 	 call sequence.
3712 	 Also do the adjustments before a throwing call, otherwise
3713 	 exception handling can fail; PR 19225. */
3714       if (maybe_ge (pending_stack_adjust, 32)
3715 	  || (maybe_ne (pending_stack_adjust, 0)
3716 	      && (flags & ECF_MAY_BE_ALLOCA))
3717 	  || (maybe_ne (pending_stack_adjust, 0)
3718 	      && flag_exceptions && !(flags & ECF_NOTHROW))
3719 	  || pass == 0)
3720 	do_pending_stack_adjust ();
3721 
3722       /* Precompute any arguments as needed.  */
3723       if (pass)
3724 	precompute_arguments (num_actuals, args);
3725 
3726       /* Now we are about to start emitting insns that can be deleted
3727 	 if a libcall is deleted.  */
3728       if (pass && (flags & ECF_MALLOC))
3729 	start_sequence ();
3730 
3731       if (pass == 0
3732 	  && crtl->stack_protect_guard
3733 	  && targetm.stack_protect_runtime_enabled_p ())
3734 	stack_protect_epilogue ();
3735 
3736       adjusted_args_size = args_size;
3737       /* Compute the actual size of the argument block required.  The variable
3738 	 and constant sizes must be combined, the size may have to be rounded,
3739 	 and there may be a minimum required size.  When generating a sibcall
3740 	 pattern, do not round up, since we'll be re-using whatever space our
3741 	 caller provided.  */
3742       unadjusted_args_size
3743 	= compute_argument_block_size (reg_parm_stack_space,
3744 				       &adjusted_args_size,
3745 				       fndecl, fntype,
3746 				       (pass == 0 ? 0
3747 					: preferred_stack_boundary));
3748 
3749       old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
3750 
3751       /* The argument block when performing a sibling call is the
3752 	 incoming argument block.  */
3753       if (pass == 0)
3754 	{
3755 	  argblock = crtl->args.internal_arg_pointer;
3756 	  if (STACK_GROWS_DOWNWARD)
3757 	    argblock
3758 	      = plus_constant (Pmode, argblock, crtl->args.pretend_args_size);
3759 	  else
3760 	    argblock
3761 	      = plus_constant (Pmode, argblock, -crtl->args.pretend_args_size);
3762 
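	  /* One bit per byte of the incoming argument area.  The map (and,
	     for non-constant offsets, the watermark) records which bytes
	     have been overwritten by outgoing arguments, so that
	     check_sibcall_argument_overlap can spot later references to
	     clobbered incoming arguments.  */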
3763 	  HOST_WIDE_INT map_size = constant_lower_bound (args_size.constant);
3764 	  stored_args_map = sbitmap_alloc (map_size);
3765 	  bitmap_clear (stored_args_map);
3766 	  stored_args_watermark = HOST_WIDE_INT_M1U;
3767 	}
3768 
3769       /* If we have no actual push instructions, or shouldn't use them,
3770 	 make space for all args right now.  */
3771       else if (adjusted_args_size.var != 0)
3772 	{
3773 	  if (old_stack_level == 0)
3774 	    {
3775 	      emit_stack_save (SAVE_BLOCK, &old_stack_level);
3776 	      old_stack_pointer_delta = stack_pointer_delta;
3777 	      old_pending_adj = pending_stack_adjust;
3778 	      pending_stack_adjust = 0;
3779 	      /* stack_arg_under_construction says whether a stack arg is
3780 		 being constructed at the old stack level.  Pushing the stack
3781 		 gets a clean outgoing argument block.  */
3782 	      old_stack_arg_under_construction = stack_arg_under_construction;
3783 	      stack_arg_under_construction = 0;
3784 	    }
3785 	  argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
3786 	  if (flag_stack_usage_info)
3787 	    current_function_has_unbounded_dynamic_stack_size = 1;
3788 	}
3789       else
3790 	{
3791 	  /* Note that we must go through the motions of allocating an argument
3792 	     block even if the size is zero because we may be storing args
3793 	     in the area reserved for register arguments, which may be part of
3794 	     the stack frame.  */
3795 
3796 	  poly_int64 needed = adjusted_args_size.constant;
3797 
3798 	  /* Store the maximum argument space used.  It will be pushed by
3799 	     the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
3800 	     checking).  */
3801 
3802 	  crtl->outgoing_args_size = upper_bound (crtl->outgoing_args_size,
3803 						  needed);
3804 
3805 	  if (must_preallocate)
3806 	    {
3807 	      if (ACCUMULATE_OUTGOING_ARGS)
3808 		{
3809 		  /* Since the stack pointer will never be pushed, it is
3810 		     possible for the evaluation of a parm to clobber
3811 		     something we have already written to the stack.
3812 		     Since most function calls on RISC machines do not use
3813 		     the stack, this is uncommon, but must work correctly.
3814 
3815 		     Therefore, we save any area of the stack that was already
3816 		     written and that we are using.  Here we set up to do this
3817 		     by making a new stack usage map from the old one.  The
3818 		     actual save will be done by store_one_arg.
3819 
3820 		     Another approach might be to try to reorder the argument
3821 		     evaluations to avoid this conflicting stack usage.  */
3822 
3823 		  /* Since we will be writing into the entire argument area,
3824 		     the map must be allocated for its entire size, not just
3825 		     the part that is the responsibility of the caller.  */
3826 		  if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
3827 		    needed += reg_parm_stack_space;
3828 
3829 		  poly_int64 limit = needed;
3830 		  if (ARGS_GROW_DOWNWARD)
3831 		    limit += 1;
3832 
3833 		  /* For polynomial sizes, this is the maximum possible
3834 		     size needed for arguments with a constant size
3835 		     and offset.  */
3836 		  HOST_WIDE_INT const_limit = constant_lower_bound (limit);
3837 		  highest_outgoing_arg_in_use
3838 		    = MAX (initial_highest_arg_in_use, const_limit);
3839 
3840 		  free (stack_usage_map_buf);
3841 		  stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
3842 		  stack_usage_map = stack_usage_map_buf;
3843 
3844 		  if (initial_highest_arg_in_use)
3845 		    memcpy (stack_usage_map, initial_stack_usage_map,
3846 			    initial_highest_arg_in_use);
3847 
3848 		  if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
3849 		    memset (&stack_usage_map[initial_highest_arg_in_use], 0,
3850 			   (highest_outgoing_arg_in_use
3851 			    - initial_highest_arg_in_use));
3852 		  needed = 0;
3853 
3854 		  /* The address of the outgoing argument list must not be
3855 		     copied to a register here, because argblock would be left
3856 		     pointing to the wrong place after the call to
3857 		     allocate_dynamic_stack_space below.  */
3858 
3859 		  argblock = virtual_outgoing_args_rtx;
3860 		}
3861 	      else
3862 		{
3863 		  /* Try to reuse some or all of the pending_stack_adjust
3864 		     to get this space.  */
3865 		  if (inhibit_defer_pop == 0
3866 		      && (combine_pending_stack_adjustment_and_call
3867 			  (&needed,
3868 			   unadjusted_args_size,
3869 			   &adjusted_args_size,
3870 			   preferred_unit_stack_boundary)))
3871 		    {
3872 		      /* combine_pending_stack_adjustment_and_call computes
3873 			 an adjustment before the arguments are allocated.
3874 			 Account for them and see whether or not the stack
3875 			 needs to go up or down.  */
3876 		      needed = unadjusted_args_size - needed;
3877 
3878 		      /* Checked by
3879 			 combine_pending_stack_adjustment_and_call.  */
3880 		      gcc_checking_assert (ordered_p (needed, 0));
3881 		      if (maybe_lt (needed, 0))
3882 			{
3883 			  /* We're releasing stack space.  */
3884 			  /* ??? We can avoid any adjustment at all if we're
3885 			     already aligned.  FIXME.  */
3886 			  pending_stack_adjust = -needed;
3887 			  do_pending_stack_adjust ();
3888 			  needed = 0;
3889 			}
3890 		      else
3891 			/* We need to allocate space.  We'll do that in
3892 			   push_block below.  */
3893 			pending_stack_adjust = 0;
3894 		    }
3895 
3896 		  /* Special case this because overhead of `push_block' in
3897 		     this case is non-trivial.  */
3898 		  if (known_eq (needed, 0))
3899 		    argblock = virtual_outgoing_args_rtx;
3900 		  else
3901 		    {
3902 		      rtx needed_rtx = gen_int_mode (needed, Pmode);
3903 		      argblock = push_block (needed_rtx, 0, 0);
3904 		      if (ARGS_GROW_DOWNWARD)
3905 			argblock = plus_constant (Pmode, argblock, needed);
3906 		    }
3907 
3908 		  /* We only really need to call `copy_to_reg' in the case
3909 		     where push insns are going to be used to pass ARGBLOCK
3910 		     to a function call in ARGS.  In that case, the stack
3911 		     pointer changes value from the allocation point to the
3912 		     call point, and hence the value of
3913 		     VIRTUAL_OUTGOING_ARGS_RTX changes as well.  But might
3914 		     as well always do it.  */
3915 		  argblock = copy_to_reg (argblock);
3916 		}
3917 	    }
3918 	}
3919 
3920       if (ACCUMULATE_OUTGOING_ARGS)
3921 	{
3922 	  /* The save/restore code in store_one_arg handles all
3923 	     cases except one: a constructor call (including a C
3924 	     function returning a BLKmode struct) to initialize
3925 	     an argument.  */
3926 	  if (stack_arg_under_construction)
3927 	    {
3928 	      rtx push_size
3929 		= (gen_int_mode
3930 		   (adjusted_args_size.constant
3931 		    + (OUTGOING_REG_PARM_STACK_SPACE (!fndecl ? fntype
3932 						      : TREE_TYPE (fndecl))
3933 		       ? 0 : reg_parm_stack_space), Pmode));
3934 	      if (old_stack_level == 0)
3935 		{
3936 		  emit_stack_save (SAVE_BLOCK, &old_stack_level);
3937 		  old_stack_pointer_delta = stack_pointer_delta;
3938 		  old_pending_adj = pending_stack_adjust;
3939 		  pending_stack_adjust = 0;
3940 		  /* stack_arg_under_construction says whether a stack
3941 		     arg is being constructed at the old stack level.
3942 		     Pushing the stack gets a clean outgoing argument
3943 		     block.  */
3944 		  old_stack_arg_under_construction
3945 		    = stack_arg_under_construction;
3946 		  stack_arg_under_construction = 0;
3947 		  /* Make a new map for the new argument list.  */
3948 		  free (stack_usage_map_buf);
3949 		  stack_usage_map_buf = XCNEWVEC (char, highest_outgoing_arg_in_use);
3950 		  stack_usage_map = stack_usage_map_buf;
3951 		  highest_outgoing_arg_in_use = 0;
3952 		  stack_usage_watermark = HOST_WIDE_INT_M1U;
3953 		}
3954 	      /* We can pass TRUE as the 4th argument because we just
3955 		 saved the stack pointer and will restore it right after
3956 		 the call.  */
3957 	      allocate_dynamic_stack_space (push_size, 0, BIGGEST_ALIGNMENT,
3958 					    -1, true);
3959 	    }
3960 
3961 	  /* If argument evaluation might modify the stack pointer,
3962 	     copy the address of the argument list to a register.  */
3963 	  for (i = 0; i < num_actuals; i++)
3964 	    if (args[i].pass_on_stack)
3965 	      {
3966 		argblock = copy_addr_to_reg (argblock);
3967 		break;
3968 	      }
3969 	}
3970 
3971       compute_argument_addresses (args, argblock, num_actuals);
3972 
3973       /* Stack is properly aligned, pops can't safely be deferred during
3974 	 the evaluation of the arguments.  */
3975       NO_DEFER_POP;
3976 
3977       /* Precompute all register parameters.  It isn't safe to compute
3978 	 anything once we have started filling any specific hard regs.
3979 	 TLS symbols sometimes need a call to resolve.  Precompute
3980 	 register parameters before any stack pointer manipulation
3981 	 to avoid unaligned stack in the called function.  */
3982       precompute_register_parameters (num_actuals, args, &reg_parm_seen);
3983 
3984       OK_DEFER_POP;
3985 
3986       /* Perform stack alignment before the first push (the last arg).  */
3987       if (argblock == 0
3988 	  && maybe_gt (adjusted_args_size.constant, reg_parm_stack_space)
3989 	  && maybe_ne (adjusted_args_size.constant, unadjusted_args_size))
3990 	{
3991 	  /* When the stack adjustment is pending, we get better code
3992 	     by combining the adjustments.  */
3993 	  if (maybe_ne (pending_stack_adjust, 0)
3994 	      && ! inhibit_defer_pop
3995 	      && (combine_pending_stack_adjustment_and_call
3996 		  (&pending_stack_adjust,
3997 		   unadjusted_args_size,
3998 		   &adjusted_args_size,
3999 		   preferred_unit_stack_boundary)))
4000 	    do_pending_stack_adjust ();
4001 	  else if (argblock == 0)
4002 	    anti_adjust_stack (gen_int_mode (adjusted_args_size.constant
4003 					     - unadjusted_args_size,
4004 					     Pmode));
4005 	}
4006       /* Now that the stack is properly aligned, pops can't safely
4007 	 be deferred during the evaluation of the arguments.  */
4008       NO_DEFER_POP;
4009 
4010       /* Record the maximum pushed stack space size.  We need to delay
4011 	 doing it this far to take into account the optimization done
4012 	 by combine_pending_stack_adjustment_and_call.  */
4013       if (flag_stack_usage_info
4014 	  && !ACCUMULATE_OUTGOING_ARGS
4015 	  && pass
4016 	  && adjusted_args_size.var == 0)
4017 	{
4018 	  poly_int64 pushed = (adjusted_args_size.constant
4019 			       + pending_stack_adjust);
4020 	  current_function_pushed_stack_size
4021 	    = upper_bound (current_function_pushed_stack_size, pushed);
4022 	}
4023 
4024       funexp = rtx_for_function_call (fndecl, addr);
4025 
4026       if (CALL_EXPR_STATIC_CHAIN (exp))
4027 	static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp));
4028       else
4029 	static_chain_value = 0;
4030 
4031 #ifdef REG_PARM_STACK_SPACE
4032       /* Save the fixed argument area if it's part of the caller's frame and
4033 	 is clobbered by argument setup for this call.  */
4034       if (ACCUMULATE_OUTGOING_ARGS && pass)
4035 	save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
4036 					      &low_to_save, &high_to_save);
4037 #endif
4038 
4039       /* Now store (and compute if necessary) all non-register parms.
4040 	 These come before register parms, since they can require block-moves,
4041 	 which could clobber the registers used for register parms.
4042 	 Parms which have partial registers are not stored here,
4043 	 but we do preallocate space here if they want that.  */
4044 
4045       for (i = 0; i < num_actuals; i++)
4046 	{
4047 	  /* Delay bounds until all other args are stored.  */
4048 	  if (POINTER_BOUNDS_P (args[i].tree_value))
4049 	    continue;
4050 	  else if (args[i].reg == 0 || args[i].pass_on_stack)
4051 	    {
4052 	      rtx_insn *before_arg = get_last_insn ();
4053 
4054 	      /* We don't allow passing huge (> 2^30 B) arguments
4055 	         by value.  It would cause an overflow later on.  */
4056 	      if (constant_lower_bound (adjusted_args_size.constant)
4057 		  >= (1 << (HOST_BITS_PER_INT - 2)))
4058 	        {
4059 	          sorry ("passing too large argument on stack");
4060 		  continue;
4061 		}
4062 
4063 	      if (store_one_arg (&args[i], argblock, flags,
4064 				 adjusted_args_size.var != 0,
4065 				 reg_parm_stack_space)
4066 		  || (pass == 0
4067 		      && check_sibcall_argument_overlap (before_arg,
4068 							 &args[i], 1)))
4069 		sibcall_failure = 1;
4070 	      }
4071 
4072 	  if (args[i].stack)
4073 	    call_fusage
4074 	      = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[i].tree_value)),
4075 				   gen_rtx_USE (VOIDmode, args[i].stack),
4076 				   call_fusage);
4077 	}
4078 
4079       /* If we have a parm that is passed in registers but not in memory
4080 	 and whose alignment does not permit a direct copy into registers,
4081 	 make a group of pseudos that correspond to each register that we
4082 	 will later fill.  */
4083       if (STRICT_ALIGNMENT)
4084 	store_unaligned_arguments_into_pseudos (args, num_actuals);
4085 
4086       /* Now store any partially-in-registers parm.
4087 	 This is the last place a block-move can happen.  */
4088       if (reg_parm_seen)
4089 	for (i = 0; i < num_actuals; i++)
4090 	  if (args[i].partial != 0 && ! args[i].pass_on_stack)
4091 	    {
4092 	      rtx_insn *before_arg = get_last_insn ();
4093 
4094 	     /* On targets with weird calling conventions (e.g. PA) it's
4095 		hard to ensure that all cases of argument overlap between
4096 		stack and registers work.  Play it safe and bail out.  */
4097 	      if (ARGS_GROW_DOWNWARD && !STACK_GROWS_DOWNWARD)
4098 		{
4099 		  sibcall_failure = 1;
4100 		  break;
4101 		}
4102 
4103 	      if (store_one_arg (&args[i], argblock, flags,
4104 				 adjusted_args_size.var != 0,
4105 				 reg_parm_stack_space)
4106 		  || (pass == 0
4107 		      && check_sibcall_argument_overlap (before_arg,
4108 							 &args[i], 1)))
4109 		sibcall_failure = 1;
4110 	    }
4111 
4112       bool any_regs = false;
4113       for (i = 0; i < num_actuals; i++)
4114 	if (args[i].reg != NULL_RTX)
4115 	  {
4116 	    any_regs = true;
4117 	    targetm.calls.call_args (args[i].reg, funtype);
4118 	  }
4119       if (!any_regs)
4120 	targetm.calls.call_args (pc_rtx, funtype);
4121 
4122       /* Figure out the register where the value, if any, will come back.  */
4123       valreg = 0;
4124       valbnd = 0;
4125       if (TYPE_MODE (rettype) != VOIDmode
4126 	  && ! structure_value_addr)
4127 	{
4128 	  if (pcc_struct_value)
4129 	    {
4130 	      valreg = hard_function_value (build_pointer_type (rettype),
4131 					    fndecl, NULL, (pass == 0));
4132 	      if (CALL_WITH_BOUNDS_P (exp))
4133 		valbnd = targetm.calls.
4134 		  chkp_function_value_bounds (build_pointer_type (rettype),
4135 					      fndecl, (pass == 0));
4136 	    }
4137 	  else
4138 	    {
4139 	      valreg = hard_function_value (rettype, fndecl, fntype,
4140 					    (pass == 0));
4141 	      if (CALL_WITH_BOUNDS_P (exp))
4142 		valbnd = targetm.calls.chkp_function_value_bounds (rettype,
4143 								   fndecl,
4144 								   (pass == 0));
4145 	    }
4146 
4147 	  /* If VALREG is a PARALLEL whose first member has a zero
4148 	     offset, use that.  This is for targets such as m68k that
4149 	     return the same value in multiple places.  */
4150 	  if (GET_CODE (valreg) == PARALLEL)
4151 	    {
4152 	      rtx elem = XVECEXP (valreg, 0, 0);
4153 	      rtx where = XEXP (elem, 0);
4154 	      rtx offset = XEXP (elem, 1);
4155 	      if (offset == const0_rtx
4156 		  && GET_MODE (where) == GET_MODE (valreg))
4157 		valreg = where;
4158 	    }
4159 	}
4160 
4161       /* Store all bounds not passed in registers.  */
4162       for (i = 0; i < num_actuals; i++)
4163 	{
4164 	  if (POINTER_BOUNDS_P (args[i].tree_value)
4165 	      && !args[i].reg)
4166 	    store_bounds (&args[i],
4167 			  args[i].pointer_arg == -1
4168 			  ? NULL
4169 			  : &args[args[i].pointer_arg]);
4170 	}
4171 
4172       /* If register arguments require space on the stack and stack space
4173 	 was not preallocated, allocate stack space here for arguments
4174 	 passed in registers.  */
4175       if (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
4176           && !ACCUMULATE_OUTGOING_ARGS
4177 	  && must_preallocate == 0 && reg_parm_stack_space > 0)
4178 	anti_adjust_stack (GEN_INT (reg_parm_stack_space));
4179 
4180       /* Pass the function the address in which to return a
4181 	 structure value.  */
4182       if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
4183 	{
4184 	  structure_value_addr
4185 	    = convert_memory_address (Pmode, structure_value_addr);
4186 	  emit_move_insn (struct_value,
4187 			  force_reg (Pmode,
4188 				     force_operand (structure_value_addr,
4189 						    NULL_RTX)));
4190 
4191 	  if (REG_P (struct_value))
4192 	    use_reg (&call_fusage, struct_value);
4193 	}
4194 
4195       after_args = get_last_insn ();
4196       funexp = prepare_call_address (fndecl ? fndecl : fntype, funexp,
4197 				     static_chain_value, &call_fusage,
4198 				     reg_parm_seen, flags);
4199 
4200       load_register_parameters (args, num_actuals, &call_fusage, flags,
4201 				pass == 0, &sibcall_failure);
4202 
4203       /* Save a pointer to the last insn before the call, so that we can
4204 	 later safely search backwards to find the CALL_INSN.  */
4205       before_call = get_last_insn ();
4206 
4207       /* Set up next argument register.  For sibling calls on machines
4208 	 with register windows this should be the incoming register.  */
4209       if (pass == 0)
4210 	next_arg_reg = targetm.calls.function_incoming_arg (args_so_far,
4211 							    VOIDmode,
4212 							    void_type_node,
4213 							    true);
4214       else
4215 	next_arg_reg = targetm.calls.function_arg (args_so_far,
4216 						   VOIDmode, void_type_node,
4217 						   true);
4218 
4219       if (pass == 1 && (return_flags & ERF_RETURNS_ARG))
4220 	{
4221 	  int arg_nr = return_flags & ERF_RETURN_ARG_MASK;
4222 	  arg_nr = num_actuals - arg_nr - 1;
4223 	  if (arg_nr >= 0
4224 	      && arg_nr < num_actuals
4225 	      && args[arg_nr].reg
4226 	      && valreg
4227 	      && REG_P (valreg)
4228 	      && GET_MODE (args[arg_nr].reg) == GET_MODE (valreg))
4229 	    call_fusage
4230 	      = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[arg_nr].tree_value)),
4231 				   gen_rtx_SET (valreg, args[arg_nr].reg),
4232 				   call_fusage);
4233 	}
4234       /* All arguments and registers used for the call must be set up by
4235 	 now!  */
4236 
4237       /* Stack must be properly aligned now.  */
4238       gcc_assert (!pass
4239 		  || multiple_p (stack_pointer_delta,
4240 				 preferred_unit_stack_boundary));
4241 
4242       /* Generate the actual call instruction.  */
4243       emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
4244 		   adjusted_args_size.constant, struct_value_size,
4245 		   next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
4246 		   flags, args_so_far);
4247 
4248       if (flag_ipa_ra)
4249 	{
4250 	  rtx_call_insn *last;
4251 	  rtx datum = NULL_RTX;
4252 	  if (fndecl != NULL_TREE)
4253 	    {
4254 	      datum = XEXP (DECL_RTL (fndecl), 0);
4255 	      gcc_assert (datum != NULL_RTX
4256 			  && GET_CODE (datum) == SYMBOL_REF);
4257 	    }
4258 	  last = last_call_insn ();
4259 	  add_reg_note (last, REG_CALL_DECL, datum);
4260 	}
4261 
4262       /* If the call setup or the call itself overlaps with anything
4263 	 of the argument setup we probably clobbered our call address.
4264 	 In that case we can't do sibcalls.  */
4265       if (pass == 0
4266 	  && check_sibcall_argument_overlap (after_args, 0, 0))
4267 	sibcall_failure = 1;
4268 
4269       /* If a non-BLKmode value is returned at the most significant end
4270 	 of a register, shift the register right by the appropriate amount
4271 	 and update VALREG accordingly.  BLKmode values are handled by the
4272 	 group load/store machinery below.  */
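      /* For instance, a 16-bit value returned in the high half of a 32-bit
	 register is shifted right by 16 so that it ends up at the least
	 significant end, where the code below expects it.  */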
4273       if (!structure_value_addr
4274 	  && !pcc_struct_value
4275 	  && TYPE_MODE (rettype) != VOIDmode
4276 	  && TYPE_MODE (rettype) != BLKmode
4277 	  && REG_P (valreg)
4278 	  && targetm.calls.return_in_msb (rettype))
4279 	{
4280 	  if (shift_return_value (TYPE_MODE (rettype), false, valreg))
4281 	    sibcall_failure = 1;
4282 	  valreg = gen_rtx_REG (TYPE_MODE (rettype), REGNO (valreg));
4283 	}
4284 
4285       if (pass && (flags & ECF_MALLOC))
4286 	{
4287 	  rtx temp = gen_reg_rtx (GET_MODE (valreg));
4288 	  rtx_insn *last, *insns;
4289 
4290 	  /* The return value from a malloc-like function is a pointer.  */
4291 	  if (TREE_CODE (rettype) == POINTER_TYPE)
4292 	    mark_reg_pointer (temp, MALLOC_ABI_ALIGNMENT);
4293 
4294 	  emit_move_insn (temp, valreg);
4295 
4296 	  /* The return value from a malloc-like function cannot alias
4297 	     anything else.  */
4298 	  last = get_last_insn ();
4299 	  add_reg_note (last, REG_NOALIAS, temp);
4300 
4301 	  /* Write out the sequence.  */
4302 	  insns = get_insns ();
4303 	  end_sequence ();
4304 	  emit_insn (insns);
4305 	  valreg = temp;
4306 	}
4307 
4308       /* For calls to `setjmp', etc., inform
4309 	 function.c:setjmp_warnings that it should complain if
4310 	 nonvolatile values are live.  For functions that cannot
4311 	 return, inform flow that control does not fall through.  */
4312 
4313       if ((flags & ECF_NORETURN) || pass == 0)
4314 	{
4315 	  /* The barrier must be emitted
4316 	     immediately after the CALL_INSN.  Some ports emit more
4317 	     than just a CALL_INSN above, so we must search for it here.  */
4318 
4319 	  rtx_insn *last = get_last_insn ();
4320 	  while (!CALL_P (last))
4321 	    {
4322 	      last = PREV_INSN (last);
4323 	      /* There was no CALL_INSN?  */
4324 	      gcc_assert (last != before_call);
4325 	    }
4326 
4327 	  emit_barrier_after (last);
4328 
4329 	  /* Stack adjustments after a noreturn call are dead code.
4330 	     However when NO_DEFER_POP is in effect, we must preserve
4331 	     stack_pointer_delta.  */
4332 	  if (inhibit_defer_pop == 0)
4333 	    {
4334 	      stack_pointer_delta = old_stack_allocated;
4335 	      pending_stack_adjust = 0;
4336 	    }
4337 	}
4338 
4339       /* If value type not void, return an rtx for the value.  */
4340 
4341       if (TYPE_MODE (rettype) == VOIDmode
4342 	  || ignore)
4343 	target = const0_rtx;
4344       else if (structure_value_addr)
4345 	{
4346 	  if (target == 0 || !MEM_P (target))
4347 	    {
4348 	      target
4349 		= gen_rtx_MEM (TYPE_MODE (rettype),
4350 			       memory_address (TYPE_MODE (rettype),
4351 					       structure_value_addr));
4352 	      set_mem_attributes (target, rettype, 1);
4353 	    }
4354 	}
4355       else if (pcc_struct_value)
4356 	{
4357 	  /* This is the special C++ case where we need to
4358 	     know what the true target was.  We take care to
4359 	     never use this value more than once in one expression.  */
4360 	  target = gen_rtx_MEM (TYPE_MODE (rettype),
4361 				copy_to_reg (valreg));
4362 	  set_mem_attributes (target, rettype, 1);
4363 	}
4364       /* Handle calls that return values in multiple non-contiguous locations.
4365 	 The Irix 6 ABI has examples of this.  */
4366       else if (GET_CODE (valreg) == PARALLEL)
4367 	{
4368 	  if (target == 0)
4369 	    target = emit_group_move_into_temps (valreg);
4370 	  else if (rtx_equal_p (target, valreg))
4371 	    ;
4372 	  else if (GET_CODE (target) == PARALLEL)
4373 	    /* Handle the result of an emit_group_move_into_temps
4374 	       call in the previous pass.  */
4375 	    emit_group_move (target, valreg);
4376 	  else
4377 	    emit_group_store (target, valreg, rettype,
4378 			      int_size_in_bytes (rettype));
4379 	}
4380       else if (target
4381 	       && GET_MODE (target) == TYPE_MODE (rettype)
4382 	       && GET_MODE (target) == GET_MODE (valreg))
4383 	{
4384 	  bool may_overlap = false;
4385 
4386 	  /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard
4387 	     reg to a plain register.  */
4388 	  if (!REG_P (target) || HARD_REGISTER_P (target))
4389 	    valreg = avoid_likely_spilled_reg (valreg);
4390 
4391 	  /* If TARGET is a MEM in the argument area, and we have
4392 	     saved part of the argument area, then we can't store
4393 	     directly into TARGET as it may get overwritten when we
4394 	     restore the argument save area below.  Don't work too
4395 	     hard though and simply force TARGET to a register if it
4396 	     is a MEM; the optimizer is quite likely to sort it out.  */
4397 	  if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target))
4398 	    for (i = 0; i < num_actuals; i++)
4399 	      if (args[i].save_area)
4400 		{
4401 		  may_overlap = true;
4402 		  break;
4403 		}
4404 
4405 	  if (may_overlap)
4406 	    target = copy_to_reg (valreg);
4407 	  else
4408 	    {
4409 	      /* TARGET and VALREG cannot be equal at this point
4410 		 because the latter would not have
4411 		 REG_FUNCTION_VALUE_P true, while the former would if
4412 		 it were referring to the same register.
4413 
4414 		 If they refer to the same register, this move will be
4415 		 a no-op, except when function inlining is being
4416 		 done.  */
4417 	      emit_move_insn (target, valreg);
4418 
4419 	      /* If we are setting a MEM, this code must be executed.
4420 		 Since it is emitted after the call insn, sibcall
4421 		 optimization cannot be performed in that case.  */
4422 	      if (MEM_P (target))
4423 		sibcall_failure = 1;
4424 	    }
4425 	}
4426       else
4427 	target = copy_to_reg (avoid_likely_spilled_reg (valreg));
4428 
4429       /* If we promoted this return value, make the proper SUBREG.
4430          TARGET might be const0_rtx here, so be careful.  */
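      /* E.g. a return value whose natural mode is SImode but which was
	 promoted to a DImode register becomes a lowpart SUBREG of that
	 register, marked promoted so later code knows the extension has
	 already been done.  */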
4431       if (REG_P (target)
4432 	  && TYPE_MODE (rettype) != BLKmode
4433 	  && GET_MODE (target) != TYPE_MODE (rettype))
4434 	{
4435 	  tree type = rettype;
4436 	  int unsignedp = TYPE_UNSIGNED (type);
4437 	  machine_mode pmode;
4438 
4439 	  /* Ensure we promote as expected, and get the new unsignedness.  */
4440 	  pmode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
4441 					 funtype, 1);
4442 	  gcc_assert (GET_MODE (target) == pmode);
4443 
4444 	  poly_uint64 offset = subreg_lowpart_offset (TYPE_MODE (type),
4445 						      GET_MODE (target));
4446 	  target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
4447 	  SUBREG_PROMOTED_VAR_P (target) = 1;
4448 	  SUBREG_PROMOTED_SET (target, unsignedp);
4449 	}
4450 
4451       /* If size of args is variable or this was a constructor call for a stack
4452 	 argument, restore saved stack-pointer value.  */
4453 
4454       if (old_stack_level)
4455 	{
4456 	  rtx_insn *prev = get_last_insn ();
4457 
4458 	  emit_stack_restore (SAVE_BLOCK, old_stack_level);
4459 	  stack_pointer_delta = old_stack_pointer_delta;
4460 
4461 	  fixup_args_size_notes (prev, get_last_insn (), stack_pointer_delta);
4462 
4463 	  pending_stack_adjust = old_pending_adj;
4464 	  old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
4465 	  stack_arg_under_construction = old_stack_arg_under_construction;
4466 	  highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4467 	  stack_usage_map = initial_stack_usage_map;
4468 	  stack_usage_watermark = initial_stack_usage_watermark;
4469 	  sibcall_failure = 1;
4470 	}
4471       else if (ACCUMULATE_OUTGOING_ARGS && pass)
4472 	{
4473 #ifdef REG_PARM_STACK_SPACE
4474 	  if (save_area)
4475 	    restore_fixed_argument_area (save_area, argblock,
4476 					 high_to_save, low_to_save);
4477 #endif
4478 
4479 	  /* If we saved any argument areas, restore them.  */
4480 	  for (i = 0; i < num_actuals; i++)
4481 	    if (args[i].save_area)
4482 	      {
4483 		machine_mode save_mode = GET_MODE (args[i].save_area);
4484 		rtx stack_area
4485 		  = gen_rtx_MEM (save_mode,
4486 				 memory_address (save_mode,
4487 						 XEXP (args[i].stack_slot, 0)));
4488 
4489 		if (save_mode != BLKmode)
4490 		  emit_move_insn (stack_area, args[i].save_area);
4491 		else
4492 		  emit_block_move (stack_area, args[i].save_area,
4493 				   (gen_int_mode
4494 				    (args[i].locate.size.constant, Pmode)),
4495 				   BLOCK_OP_CALL_PARM);
4496 	      }
4497 
4498 	  highest_outgoing_arg_in_use = initial_highest_arg_in_use;
4499 	  stack_usage_map = initial_stack_usage_map;
4500 	  stack_usage_watermark = initial_stack_usage_watermark;
4501 	}
4502 
4503       /* If this was alloca, record the new stack level.  */
4504       if (flags & ECF_MAY_BE_ALLOCA)
4505 	record_new_stack_level ();
4506 
4507       /* Free up storage we no longer need.  */
4508       for (i = 0; i < num_actuals; ++i)
4509 	free (args[i].aligned_regs);
4510 
4511       targetm.calls.end_call_args ();
4512 
4513       insns = get_insns ();
4514       end_sequence ();
4515 
4516       if (pass == 0)
4517 	{
4518 	  tail_call_insns = insns;
4519 
4520 	  /* Restore the pending stack adjustment now that we have
4521 	     finished generating the sibling call sequence.  */
4522 
4523 	  restore_pending_stack_adjust (&save);
4524 
4525 	  /* Prepare arg structure for next iteration.  */
4526 	  for (i = 0; i < num_actuals; i++)
4527 	    {
4528 	      args[i].value = 0;
4529 	      args[i].aligned_regs = 0;
4530 	      args[i].stack = 0;
4531 	    }
4532 
4533 	  sbitmap_free (stored_args_map);
4534 	  internal_arg_pointer_exp_state.scan_start = NULL;
4535 	  internal_arg_pointer_exp_state.cache.release ();
4536 	}
4537       else
4538 	{
4539 	  normal_call_insns = insns;
4540 
4541 	  /* Verify that we've deallocated all the stack we used.  */
4542 	  gcc_assert ((flags & ECF_NORETURN)
4543 		      || known_eq (old_stack_allocated,
4544 				   stack_pointer_delta
4545 				   - pending_stack_adjust));
4546 	}
4547 
4548       /* If something prevents making this a sibling call,
4549 	 zero out the sequence.  */
4550       if (sibcall_failure)
4551 	tail_call_insns = NULL;
4552       else
4553 	break;
4554     }
4555 
4556   /* If tail call production succeeded, we need to remove REG_EQUIV notes on
4557      arguments too, as the argument area is now clobbered by the call.  */
4558   if (tail_call_insns)
4559     {
4560       emit_insn (tail_call_insns);
4561       crtl->tail_call_emit = true;
4562     }
4563   else
4564     {
4565       emit_insn (normal_call_insns);
4566       if (try_tail_call)
4567 	/* Ideally we'd emit a message for all of the ways that it could
4568 	   have failed.  */
4569 	maybe_complain_about_tail_call (exp, "tail call production failed");
4570     }
4571 
4572   currently_expanding_call--;
4573 
4574   free (stack_usage_map_buf);
4575   free (args);
4576 
4577   /* Join result with returned bounds so caller may use them if needed.  */
4578   target = chkp_join_splitted_slot (target, valbnd);
4579 
4580   return target;
4581 }
4582 
4583 /* A sibling call sequence invalidates any REG_EQUIV notes made for
4584    this function's incoming arguments.
4585 
4586    At the start of RTL generation we know the only REG_EQUIV notes
4587    in the rtl chain are those for incoming arguments, so we can look
4588    for REG_EQUIV notes between the start of the function and the
4589    NOTE_INSN_FUNCTION_BEG.
4590 
4591    This is (slight) overkill.  We could keep track of the highest
4592    argument we clobber and be more selective in removing notes, but it
4593    does not seem to be worth the effort.  */
4594 
4595 void
4596 fixup_tail_calls (void)
4597 {
4598   rtx_insn *insn;
4599 
4600   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
4601     {
4602       rtx note;
4603 
4604       /* There are never REG_EQUIV notes for the incoming arguments
4605 	 after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it.  */
4606       if (NOTE_P (insn)
4607 	  && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
4608 	break;
4609 
4610       note = find_reg_note (insn, REG_EQUIV, 0);
4611       if (note)
4612 	remove_note (insn, note);
4613       note = find_reg_note (insn, REG_EQUIV, 0);
4614       gcc_assert (!note);
4615     }
4616 }
4617 
4618 /* Traverse a list of TYPES and expand all complex types into their
4619    components.  */
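/* For example, on a target whose split_complex_arg hook accepts
   complex double, a type list of (complex double, int) becomes
   (double, double, int): each complex entry is replaced by two
   entries of its component type.  */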
4620 static tree
4621 split_complex_types (tree types)
4622 {
4623   tree p;
4624 
4625   /* Before allocating memory, check for the common case of no complex.  */
4626   for (p = types; p; p = TREE_CHAIN (p))
4627     {
4628       tree type = TREE_VALUE (p);
4629       if (TREE_CODE (type) == COMPLEX_TYPE
4630 	  && targetm.calls.split_complex_arg (type))
4631 	goto found;
4632     }
4633   return types;
4634 
4635  found:
4636   types = copy_list (types);
4637 
4638   for (p = types; p; p = TREE_CHAIN (p))
4639     {
4640       tree complex_type = TREE_VALUE (p);
4641 
4642       if (TREE_CODE (complex_type) == COMPLEX_TYPE
4643 	  && targetm.calls.split_complex_arg (complex_type))
4644 	{
4645 	  tree next, imag;
4646 
4647 	  /* Rewrite complex type with component type.  */
4648 	  TREE_VALUE (p) = TREE_TYPE (complex_type);
4649 	  next = TREE_CHAIN (p);
4650 
4651 	  /* Add another component type for the imaginary part.  */
4652 	  imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
4653 	  TREE_CHAIN (p) = imag;
4654 	  TREE_CHAIN (imag) = next;
4655 
4656 	  /* Skip the newly created node.  */
4657 	  p = TREE_CHAIN (p);
4658 	}
4659     }
4660 
4661   return types;
4662 }
4663 
4664 /* Output a library call to function ORGFUN (a SYMBOL_REF rtx)
4665    for a value of mode OUTMODE,
4666    with NARGS different arguments, passed as ARGS.
4667    Store the return value if RETVAL is nonzero: store it in VALUE if
4668    VALUE is nonnull, otherwise pick a convenient location.  In either
4669    case return the location of the stored value.
4670 
4671    FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for
4672    `const' calls, LCT_PURE for `pure' calls, or another LCT_ value for
4673    other types of library calls.  */
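/* Callers normally reach this through the emit_library_call and
   emit_library_call_value wrappers, which collect their variadic
   (rtx, machine_mode) argument pairs into the ARGS array.  */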
4674 
4675 rtx
4676 emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
4677 			   enum libcall_type fn_type,
4678 			   machine_mode outmode, int nargs, rtx_mode_t *args)
4679 {
4680   /* Total size in bytes of all the stack-parms scanned so far.  */
4681   struct args_size args_size;
4682   /* Size of arguments before any adjustments (such as rounding).  */
4683   struct args_size original_args_size;
4684   int argnum;
4685   rtx fun;
4686   /* TODO: choose the correct decl type of orgfun.  Sadly this information
4687      isn't present here, so we default to the native calling ABI.  */
4688   tree fndecl ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling abi ? */
4689   tree fntype ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling abi ? */
4690   int count;
4691   rtx argblock = 0;
4692   CUMULATIVE_ARGS args_so_far_v;
4693   cumulative_args_t args_so_far;
4694   struct arg
4695   {
4696     rtx value;
4697     machine_mode mode;
4698     rtx reg;
4699     int partial;
4700     struct locate_and_pad_arg_data locate;
4701     rtx save_area;
4702   };
4703   struct arg *argvec;
4704   int old_inhibit_defer_pop = inhibit_defer_pop;
4705   rtx call_fusage = 0;
4706   rtx mem_value = 0;
4707   rtx valreg;
4708   int pcc_struct_value = 0;
4709   poly_int64 struct_value_size = 0;
4710   int flags;
4711   int reg_parm_stack_space = 0;
4712   poly_int64 needed;
4713   rtx_insn *before_call;
4714   bool have_push_fusage;
4715   tree tfom;			/* type_for_mode (outmode, 0) */
4716 
4717 #ifdef REG_PARM_STACK_SPACE
4718   /* Define the boundary of the register parm stack space that needs to be
4719      saved, if any.  */
4720   int low_to_save = 0, high_to_save = 0;
4721   rtx save_area = 0;            /* Place that it is saved.  */
4722 #endif
4723 
4724   /* Stack-usage bookkeeping saved at entry and restored after the call.  */
4725   unsigned int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
4726   char *initial_stack_usage_map = stack_usage_map;
4727   unsigned HOST_WIDE_INT initial_stack_usage_watermark = stack_usage_watermark;
4728   char *stack_usage_map_buf = NULL;
4729 
4730   rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
4731 
4732 #ifdef REG_PARM_STACK_SPACE
4733   reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
4734 #endif
4735 
4736   /* By default, library functions cannot throw.  */
4737   flags = ECF_NOTHROW;
4738 
4739   switch (fn_type)
4740     {
4741     case LCT_NORMAL:
4742       break;
4743     case LCT_CONST:
4744       flags |= ECF_CONST;
4745       break;
4746     case LCT_PURE:
4747       flags |= ECF_PURE;
4748       break;
4749     case LCT_NORETURN:
4750       flags |= ECF_NORETURN;
4751       break;
4752     case LCT_THROW:
4753       flags &= ~ECF_NOTHROW;
4754       break;
4755     case LCT_RETURNS_TWICE:
4756       flags = ECF_RETURNS_TWICE;
4757       break;
4758     }
4759   fun = orgfun;
4760 
4761   /* Ensure current function's preferred stack boundary is at least
4762      what we need.  */
4763   if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
4764     crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
4765 
4766   /* If this kind of value comes back in memory,
4767      decide where in memory it should come back.  */
4768   if (outmode != VOIDmode)
4769     {
4770       tfom = lang_hooks.types.type_for_mode (outmode, 0);
4771       if (aggregate_value_p (tfom, 0))
4772 	{
4773 #ifdef PCC_STATIC_STRUCT_RETURN
4774 	  rtx pointer_reg
4775 	    = hard_function_value (build_pointer_type (tfom), 0, 0, 0);
4776 	  mem_value = gen_rtx_MEM (outmode, pointer_reg);
4777 	  pcc_struct_value = 1;
4778 	  if (value == 0)
4779 	    value = gen_reg_rtx (outmode);
4780 #else /* not PCC_STATIC_STRUCT_RETURN */
4781 	  struct_value_size = GET_MODE_SIZE (outmode);
4782 	  if (value != 0 && MEM_P (value))
4783 	    mem_value = value;
4784 	  else
4785 	    mem_value = assign_temp (tfom, 1, 1);
4786 #endif
4787 	  /* This call returns a big structure.  */
4788 	  flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
4789 	}
4790     }
4791   else
4792     tfom = void_type_node;
4793 
4794   /* ??? Unfinished: must pass the memory address as an argument.  */
4795 
4796   /* Copy all the libcall-arguments out of the varargs data
4797      and into a vector ARGVEC.
4798 
4799      Compute how to pass each argument.  We only support a very small subset
4800      of the full argument passing conventions to limit complexity here since
4801      library functions shouldn't have many args.  */
4802 
4803   argvec = XALLOCAVEC (struct arg, nargs + 1);
4804   memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
4805 
4806 #ifdef INIT_CUMULATIVE_LIBCALL_ARGS
4807   INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far_v, outmode, fun);
4808 #else
4809   INIT_CUMULATIVE_ARGS (args_so_far_v, NULL_TREE, fun, 0, nargs);
4810 #endif
4811   args_so_far = pack_cumulative_args (&args_so_far_v);
4812 
4813   args_size.constant = 0;
4814   args_size.var = 0;
4815 
4816   count = 0;
4817 
4818   push_temp_slots ();
4819 
4820   /* If there's a structure value address to be passed,
4821      either pass it in the special place, or pass it as an extra argument.  */
4822   if (mem_value && struct_value == 0 && ! pcc_struct_value)
4823     {
4824       rtx addr = XEXP (mem_value, 0);
4825 
4826       nargs++;
4827 
4828       /* Make sure it is a reasonable operand for a move or push insn.  */
4829       if (!REG_P (addr) && !MEM_P (addr)
4830 	  && !(CONSTANT_P (addr)
4831 	       && targetm.legitimate_constant_p (Pmode, addr)))
4832 	addr = force_operand (addr, NULL_RTX);
4833 
4834       argvec[count].value = addr;
4835       argvec[count].mode = Pmode;
4836       argvec[count].partial = 0;
4837 
4838       argvec[count].reg = targetm.calls.function_arg (args_so_far,
4839 						      Pmode, NULL_TREE, true);
4840       gcc_assert (targetm.calls.arg_partial_bytes (args_so_far, Pmode,
4841 						   NULL_TREE, 1) == 0);
4842 
4843       locate_and_pad_parm (Pmode, NULL_TREE,
4844 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4845 			   1,
4846 #else
4847 			   argvec[count].reg != 0,
4848 #endif
4849 			   reg_parm_stack_space, 0,
4850 			   NULL_TREE, &args_size, &argvec[count].locate);
4851 
4852       if (argvec[count].reg == 0 || argvec[count].partial != 0
4853 	  || reg_parm_stack_space > 0)
4854 	args_size.constant += argvec[count].locate.size.constant;
4855 
4856       targetm.calls.function_arg_advance (args_so_far, Pmode, (tree) 0, true);
4857 
4858       count++;
4859     }
4860 
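  /* Scan the remaining (VAL, MODE) pairs in ARGS.  Arguments passed by
     reference are first replaced by the address of a slot holding the
     value; for each argument we then record its promoted mode, the
     register (if any) it goes in, any partial-register byte count, and
     its stack location, accumulating the total stack size in ARGS_SIZE.  */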
4861   for (unsigned int i = 0; count < nargs; i++, count++)
4862     {
4863       rtx val = args[i].first;
4864       machine_mode mode = args[i].second;
4865       int unsigned_p = 0;
4866 
4867       /* We cannot convert the arg value to the mode the library wants here;
4868 	 must do it earlier where we know the signedness of the arg.  */
4869       gcc_assert (mode != BLKmode
4870 		  && (GET_MODE (val) == mode || GET_MODE (val) == VOIDmode));
4871 
4872       /* Make sure it is a reasonable operand for a move or push insn.  */
4873       if (!REG_P (val) && !MEM_P (val)
4874 	  && !(CONSTANT_P (val) && targetm.legitimate_constant_p (mode, val)))
4875 	val = force_operand (val, NULL_RTX);
4876 
4877       if (pass_by_reference (&args_so_far_v, mode, NULL_TREE, 1))
4878 	{
4879 	  rtx slot;
4880 	  int must_copy
4881 	    = !reference_callee_copied (&args_so_far_v, mode, NULL_TREE, 1);
4882 
4883 	  /* If this was a CONST function, it is now PURE since it now
4884 	     reads memory.  */
4885 	  if (flags & ECF_CONST)
4886 	    {
4887 	      flags &= ~ECF_CONST;
4888 	      flags |= ECF_PURE;
4889 	    }
4890 
4891 	  if (MEM_P (val) && !must_copy)
4892 	    {
4893 	      tree val_expr = MEM_EXPR (val);
4894 	      if (val_expr)
4895 		mark_addressable (val_expr);
4896 	      slot = val;
4897 	    }
4898 	  else
4899 	    {
4900 	      slot = assign_temp (lang_hooks.types.type_for_mode (mode, 0),
4901 				  1, 1);
4902 	      emit_move_insn (slot, val);
4903 	    }
4904 
4905 	  call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
4906 					   gen_rtx_USE (VOIDmode, slot),
4907 					   call_fusage);
4908 	  if (must_copy)
4909 	    call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
4910 					     gen_rtx_CLOBBER (VOIDmode,
4911 							      slot),
4912 					     call_fusage);
4913 
4914 	  mode = Pmode;
4915 	  val = force_operand (XEXP (slot, 0), NULL_RTX);
4916 	}
4917 
4918       mode = promote_function_mode (NULL_TREE, mode, &unsigned_p, NULL_TREE, 0);
4919       argvec[count].mode = mode;
4920       argvec[count].value = convert_modes (mode, GET_MODE (val), val, unsigned_p);
4921       argvec[count].reg = targetm.calls.function_arg (args_so_far, mode,
4922 						      NULL_TREE, true);
4923 
4924       argvec[count].partial
4925 	= targetm.calls.arg_partial_bytes (args_so_far, mode, NULL_TREE, 1);
4926 
4927       if (argvec[count].reg == 0
4928 	  || argvec[count].partial != 0
4929 	  || reg_parm_stack_space > 0)
4930 	{
4931 	  locate_and_pad_parm (mode, NULL_TREE,
4932 #ifdef STACK_PARMS_IN_REG_PARM_AREA
4933 			       1,
4934 #else
4935 			       argvec[count].reg != 0,
4936 #endif
4937 			       reg_parm_stack_space, argvec[count].partial,
4938 			       NULL_TREE, &args_size, &argvec[count].locate);
4939 	  args_size.constant += argvec[count].locate.size.constant;
4940 	  gcc_assert (!argvec[count].locate.size.var);
4941 	}
4942 #ifdef BLOCK_REG_PADDING
4943       else
4944 	/* The argument is passed entirely in registers.  See at which
4945 	   end it should be padded.  */
4946 	argvec[count].locate.where_pad =
4947 	  BLOCK_REG_PADDING (mode, NULL_TREE,
4948 			     known_le (GET_MODE_SIZE (mode), UNITS_PER_WORD));
4949 #endif
4950 
4951       targetm.calls.function_arg_advance (args_so_far, mode, (tree) 0, true);
4952     }
4953 
4954   /* If this machine requires an external definition for library
4955      functions, write one out.  */
4956   assemble_external_libcall (fun);
4957 
4958   original_args_size = args_size;
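  /* Round the outgoing-argument block up so that, combined with the
     current stack_pointer_delta, the stack stays aligned to STACK_BYTES.  */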
4959   args_size.constant = (aligned_upper_bound (args_size.constant
4960 					     + stack_pointer_delta,
4961 					     STACK_BYTES)
4962 			- stack_pointer_delta);
4963 
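  /* The block must be at least as large as any area the callee is
     entitled to use for register parameters.  */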
4964   args_size.constant = upper_bound (args_size.constant,
4965 				    reg_parm_stack_space);
4966 
4967   if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
4968     args_size.constant -= reg_parm_stack_space;
4969 
4970   crtl->outgoing_args_size = upper_bound (crtl->outgoing_args_size,
4971 					  args_size.constant);
4972 
4973   if (flag_stack_usage_info && !ACCUMULATE_OUTGOING_ARGS)
4974     {
4975       poly_int64 pushed = args_size.constant + pending_stack_adjust;
4976       current_function_pushed_stack_size
4977 	= upper_bound (current_function_pushed_stack_size, pushed);
4978     }
4979 
4980   if (ACCUMULATE_OUTGOING_ARGS)
4981     {
4982       /* Since the stack pointer will never be pushed, it is possible for
4983 	 the evaluation of a parm to clobber something we have already
4984 	 written to the stack.  Since most function calls on RISC machines
4985 	 do not use the stack, this is uncommon, but must work correctly.
4986 
4987 	 Therefore, we save any area of the stack that was already written
4988 	 and that we are using.  Here we set up to do this by making a new
4989 	 stack usage map from the old one.
4990 
4991 	 Another approach might be to try to reorder the argument
4992 	 evaluations to avoid this conflicting stack usage.  */
4993 
4994       needed = args_size.constant;
4995 
4996       /* Since we will be writing into the entire argument area, the
4997 	 map must be allocated for its entire size, not just the part that
4998 	 is the responsibility of the caller.  */
4999       if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
5000 	needed += reg_parm_stack_space;
5001 
5002       poly_int64 limit = needed;
5003       if (ARGS_GROW_DOWNWARD)
5004 	limit += 1;
5005 
5006       /* For polynomial sizes, this is the maximum possible size needed
5007 	 for arguments with a constant size and offset.  */
5008       HOST_WIDE_INT const_limit = constant_lower_bound (limit);
5009       highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
5010 					 const_limit);
5011 
5012       stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
5013       stack_usage_map = stack_usage_map_buf;
5014 
5015       if (initial_highest_arg_in_use)
5016 	memcpy (stack_usage_map, initial_stack_usage_map,
5017 		initial_highest_arg_in_use);
5018 
5019       if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
5020 	memset (&stack_usage_map[initial_highest_arg_in_use], 0,
5021 	       highest_outgoing_arg_in_use - initial_highest_arg_in_use);
5022       needed = 0;
5023 
5024       /* We must be careful to use virtual regs before they're instantiated,
5025 	 and real regs afterwards.  Loop optimization, for example, can create
5026 	 new libcalls after we've instantiated the virtual regs, and if we
5027 	 use virtuals anyway, they won't match the rtl patterns.  */
5028 
5029       if (virtuals_instantiated)
5030 	argblock = plus_constant (Pmode, stack_pointer_rtx,
5031 				  STACK_POINTER_OFFSET);
5032       else
5033 	argblock = virtual_outgoing_args_rtx;
5034     }
5035   else
5036     {
5037       if (!PUSH_ARGS)
5038 	argblock = push_block (gen_int_mode (args_size.constant, Pmode), 0, 0);
5039     }
5040 
5041   /* We push args individually in reverse order, perform stack alignment
5042      before the first push (the last arg).  */
5043   if (argblock == 0)
5044     anti_adjust_stack (gen_int_mode (args_size.constant
5045 				     - original_args_size.constant,
5046 				     Pmode));
5047 
5048   argnum = nargs - 1;
5049 
5050 #ifdef REG_PARM_STACK_SPACE
5051   if (ACCUMULATE_OUTGOING_ARGS)
5052     {
5053       /* The argument list is the property of the called routine, which
5054 	 may clobber it.  If the fixed area has been used for previous
5055 	 parameters, we must save and restore it.  */
5056       save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
5057 					    &low_to_save, &high_to_save);
5058     }
5059 #endif
5060 
5061   /* When expanding a normal call, args are stored in push order,
5062      which is the reverse of what we have here.  */
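  /* Let the target know, via its call_args hook, which registers will
     carry arguments; if none do, call the hook once with pc_rtx so it is
     still told that argument setup has begun.  */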
5063   bool any_regs = false;
5064   for (int i = nargs; i-- > 0; )
5065     if (argvec[i].reg != NULL_RTX)
5066       {
5067 	targetm.calls.call_args (argvec[i].reg, NULL_TREE);
5068 	any_regs = true;
5069       }
5070   if (!any_regs)
5071     targetm.calls.call_args (pc_rtx, NULL_TREE);
5072 
5073   /* Push the args that need to be pushed.  */
5074 
5075   have_push_fusage = false;
5076 
5077   /* ARGNUM indexes the ARGVEC array in the order in which the arguments
5078      are to be pushed.  */
5079   for (count = 0; count < nargs; count++, argnum--)
5080     {
5081       machine_mode mode = argvec[argnum].mode;
5082       rtx val = argvec[argnum].value;
5083       rtx reg = argvec[argnum].reg;
5084       int partial = argvec[argnum].partial;
5085       unsigned int parm_align = argvec[argnum].locate.boundary;
5086       poly_int64 lower_bound = 0, upper_bound = 0;
5087 
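      /* Arguments passed entirely in registers need no stack store;
	 everything else is pushed (or copied) to the stack here.  */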
5088       if (! (reg != 0 && partial == 0))
5089 	{
5090 	  rtx use;
5091 
5092 	  if (ACCUMULATE_OUTGOING_ARGS)
5093 	    {
5094 	      /* If this is being stored into a pre-allocated, fixed-size,
5095 		 stack area, save any previous data at that location.  */
5096 
5097 	      if (ARGS_GROW_DOWNWARD)
5098 		{
5099 		  /* stack_slot is negative, but we want to index stack_usage_map
5100 		     with positive values.  */
5101 		  upper_bound = -argvec[argnum].locate.slot_offset.constant + 1;
5102 		  lower_bound = upper_bound - argvec[argnum].locate.size.constant;
5103 		}
5104 	      else
5105 		{
5106 		  lower_bound = argvec[argnum].locate.slot_offset.constant;
5107 		  upper_bound = lower_bound + argvec[argnum].locate.size.constant;
5108 		}
5109 
5110 	      if (stack_region_maybe_used_p (lower_bound, upper_bound,
5111 					     reg_parm_stack_space))
5112 		{
5113 		  /* We need to make a save area.  */
5114 		  poly_uint64 size
5115 		    = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
5116 		  machine_mode save_mode
5117 		    = int_mode_for_size (size, 1).else_blk ();
5118 		  rtx adr
5119 		    = plus_constant (Pmode, argblock,
5120 				     argvec[argnum].locate.offset.constant);
5121 		  rtx stack_area
5122 		    = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
5123 
5124 		  if (save_mode == BLKmode)
5125 		    {
5126 		      argvec[argnum].save_area
5127 			= assign_stack_temp (BLKmode,
5128 					     argvec[argnum].locate.size.constant
5129 					     );
5130 
5131 		      emit_block_move (validize_mem
5132 				         (copy_rtx (argvec[argnum].save_area)),
5133 				       stack_area,
5134 				       (gen_int_mode
5135 					(argvec[argnum].locate.size.constant,
5136 					 Pmode)),
5137 				       BLOCK_OP_CALL_PARM);
5138 		    }
5139 		  else
5140 		    {
5141 		      argvec[argnum].save_area = gen_reg_rtx (save_mode);
5142 
5143 		      emit_move_insn (argvec[argnum].save_area, stack_area);
5144 		    }
5145 		}
5146 	    }
5147 
5148 	  emit_push_insn (val, mode, NULL_TREE, NULL_RTX, parm_align,
5149 			  partial, reg, 0, argblock,
5150 			  (gen_int_mode
5151 			   (argvec[argnum].locate.offset.constant, Pmode)),
5152 			  reg_parm_stack_space,
5153 			  ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad), false);
5154 
5155 	  /* Now mark the segment we just used.  */
5156 	  if (ACCUMULATE_OUTGOING_ARGS)
5157 	    mark_stack_region_used (lower_bound, upper_bound);
5158 
5159 	  NO_DEFER_POP;
5160 
5161 	  /* Indicate argument access so that alias.c knows that these
5162 	     values are live.  */
5163 	  if (argblock)
5164 	    use = plus_constant (Pmode, argblock,
5165 				 argvec[argnum].locate.offset.constant);
5166 	  else if (have_push_fusage)
5167 	    continue;
5168 	  else
5169 	    {
5170 	      /* When arguments are pushed, trying to tell alias.c where
5171 		 exactly this argument is won't work, because the
5172 		 auto-increment causes confusion.  So we merely indicate
5173 		 that we access something with a known mode somewhere on
5174 		 the stack.  */
5175 	      use = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
5176 				  gen_rtx_SCRATCH (Pmode));
5177 	      have_push_fusage = true;
5178 	    }
5179 	  use = gen_rtx_MEM (argvec[argnum].mode, use);
5180 	  use = gen_rtx_USE (VOIDmode, use);
5181 	  call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
5182 	}
5183     }
5184 
5185   argnum = nargs - 1;
5186 
5187   fun = prepare_call_address (NULL, fun, NULL, &call_fusage, 0, 0);
5188 
5189   /* Now load any reg parms into their regs.  */
5190 
5191   /* ARGNUM indexes the ARGVEC array in the order in which the arguments
5192      are to be pushed.  */
5193   for (count = 0; count < nargs; count++, argnum--)
5194     {
5195       machine_mode mode = argvec[argnum].mode;
5196       rtx val = argvec[argnum].value;
5197       rtx reg = argvec[argnum].reg;
5198       int partial = argvec[argnum].partial;
5199 
5200       /* Handle calls that pass values in multiple non-contiguous
5201 	 locations.  The PA64 has examples of this for library calls.  */
5202       if (reg != 0 && GET_CODE (reg) == PARALLEL)
5203 	emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
5204       else if (reg != 0 && partial == 0)
5205         {
5206 	  emit_move_insn (reg, val);
5207 #ifdef BLOCK_REG_PADDING
5208 	  poly_int64 size = GET_MODE_SIZE (argvec[argnum].mode);
5209 
5210 	  /* Copied from load_register_parameters.  */
5211 
5212 	  /* Handle case where we have a value that needs shifting
5213 	     up to the msb.  eg. a QImode value and we're padding
5214 	     up to the msb, e.g. a QImode value when we're padding
5215 	  if (known_lt (size, UNITS_PER_WORD)
5216 	      && (argvec[argnum].locate.where_pad
5217 		  == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
5218 	    {
5219 	      rtx x;
5220 	      poly_int64 shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
5221 
5222 	      /* Assigning REG here rather than a temp makes CALL_FUSAGE
5223 		 report the whole reg as used.  Strictly speaking, the
5224 		 call only uses SIZE bytes at the msb end, but it doesn't
5225 		 seem worth generating rtl to say that.  */
5226 	      reg = gen_rtx_REG (word_mode, REGNO (reg));
5227 	      x = expand_shift (LSHIFT_EXPR, word_mode, reg, shift, reg, 1);
5228 	      if (x != reg)
5229 		emit_move_insn (reg, x);
5230 	    }
5231 #endif
5232 	}
5233 
5234       NO_DEFER_POP;
5235     }
5236 
5237   /* Any regs containing parms remain in use through the call.  */
5238   for (count = 0; count < nargs; count++)
5239     {
5240       rtx reg = argvec[count].reg;
5241       if (reg != 0 && GET_CODE (reg) == PARALLEL)
5242 	use_group_regs (&call_fusage, reg);
5243       else if (reg != 0)
5244         {
5245 	  int partial = argvec[count].partial;
5246 	  if (partial)
5247 	    {
5248 	      int nregs;
5249               gcc_assert (partial % UNITS_PER_WORD == 0);
5250 	      nregs = partial / UNITS_PER_WORD;
5251 	      use_regs (&call_fusage, REGNO (reg), nregs);
5252 	    }
5253 	  else
5254 	    use_reg (&call_fusage, reg);
5255 	}
5256     }
5257 
5258   /* Pass the function the address in which to return a structure value.  */
5259   if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
5260     {
5261       emit_move_insn (struct_value,
5262 		      force_reg (Pmode,
5263 				 force_operand (XEXP (mem_value, 0),
5264 						NULL_RTX)));
5265       if (REG_P (struct_value))
5266 	use_reg (&call_fusage, struct_value);
5267     }
5268 
5269   /* Don't allow popping to be deferred, since then
5270      cse'ing of library calls could delete a call and leave the pop.  */
5271   NO_DEFER_POP;
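  /* If the result does not come back in memory, hard_libcall_value tells
     us which hard register a libcall returning OUTMODE uses.  */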
5272   valreg = (mem_value == 0 && outmode != VOIDmode
5273 	    ? hard_libcall_value (outmode, orgfun) : NULL_RTX);
5274 
5275   /* Stack must be properly aligned now.  */
5276   gcc_assert (multiple_p (stack_pointer_delta,
5277 			  PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT));
5278 
5279   before_call = get_last_insn ();
5280 
5281   /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
5282      will set inhibit_defer_pop to that value.  */
5283   /* The return type is needed to decide how many bytes the function pops.
5284      Signedness plays no role in that, so for simplicity, we pretend it's
5285      always signed.  We also assume that the list of arguments passed has
5286      no impact, so we pretend it is unknown.  */
5287 
5288   emit_call_1 (fun, NULL,
5289 	       get_identifier (XSTR (orgfun, 0)),
5290 	       build_function_type (tfom, NULL_TREE),
5291 	       original_args_size.constant, args_size.constant,
5292 	       struct_value_size,
5293 	       targetm.calls.function_arg (args_so_far,
5294 					   VOIDmode, void_type_node, true),
5295 	       valreg,
5296 	       old_inhibit_defer_pop + 1, call_fusage, flags, args_so_far);
5297 
5298   if (flag_ipa_ra)
5299     {
5300       rtx datum = orgfun;
5301       gcc_assert (GET_CODE (datum) == SYMBOL_REF);
5302       rtx_call_insn *last = last_call_insn ();
5303       add_reg_note (last, REG_CALL_DECL, datum);
5304     }
5305 
5306   /* Right-shift returned value if necessary.  */
5307   if (!pcc_struct_value
5308       && TYPE_MODE (tfom) != BLKmode
5309       && targetm.calls.return_in_msb (tfom))
5310     {
5311       shift_return_value (TYPE_MODE (tfom), false, valreg);
5312       valreg = gen_rtx_REG (TYPE_MODE (tfom), REGNO (valreg));
5313     }
5314 
5315   targetm.calls.end_call_args ();
5316 
5317   /* For calls to `setjmp', etc., inform function.c:setjmp_warnings
5318      that it should complain if nonvolatile values are live.  For
5319      functions that cannot return, inform flow that control does not
5320      fall through.  */
5321   if (flags & ECF_NORETURN)
5322     {
5323       /* The barrier note must be emitted
5324       /* The barrier must be emitted
5325 	 just a CALL_INSN above, so we must search for it here.  */
5326       rtx_insn *last = get_last_insn ();
5327       while (!CALL_P (last))
5328 	{
5329 	  last = PREV_INSN (last);
5330 	  /* There was no CALL_INSN?  */
5331 	  gcc_assert (last != before_call);
5332 	}
5333 
5334       emit_barrier_after (last);
5335     }
5336 
5337   /* Consider that "regular" libcalls, i.e. all of them except for LCT_THROW
5338      and LCT_RETURNS_TWICE, cannot perform non-local gotos.  */
5339   if (flags & ECF_NOTHROW)
5340     {
5341       rtx_insn *last = get_last_insn ();
5342       while (!CALL_P (last))
5343 	{
5344 	  last = PREV_INSN (last);
5345 	  /* There was no CALL_INSN?  */
5346 	  gcc_assert (last != before_call);
5347 	}
5348 
5349       make_reg_eh_region_note_nothrow_nononlocal (last);
5350     }
5351 
5352   /* Now restore inhibit_defer_pop to its actual original value.  */
5353   OK_DEFER_POP;
5354 
5355   pop_temp_slots ();
5356 
5357   /* Copy the value to the right place.  */
5358   if (outmode != VOIDmode && retval)
5359     {
5360       if (mem_value)
5361 	{
5362 	  if (value == 0)
5363 	    value = mem_value;
5364 	  if (value != mem_value)
5365 	    emit_move_insn (value, mem_value);
5366 	}
5367       else if (GET_CODE (valreg) == PARALLEL)
5368 	{
5369 	  if (value == 0)
5370 	    value = gen_reg_rtx (outmode);
5371 	  emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
5372 	}
5373       else
5374 	{
5375 	  /* Convert to the proper mode if a promotion has been active.  */
5376 	  if (GET_MODE (valreg) != outmode)
5377 	    {
5378 	      int unsignedp = TYPE_UNSIGNED (tfom);
5379 
5380 	      gcc_assert (promote_function_mode (tfom, outmode, &unsignedp,
5381 						 fndecl ? TREE_TYPE (fndecl) : fntype, 1)
5382 			  == GET_MODE (valreg));
5383 	      valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0);
5384 	    }
5385 
5386 	  if (value != 0)
5387 	    emit_move_insn (value, valreg);
5388 	  else
5389 	    value = valreg;
5390 	}
5391     }
5392 
5393   if (ACCUMULATE_OUTGOING_ARGS)
5394     {
5395 #ifdef REG_PARM_STACK_SPACE
5396       if (save_area)
5397 	restore_fixed_argument_area (save_area, argblock,
5398 				     high_to_save, low_to_save);
5399 #endif
5400 
5401       /* If we saved any argument areas, restore them.  */
5402       for (count = 0; count < nargs; count++)
5403 	if (argvec[count].save_area)
5404 	  {
5405 	    machine_mode save_mode = GET_MODE (argvec[count].save_area);
5406 	    rtx adr = plus_constant (Pmode, argblock,
5407 				     argvec[count].locate.offset.constant);
5408 	    rtx stack_area = gen_rtx_MEM (save_mode,
5409 					  memory_address (save_mode, adr));
5410 
5411 	    if (save_mode == BLKmode)
5412 	      emit_block_move (stack_area,
5413 			       validize_mem
5414 			         (copy_rtx (argvec[count].save_area)),
5415 			       (gen_int_mode
5416 				(argvec[count].locate.size.constant, Pmode)),
5417 			       BLOCK_OP_CALL_PARM);
5418 	    else
5419 	      emit_move_insn (stack_area, argvec[count].save_area);
5420 	  }
5421 
5422       highest_outgoing_arg_in_use = initial_highest_arg_in_use;
5423       stack_usage_map = initial_stack_usage_map;
5424       stack_usage_watermark = initial_stack_usage_watermark;
5425     }
5426 
5427   free (stack_usage_map_buf);
5428 
5429   return value;
5430 
5431 }
5432 
5433 
5434 /* Store pointer bounds argument ARG into the Bounds Table entry
5435    associated with PARM.  */
5436 static void
5437 store_bounds (struct arg_data *arg, struct arg_data *parm)
5438 {
5439   rtx slot = NULL, ptr = NULL, addr = NULL;
5440 
5441   /* We may pass bounds not associated with any pointer.  */
5442   if (!parm)
5443     {
5444       gcc_assert (arg->special_slot);
5445       slot = arg->special_slot;
5446       ptr = const0_rtx;
5447     }
5448   /* Find pointer associated with bounds and where it is
5449      passed.  */
5450   else
5451     {
5452       if (!parm->reg)
5453 	{
5454 	  gcc_assert (!arg->special_slot);
5455 
5456 	  addr = adjust_address (parm->stack, Pmode, arg->pointer_offset);
5457 	}
5458       else if (REG_P (parm->reg))
5459 	{
5460 	  gcc_assert (arg->special_slot);
5461 	  slot = arg->special_slot;
5462 
5463 	  if (MEM_P (parm->value))
5464 	    addr = adjust_address (parm->value, Pmode, arg->pointer_offset);
5465 	  else if (REG_P (parm->value))
5466 	    ptr = gen_rtx_SUBREG (Pmode, parm->value, arg->pointer_offset);
5467 	  else
5468 	    {
5469 	      gcc_assert (!arg->pointer_offset);
5470 	      ptr = parm->value;
5471 	    }
5472 	}
5473       else
5474 	{
5475 	  gcc_assert (GET_CODE (parm->reg) == PARALLEL);
5476 
5477 	  gcc_assert (arg->special_slot);
5478 	  slot = arg->special_slot;
5479 
5480 	  if (parm->parallel_value)
5481 	    ptr = chkp_get_value_with_offs (parm->parallel_value,
5482 					    GEN_INT (arg->pointer_offset));
5483 	  else
5484 	    gcc_unreachable ();
5485 	}
5486     }
5487 
5488   /* Expand bounds.  */
5489   if (!arg->value)
5490     arg->value = expand_normal (arg->tree_value);
5491 
5492   targetm.calls.store_bounds_for_arg (ptr, addr, arg->value, slot);
5493 }
5494 
5495 /* Store a single argument for a function call
5496    into the register or memory area where it must be passed.
5497    *ARG describes the argument value and where to pass it.
5498 
5499    ARGBLOCK is the address of the stack-block for all the arguments,
5500    or 0 on a machine where arguments are pushed individually.
5501 
5502    MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
5503    so we must be careful about how the stack is used.
5504 
5505    VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
5506    argument stack.  This is used when ACCUMULATE_OUTGOING_ARGS is nonzero
5507    to indicate that we need not worry about saving and restoring the stack.
5508 
5509    FNDECL is the declaration of the function we are calling.
5510 
5511    Return nonzero if this arg should cause sibcall failure,
5512    zero otherwise.  */
5513 
5514 static int
5515 store_one_arg (struct arg_data *arg, rtx argblock, int flags,
5516 	       int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
5517 {
5518   tree pval = arg->tree_value;
5519   rtx reg = 0;
5520   int partial = 0;
5521   poly_int64 used = 0;
5522   poly_int64 lower_bound = 0, upper_bound = 0;
5523   int sibcall_failure = 0;
5524 
5525   if (TREE_CODE (pval) == ERROR_MARK)
5526     return 1;
5527 
5528   /* Push a new temporary level for any temporaries we make for
5529      this argument.  */
5530   push_temp_slots ();
5531 
5532   if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
5533     {
5534       /* If this is being stored into a pre-allocated, fixed-size, stack area,
5535 	 save any previous data at that location.  */
5536       if (argblock && ! variable_size && arg->stack)
5537 	{
5538 	  if (ARGS_GROW_DOWNWARD)
5539 	    {
5540 	      /* stack_slot is negative, but we want to index stack_usage_map
5541 		 with positive values.  */
5542 	      if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
5543 		{
5544 		  rtx offset = XEXP (XEXP (arg->stack_slot, 0), 1);
5545 		  upper_bound = -rtx_to_poly_int64 (offset) + 1;
5546 		}
5547 	      else
5548 		upper_bound = 0;
5549 
5550 	      lower_bound = upper_bound - arg->locate.size.constant;
5551 	    }
5552 	  else
5553 	    {
5554 	      if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
5555 		{
5556 		  rtx offset = XEXP (XEXP (arg->stack_slot, 0), 1);
5557 		  lower_bound = rtx_to_poly_int64 (offset);
5558 		}
5559 	      else
5560 		lower_bound = 0;
5561 
5562 	      upper_bound = lower_bound + arg->locate.size.constant;
5563 	    }
5564 
5565 	  if (stack_region_maybe_used_p (lower_bound, upper_bound,
5566 					 reg_parm_stack_space))
5567 	    {
5568 	      /* We need to make a save area.  */
5569 	      poly_uint64 size = arg->locate.size.constant * BITS_PER_UNIT;
5570 	      machine_mode save_mode
5571 		= int_mode_for_size (size, 1).else_blk ();
5572 	      rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
5573 	      rtx stack_area = gen_rtx_MEM (save_mode, adr);
5574 
5575 	      if (save_mode == BLKmode)
5576 		{
5577 		  arg->save_area
5578 		    = assign_temp (TREE_TYPE (arg->tree_value), 1, 1);
5579 		  preserve_temp_slots (arg->save_area);
5580 		  emit_block_move (validize_mem (copy_rtx (arg->save_area)),
5581 				   stack_area,
5582 				   (gen_int_mode
5583 				    (arg->locate.size.constant, Pmode)),
5584 				   BLOCK_OP_CALL_PARM);
5585 		}
5586 	      else
5587 		{
5588 		  arg->save_area = gen_reg_rtx (save_mode);
5589 		  emit_move_insn (arg->save_area, stack_area);
5590 		}
5591 	    }
5592 	}
5593     }
5594 
5595   /* If this isn't going to be placed on both the stack and in registers,
5596      set up the register and number of words.  */
5597   if (! arg->pass_on_stack)
5598     {
5599       if (flags & ECF_SIBCALL)
5600 	reg = arg->tail_call_reg;
5601       else
5602 	reg = arg->reg;
5603       partial = arg->partial;
5604     }
5605 
5606   /* Being passed entirely in a register.  We shouldn't be called in
5607      this case.  */
5608   gcc_assert (reg == 0 || partial != 0);
5609 
5610   /* If this arg needs special alignment, don't load the registers
5611      here.  */
5612   if (arg->n_aligned_regs != 0)
5613     reg = 0;
5614 
5615   /* If this is being passed partially in a register, we can't evaluate
5616      it directly into its stack slot.  Otherwise, we can.  */
5617   if (arg->value == 0)
5618     {
5619       /* stack_arg_under_construction is nonzero if a function argument is
5620 	 being evaluated directly into the outgoing argument list and
5621 	 expand_call must take special action to preserve the argument list
5622 	 if it is called recursively.
5623 
5624 	 For scalar function arguments stack_usage_map is sufficient to
5625 	 determine which stack slots must be saved and restored.  Scalar
5626 	 arguments in general have pass_on_stack == 0.
5627 
5628 	 If this argument is initialized by a function which takes the
5629 	 address of the argument (a C++ constructor or a C function
5630 	 returning a BLKmode structure), then stack_usage_map is
5631 	 insufficient and expand_call must push the stack around the
5632 	 function call.  Such arguments have pass_on_stack == 1.
5633 
5634 	 Note that it is always safe to set stack_arg_under_construction,
5635 	 but this generates suboptimal code if set when not needed.  */
5636 
5637       if (arg->pass_on_stack)
5638 	stack_arg_under_construction++;
5639 
5640       arg->value = expand_expr (pval,
5641 				(partial
5642 				 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
5643 				? NULL_RTX : arg->stack,
5644 				VOIDmode, EXPAND_STACK_PARM);
5645 
5646       /* If we are promoting object (or for any other reason) the mode
5647 	 doesn't agree, convert the mode.  */
5648 
5649       if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
5650 	arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
5651 				    arg->value, arg->unsignedp);
5652 
5653       if (arg->pass_on_stack)
5654 	stack_arg_under_construction--;
5655     }
5656 
5657   /* Check for overlap with already clobbered argument area.  */
5658   if ((flags & ECF_SIBCALL)
5659       && MEM_P (arg->value)
5660       && mem_might_overlap_already_clobbered_arg_p (XEXP (arg->value, 0),
5661 						    arg->locate.size.constant))
5662     sibcall_failure = 1;
5663 
5664   /* Don't allow anything left on stack from computation
5665      of argument to alloca.  */
5666   if (flags & ECF_MAY_BE_ALLOCA)
5667     do_pending_stack_adjust ();
5668 
5669   if (arg->value == arg->stack)
5670     /* If the value is already in the stack slot, we are done.  */
5671     ;
5672   else if (arg->mode != BLKmode)
5673     {
5674       unsigned int parm_align;
5675 
5676       /* Argument is a scalar, not entirely passed in registers.
5677 	 (If part is passed in registers, arg->partial says how much
5678 	 and emit_push_insn will take care of putting it there.)
5679 
5680 	 Push it, and if its size is less than the
5681 	 amount of space allocated to it,
5682 	 also bump stack pointer by the additional space.
5683 	 Note that in C the default argument promotions
5684 	 will prevent such mismatches.  */
5685 
5686       poly_int64 size = (TYPE_EMPTY_P (TREE_TYPE (pval))
5687 			 ? 0 : GET_MODE_SIZE (arg->mode));
5688 
5689       /* Compute how much space the push instruction will push.
5690 	 On many machines, pushing a byte will advance the stack
5691 	 pointer by a halfword.  */
5692 #ifdef PUSH_ROUNDING
5693       size = PUSH_ROUNDING (size);
5694 #endif
5695       used = size;
5696 
5697       /* Compute how much space the argument should get:
5698 	 round up to a multiple of the alignment for arguments.  */
5699       if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
5700 	  != PAD_NONE)
5701 	/* At the moment we don't (need to) support ABIs for which the
5702 	   padding isn't known at compile time.  In principle it should
5703 	   be easy to add though.  */
5704 	used = force_align_up (size, PARM_BOUNDARY / BITS_PER_UNIT);
5705 
5706       /* Compute the alignment of the pushed argument.  */
5707       parm_align = arg->locate.boundary;
5708       if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
5709 	  == PAD_DOWNWARD)
5710 	{
5711 	  poly_int64 pad = used - size;
5712 	  unsigned int pad_align = known_alignment (pad) * BITS_PER_UNIT;
5713 	  if (pad_align != 0)
5714 	    parm_align = MIN (parm_align, pad_align);
5715 	}
5716 
5717       /* This isn't already where we want it on the stack, so put it there.
5718 	 This can either be done with push or copy insns.  */
5719       if (maybe_ne (used, 0)
5720 	  && !emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval),
5721 			      NULL_RTX, parm_align, partial, reg, used - size,
5722 			      argblock, ARGS_SIZE_RTX (arg->locate.offset),
5723 			      reg_parm_stack_space,
5724 			      ARGS_SIZE_RTX (arg->locate.alignment_pad), true))
5725 	sibcall_failure = 1;
5726 
5727       /* Unless this is a partially-in-register argument, the argument is now
5728 	 in the stack.  */
5729       if (partial == 0)
5730 	arg->value = arg->stack;
5731     }
5732   else
5733     {
5734       /* BLKmode, at least partly to be pushed.  */
5735 
5736       unsigned int parm_align;
5737       poly_int64 excess;
5738       rtx size_rtx;
5739 
5740       /* Pushing a nonscalar.
5741 	 If part is passed in registers, PARTIAL says how much
5742 	 and emit_push_insn will take care of putting it there.  */
5743 
5744       /* Round its size up to a multiple
5745 	 of the allocation unit for arguments.  */
5746 
5747       if (arg->locate.size.var != 0)
5748 	{
5749 	  excess = 0;
5750 	  size_rtx = ARGS_SIZE_RTX (arg->locate.size);
5751 	}
5752       else
5753 	{
5754 	  /* PUSH_ROUNDING has no effect on us, because emit_push_insn
5755 	     for BLKmode is careful to avoid it.  */
5756 	  excess = (arg->locate.size.constant
5757 		    - arg_int_size_in_bytes (TREE_TYPE (pval))
5758 		    + partial);
5759 	  size_rtx = expand_expr (arg_size_in_bytes (TREE_TYPE (pval)),
5760 				  NULL_RTX, TYPE_MODE (sizetype),
5761 				  EXPAND_NORMAL);
5762 	}
5763 
5764       parm_align = arg->locate.boundary;
5765 
5766       /* When an argument is padded down, the block is aligned to
5767 	 PARM_BOUNDARY, but the actual argument isn't.  */
5768       if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
5769 	  == PAD_DOWNWARD)
5770 	{
5771 	  if (arg->locate.size.var)
5772 	    parm_align = BITS_PER_UNIT;
5773 	  else
5774 	    {
5775 	      unsigned int excess_align
5776 		= known_alignment (excess) * BITS_PER_UNIT;
5777 	      if (excess_align != 0)
5778 		parm_align = MIN (parm_align, excess_align);
5779 	    }
5780 	}
5781 
5782       if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
5783 	{
5784 	  /* emit_push_insn might not work properly if arg->value and
5785 	     argblock + arg->locate.offset areas overlap.  */
5786 	  rtx x = arg->value;
5787 	  poly_int64 i = 0;
5788 
5789 	  if (XEXP (x, 0) == crtl->args.internal_arg_pointer
5790 	      || (GET_CODE (XEXP (x, 0)) == PLUS
5791 		  && XEXP (XEXP (x, 0), 0) ==
5792 		     crtl->args.internal_arg_pointer
5793 		  && CONST_INT_P (XEXP (XEXP (x, 0), 1))))
5794 	    {
5795 	      if (XEXP (x, 0) != crtl->args.internal_arg_pointer)
5796 		i = rtx_to_poly_int64 (XEXP (XEXP (x, 0), 1));
5797 
5798 	      /* arg.locate doesn't contain the pretend_args_size offset;
5799 		 it's part of argblock.  Ensure we don't count it in I.  */
5800 	      if (STACK_GROWS_DOWNWARD)
5801 		i -= crtl->args.pretend_args_size;
5802 	      else
5803 		i += crtl->args.pretend_args_size;
5804 
5805 	      /* expand_call should ensure this.  */
5806 	      gcc_assert (!arg->locate.offset.var
5807 			  && arg->locate.size.var == 0);
5808 	      poly_int64 size_val = rtx_to_poly_int64 (size_rtx);
5809 
5810 	      if (known_eq (arg->locate.offset.constant, i))
5811 		{
5812 		  /* Even though they appear to be at the same location,
5813 		     if part of the outgoing argument is in registers,
5814 		     they aren't really at the same location.  Check for
5815 		     this by making sure that the incoming size is the
5816 		     same as the outgoing size.  */
5817 		  if (maybe_ne (arg->locate.size.constant, size_val))
5818 		    sibcall_failure = 1;
5819 		}
5820 	      else if (maybe_in_range_p (arg->locate.offset.constant,
5821 					 i, size_val))
5822 		sibcall_failure = 1;
5823 	      /* Use arg->locate.size.constant instead of size_rtx
5824 		 because we only care about the part of the argument
5825 		 on the stack.  */
5826 	      else if (maybe_in_range_p (i, arg->locate.offset.constant,
5827 					 arg->locate.size.constant))
5828 		sibcall_failure = 1;
5829 	    }
5830 	}
5831 
5832       if (!CONST_INT_P (size_rtx) || INTVAL (size_rtx) != 0)
5833 	emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
5834 			parm_align, partial, reg, excess, argblock,
5835 			ARGS_SIZE_RTX (arg->locate.offset),
5836 			reg_parm_stack_space,
5837 			ARGS_SIZE_RTX (arg->locate.alignment_pad), false);
5838 
5839       /* Unless this is a partially-in-register argument, the argument is now
5840 	 in the stack.
5841 
5842 	 ??? Unlike the case above, in which we want the actual
5843 	 address of the data, so that we can load it directly into a
5844 	 register, here we want the address of the stack slot, so that
5845 	 it's properly aligned for word-by-word copying or something
5846 	 like that.  It's not clear that this is always correct.  */
5847       if (partial == 0)
5848 	arg->value = arg->stack_slot;
5849     }
5850 
5851   if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
5852     {
5853       tree type = TREE_TYPE (arg->tree_value);
5854       arg->parallel_value
5855 	= emit_group_load_into_temps (arg->reg, arg->value, type,
5856 				      int_size_in_bytes (type));
5857     }
5858 
5859   /* Mark all slots this store used.  */
5860   if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
5861       && argblock && ! variable_size && arg->stack)
5862     mark_stack_region_used (lower_bound, upper_bound);
5863 
5864   /* Once we have pushed something, pops can't safely
5865      be deferred during the rest of the arguments.  */
5866   NO_DEFER_POP;
5867 
5868   /* Free any temporary slots made in processing this argument.  */
5869   pop_temp_slots ();
5870 
5871   return sibcall_failure;
5872 }
5873 
5874 /* Nonzero if we do not know how to pass TYPE solely in registers.  */
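/* For instance, a type whose size is not a compile-time constant, or a
   type the front end marks TREE_ADDRESSABLE (typically because it must
   be constructed in memory), cannot be passed purely in registers.  */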
5875 
5876 bool
5877 must_pass_in_stack_var_size (machine_mode mode ATTRIBUTE_UNUSED,
5878 			     const_tree type)
5879 {
5880   if (!type)
5881     return false;
5882 
5883   /* If the type has variable size...  */
5884   if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5885     return true;
5886 
5887   /* If the type is marked as addressable (it is required
5888      to be constructed into the stack)...  */
5889   if (TREE_ADDRESSABLE (type))
5890     return true;
5891 
5892   return false;
5893 }
5894 
5895 /* Another version of the TARGET_MUST_PASS_IN_STACK hook.  This one
5896    takes trailing padding of a structure into account.  */
5897 /* ??? Should be able to merge these two by examining BLOCK_REG_PADDING.  */
5898 
5899 bool
5900 must_pass_in_stack_var_size_or_pad (machine_mode mode, const_tree type)
5901 {
5902   if (!type)
5903     return false;
5904 
5905   /* If the type has variable size...  */
5906   if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5907     return true;
5908 
5909   /* If the type is marked as addressable (it is required
5910      to be constructed into the stack)...  */
5911   if (TREE_ADDRESSABLE (type))
5912     return true;
5913 
5914   if (TYPE_EMPTY_P (type))
5915     return false;
5916 
5917   /* If the padding and mode of the type are such that a copy into
5918      a register would put it into the wrong part of the register.  */
5919   if (mode == BLKmode
5920       && int_size_in_bytes (type) % (PARM_BOUNDARY / BITS_PER_UNIT)
5921       && (targetm.calls.function_arg_padding (mode, type)
5922 	  == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
5923     return true;
5924 
5925   return false;
5926 }
5927 
5928 /* Tell the garbage collector about GTY markers in this source file.  */
5929 #include "gt-calls.h"
5930