xref: /netbsd/external/gpl3/gcc.old/dist/gcc/calls.c (revision ec02198a)
110d565efSmrg /* Convert function calls to rtl insns, for GNU C compiler.
2*ec02198aSmrg    Copyright (C) 1989-2020 Free Software Foundation, Inc.
310d565efSmrg 
410d565efSmrg This file is part of GCC.
510d565efSmrg 
610d565efSmrg GCC is free software; you can redistribute it and/or modify it under
710d565efSmrg the terms of the GNU General Public License as published by the Free
810d565efSmrg Software Foundation; either version 3, or (at your option) any later
910d565efSmrg version.
1010d565efSmrg 
1110d565efSmrg GCC is distributed in the hope that it will be useful, but WITHOUT ANY
1210d565efSmrg WARRANTY; without even the implied warranty of MERCHANTABILITY or
1310d565efSmrg FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
1410d565efSmrg for more details.
1510d565efSmrg 
1610d565efSmrg You should have received a copy of the GNU General Public License
1710d565efSmrg along with GCC; see the file COPYING3.  If not see
1810d565efSmrg <http://www.gnu.org/licenses/>.  */
1910d565efSmrg 
2010d565efSmrg #include "config.h"
2110d565efSmrg #include "system.h"
2210d565efSmrg #include "coretypes.h"
2310d565efSmrg #include "backend.h"
2410d565efSmrg #include "target.h"
2510d565efSmrg #include "rtl.h"
2610d565efSmrg #include "tree.h"
2710d565efSmrg #include "gimple.h"
2810d565efSmrg #include "predict.h"
2910d565efSmrg #include "memmodel.h"
3010d565efSmrg #include "tm_p.h"
3110d565efSmrg #include "stringpool.h"
3210d565efSmrg #include "expmed.h"
3310d565efSmrg #include "optabs.h"
3410d565efSmrg #include "emit-rtl.h"
3510d565efSmrg #include "cgraph.h"
3610d565efSmrg #include "diagnostic-core.h"
3710d565efSmrg #include "fold-const.h"
3810d565efSmrg #include "stor-layout.h"
3910d565efSmrg #include "varasm.h"
4010d565efSmrg #include "internal-fn.h"
4110d565efSmrg #include "dojump.h"
4210d565efSmrg #include "explow.h"
4310d565efSmrg #include "calls.h"
4410d565efSmrg #include "expr.h"
4510d565efSmrg #include "output.h"
4610d565efSmrg #include "langhooks.h"
4710d565efSmrg #include "except.h"
4810d565efSmrg #include "dbgcnt.h"
4910d565efSmrg #include "rtl-iter.h"
5010d565efSmrg #include "tree-vrp.h"
5110d565efSmrg #include "tree-ssanames.h"
52c7a68eb7Smrg #include "tree-ssa-strlen.h"
5310d565efSmrg #include "intl.h"
54c7a68eb7Smrg #include "stringpool.h"
55*ec02198aSmrg #include "hash-map.h"
56*ec02198aSmrg #include "hash-traits.h"
57c7a68eb7Smrg #include "attribs.h"
58c7a68eb7Smrg #include "builtins.h"
59c7a68eb7Smrg #include "gimple-fold.h"
6010d565efSmrg 
6110d565efSmrg /* Like PREFERRED_STACK_BOUNDARY but in units of bytes, not bits.  */
6210d565efSmrg #define STACK_BYTES (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)
6310d565efSmrg 
6410d565efSmrg /* Data structure and subroutines used within expand_call.  */
6510d565efSmrg 
/* Everything expand_call needs to know about one actual argument.  */
6610d565efSmrg struct arg_data
6710d565efSmrg {
6810d565efSmrg   /* Tree node for this argument.  */
6910d565efSmrg   tree tree_value;
7010d565efSmrg   /* Mode for value; TYPE_MODE unless promoted.  */
7110d565efSmrg   machine_mode mode;
7210d565efSmrg   /* Current RTL value for argument, or 0 if it isn't precomputed.  */
7310d565efSmrg   rtx value;
7410d565efSmrg   /* Initially-computed RTL value for argument; only for const functions.  */
7510d565efSmrg   rtx initial_value;
7610d565efSmrg   /* Register to pass this argument in, 0 if passed on stack, or a
7710d565efSmrg      PARALLEL if the arg is to be copied into multiple non-contiguous
7810d565efSmrg      registers.  */
7910d565efSmrg   rtx reg;
8010d565efSmrg   /* Register to pass this argument in when generating tail call sequence.
8110d565efSmrg      This is not the same register as for normal calls on machines with
8210d565efSmrg      register windows.  */
8310d565efSmrg   rtx tail_call_reg;
8410d565efSmrg   /* If REG is a PARALLEL, this is a copy of VALUE pulled into the correct
8510d565efSmrg      form for emit_group_move.  */
8610d565efSmrg   rtx parallel_value;
8710d565efSmrg   /* If REG was promoted from the actual mode of the argument expression,
8810d565efSmrg      indicates whether the promotion is sign- or zero-extended.  */
8910d565efSmrg   int unsignedp;
9010d565efSmrg   /* Number of bytes to put in registers.  0 means put the whole arg
9110d565efSmrg      in registers.  Also 0 if not passed in registers.  */
9210d565efSmrg   int partial;
9310d565efSmrg   /* Nonzero if argument must be passed on stack.
9410d565efSmrg      Note that some arguments may be passed on the stack
9510d565efSmrg      even though pass_on_stack is zero, just because FUNCTION_ARG says so.
9610d565efSmrg      pass_on_stack identifies arguments that *cannot* go in registers.  */
9710d565efSmrg   int pass_on_stack;
9810d565efSmrg   /* Some fields packaged up for locate_and_pad_parm.  */
9910d565efSmrg   struct locate_and_pad_arg_data locate;
10010d565efSmrg   /* Location on the stack at which parameter should be stored.  The store
10110d565efSmrg      has already been done if STACK == VALUE.  */
10210d565efSmrg   rtx stack;
10310d565efSmrg   /* Location on the stack of the start of this argument slot.  This can
10410d565efSmrg      differ from STACK if this arg pads downward.  This location is known
10510d565efSmrg      to be aligned to TARGET_FUNCTION_ARG_BOUNDARY.  */
10610d565efSmrg   rtx stack_slot;
10710d565efSmrg   /* Place that this stack area has been saved, if needed.  */
10810d565efSmrg   rtx save_area;
10910d565efSmrg   /* If an argument's alignment does not permit direct copying into registers,
11010d565efSmrg      copy in smaller-sized pieces into pseudos.  These are stored in a
11110d565efSmrg      block pointed to by this field.  The next field says how many
11210d565efSmrg      word-sized pseudos we made.  */
11310d565efSmrg   rtx *aligned_regs;
11410d565efSmrg   int n_aligned_regs;
11510d565efSmrg };
11610d565efSmrg 
11710d565efSmrg /* A vector of one char per byte of stack space.  A byte is nonzero if
11810d565efSmrg    the corresponding stack location has been used.
11910d565efSmrg    This vector is used to prevent a function call within an argument from
12010d565efSmrg    clobbering any stack already set up.  */
12110d565efSmrg static char *stack_usage_map;
12210d565efSmrg 
12310d565efSmrg /* Size of STACK_USAGE_MAP.  */
124c7a68eb7Smrg static unsigned int highest_outgoing_arg_in_use;
125c7a68eb7Smrg 
126c7a68eb7Smrg /* Assume that any stack location at this byte index is used,
127c7a68eb7Smrg    without checking the contents of stack_usage_map.  */
128c7a68eb7Smrg static unsigned HOST_WIDE_INT stack_usage_watermark = HOST_WIDE_INT_M1U;
12910d565efSmrg 
13010d565efSmrg /* A bitmap of virtual-incoming stack space.  Bit is set if the corresponding
13110d565efSmrg    stack location's tail call argument has been already stored into the stack.
13210d565efSmrg    This bitmap is used to prevent sibling call optimization if function tries
13310d565efSmrg    to use parent's incoming argument slots when they have been already
13410d565efSmrg    overwritten with tail call arguments.  */
13510d565efSmrg static sbitmap stored_args_map;
13610d565efSmrg 
137c7a68eb7Smrg /* Assume that any virtual-incoming location at this byte index has been
138c7a68eb7Smrg    stored, without checking the contents of stored_args_map.  */
139c7a68eb7Smrg static unsigned HOST_WIDE_INT stored_args_watermark;
140c7a68eb7Smrg 
14110d565efSmrg /* stack_arg_under_construction is nonzero when an argument may be
14210d565efSmrg    initialized with a constructor call (including a C function that
14310d565efSmrg    returns a BLKmode struct) and expand_call must take special action
14410d565efSmrg    to make sure the object being constructed does not overlap the
14510d565efSmrg    argument list for the constructor call.  */
14610d565efSmrg static int stack_arg_under_construction;
14710d565efSmrg 
14810d565efSmrg static void precompute_register_parameters (int, struct arg_data *, int *);
14910d565efSmrg static int store_one_arg (struct arg_data *, rtx, int, int, int);
15010d565efSmrg static void store_unaligned_arguments_into_pseudos (struct arg_data *, int);
15110d565efSmrg static int finalize_must_preallocate (int, int, struct arg_data *,
15210d565efSmrg 				      struct args_size *);
15310d565efSmrg static void precompute_arguments (int, struct arg_data *);
15410d565efSmrg static void compute_argument_addresses (struct arg_data *, rtx, int);
15510d565efSmrg static rtx rtx_for_function_call (tree, tree);
15610d565efSmrg static void load_register_parameters (struct arg_data *, int, rtx *, int,
15710d565efSmrg 				      int, int *);
15810d565efSmrg static int special_function_p (const_tree, int);
15910d565efSmrg static int check_sibcall_argument_overlap_1 (rtx);
16010d565efSmrg static int check_sibcall_argument_overlap (rtx_insn *, struct arg_data *, int);
16110d565efSmrg 
16210d565efSmrg static tree split_complex_types (tree);
16310d565efSmrg 
16410d565efSmrg #ifdef REG_PARM_STACK_SPACE
16510d565efSmrg static rtx save_fixed_argument_area (int, rtx, int *, int *);
16610d565efSmrg static void restore_fixed_argument_area (rtx, rtx, int, int);
16710d565efSmrg #endif
16810d565efSmrg 
169c7a68eb7Smrg /* Return true if bytes [LOWER_BOUND, UPPER_BOUND) of the outgoing
170c7a68eb7Smrg    stack region might already be in use.  */
171c7a68eb7Smrg 
172c7a68eb7Smrg static bool
stack_region_maybe_used_p(poly_uint64 lower_bound,poly_uint64 upper_bound,unsigned int reg_parm_stack_space)173c7a68eb7Smrg stack_region_maybe_used_p (poly_uint64 lower_bound, poly_uint64 upper_bound,
174c7a68eb7Smrg 			   unsigned int reg_parm_stack_space)
175c7a68eb7Smrg {
176c7a68eb7Smrg   unsigned HOST_WIDE_INT const_lower, const_upper;
177c7a68eb7Smrg   const_lower = constant_lower_bound (lower_bound);
178c7a68eb7Smrg   if (!upper_bound.is_constant (&const_upper))
179c7a68eb7Smrg     const_upper = HOST_WIDE_INT_M1U;
180c7a68eb7Smrg 
181c7a68eb7Smrg   if (const_upper > stack_usage_watermark)
182c7a68eb7Smrg     return true;
183c7a68eb7Smrg 
184c7a68eb7Smrg   /* Don't worry about things in the fixed argument area;
185c7a68eb7Smrg      it has already been saved.  */
186c7a68eb7Smrg   const_lower = MAX (const_lower, reg_parm_stack_space);
187c7a68eb7Smrg   const_upper = MIN (const_upper, highest_outgoing_arg_in_use);
188c7a68eb7Smrg   for (unsigned HOST_WIDE_INT i = const_lower; i < const_upper; ++i)
189c7a68eb7Smrg     if (stack_usage_map[i])
190c7a68eb7Smrg       return true;
191c7a68eb7Smrg   return false;
192c7a68eb7Smrg }
193c7a68eb7Smrg 
194c7a68eb7Smrg /* Record that bytes [LOWER_BOUND, UPPER_BOUND) of the outgoing
195c7a68eb7Smrg    stack region are now in use.  */
196c7a68eb7Smrg 
197c7a68eb7Smrg static void
mark_stack_region_used(poly_uint64 lower_bound,poly_uint64 upper_bound)198c7a68eb7Smrg mark_stack_region_used (poly_uint64 lower_bound, poly_uint64 upper_bound)
199c7a68eb7Smrg {
200c7a68eb7Smrg   unsigned HOST_WIDE_INT const_lower, const_upper;
201c7a68eb7Smrg   const_lower = constant_lower_bound (lower_bound);
202c7a68eb7Smrg   if (upper_bound.is_constant (&const_upper))
203c7a68eb7Smrg     for (unsigned HOST_WIDE_INT i = const_lower; i < const_upper; ++i)
204c7a68eb7Smrg       stack_usage_map[i] = 1;
205c7a68eb7Smrg   else
206c7a68eb7Smrg     stack_usage_watermark = MIN (stack_usage_watermark, const_lower);
207c7a68eb7Smrg }
208c7a68eb7Smrg 
20910d565efSmrg /* Force FUNEXP into a form suitable for the address of a CALL,
21010d565efSmrg    and return that as an rtx.  Also load the static chain register
21110d565efSmrg    if FNDECL is a nested function.
21210d565efSmrg 
21310d565efSmrg    CALL_FUSAGE points to a variable holding the prospective
21410d565efSmrg    CALL_INSN_FUNCTION_USAGE information.  */
21510d565efSmrg 
21610d565efSmrg rtx
prepare_call_address(tree fndecl_or_type,rtx funexp,rtx static_chain_value,rtx * call_fusage,int reg_parm_seen,int flags)21710d565efSmrg prepare_call_address (tree fndecl_or_type, rtx funexp, rtx static_chain_value,
21810d565efSmrg 		      rtx *call_fusage, int reg_parm_seen, int flags)
21910d565efSmrg {
22010d565efSmrg   /* Make a valid memory address and copy constants through pseudo-regs,
22110d565efSmrg      but not for a constant address if -fno-function-cse.  */
22210d565efSmrg   if (GET_CODE (funexp) != SYMBOL_REF)
22310d565efSmrg     {
22410d565efSmrg       /* If it's an indirect call by descriptor, generate code to perform
22510d565efSmrg 	 runtime identification of the pointer and load the descriptor.  */
22610d565efSmrg       if ((flags & ECF_BY_DESCRIPTOR) && !flag_trampolines)
22710d565efSmrg 	{
22810d565efSmrg 	  const int bit_val = targetm.calls.custom_function_descriptors;
22910d565efSmrg 	  rtx call_lab = gen_label_rtx ();
23010d565efSmrg 
23110d565efSmrg 	  gcc_assert (fndecl_or_type && TYPE_P (fndecl_or_type));
23210d565efSmrg 	  fndecl_or_type
23310d565efSmrg 	    = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
23410d565efSmrg 			  fndecl_or_type);
23510d565efSmrg 	  DECL_STATIC_CHAIN (fndecl_or_type) = 1;
23610d565efSmrg 	  rtx chain = targetm.calls.static_chain (fndecl_or_type, false);
23710d565efSmrg 
23810d565efSmrg 	  if (GET_MODE (funexp) != Pmode)
23910d565efSmrg 	    funexp = convert_memory_address (Pmode, funexp);
24010d565efSmrg 
24110d565efSmrg 	  /* Avoid long live ranges around function calls.  */
24210d565efSmrg 	  funexp = copy_to_mode_reg (Pmode, funexp);
24310d565efSmrg 
24410d565efSmrg 	  if (REG_P (chain))
24510d565efSmrg 	    emit_insn (gen_rtx_CLOBBER (VOIDmode, chain));
24610d565efSmrg 
24710d565efSmrg 	  /* Emit the runtime identification pattern.  */
24810d565efSmrg 	  rtx mask = gen_rtx_AND (Pmode, funexp, GEN_INT (bit_val));
24910d565efSmrg 	  emit_cmp_and_jump_insns (mask, const0_rtx, EQ, NULL_RTX, Pmode, 1,
25010d565efSmrg 				   call_lab);
25110d565efSmrg 
25210d565efSmrg 	  /* Statically predict the branch to very likely taken.  */
25310d565efSmrg 	  rtx_insn *insn = get_last_insn ();
25410d565efSmrg 	  if (JUMP_P (insn))
25510d565efSmrg 	    predict_insn_def (insn, PRED_BUILTIN_EXPECT, TAKEN);
25610d565efSmrg 
25710d565efSmrg 	  /* Load the descriptor.  */
25810d565efSmrg 	  rtx mem = gen_rtx_MEM (ptr_mode,
25910d565efSmrg 				 plus_constant (Pmode, funexp, - bit_val));
26010d565efSmrg 	  MEM_NOTRAP_P (mem) = 1;
26110d565efSmrg 	  mem = convert_memory_address (Pmode, mem);
26210d565efSmrg 	  emit_move_insn (chain, mem);
26310d565efSmrg 
26410d565efSmrg 	  mem = gen_rtx_MEM (ptr_mode,
26510d565efSmrg 			     plus_constant (Pmode, funexp,
26610d565efSmrg 					    POINTER_SIZE / BITS_PER_UNIT
26710d565efSmrg 					      - bit_val));
26810d565efSmrg 	  MEM_NOTRAP_P (mem) = 1;
26910d565efSmrg 	  mem = convert_memory_address (Pmode, mem);
27010d565efSmrg 	  emit_move_insn (funexp, mem);
27110d565efSmrg 
27210d565efSmrg 	  emit_label (call_lab);
27310d565efSmrg 
27410d565efSmrg 	  if (REG_P (chain))
27510d565efSmrg 	    {
27610d565efSmrg 	      use_reg (call_fusage, chain);
27710d565efSmrg 	      STATIC_CHAIN_REG_P (chain) = 1;
27810d565efSmrg 	    }
27910d565efSmrg 
28010d565efSmrg 	  /* Make sure we're not going to be overwritten below.  */
28110d565efSmrg 	  gcc_assert (!static_chain_value);
28210d565efSmrg 	}
28310d565efSmrg 
28410d565efSmrg       /* If we are using registers for parameters, force the
28510d565efSmrg 	 function address into a register now.  */
28610d565efSmrg       funexp = ((reg_parm_seen
28710d565efSmrg 		 && targetm.small_register_classes_for_mode_p (FUNCTION_MODE))
28810d565efSmrg 		 ? force_not_mem (memory_address (FUNCTION_MODE, funexp))
28910d565efSmrg 		 : memory_address (FUNCTION_MODE, funexp));
29010d565efSmrg     }
29110d565efSmrg   else
29210d565efSmrg     {
29310d565efSmrg       /* funexp could be a SYMBOL_REF represents a function pointer which is
29410d565efSmrg 	 of ptr_mode.  In this case, it should be converted into address mode
29510d565efSmrg 	 to be a valid address for memory rtx pattern.  See PR 64971.  */
29610d565efSmrg       if (GET_MODE (funexp) != Pmode)
29710d565efSmrg 	funexp = convert_memory_address (Pmode, funexp);
29810d565efSmrg 
29910d565efSmrg       if (!(flags & ECF_SIBCALL))
30010d565efSmrg 	{
30110d565efSmrg 	  if (!NO_FUNCTION_CSE && optimize && ! flag_no_function_cse)
30210d565efSmrg 	    funexp = force_reg (Pmode, funexp);
30310d565efSmrg 	}
30410d565efSmrg     }
30510d565efSmrg 
  /* Load the static chain register for a nested function and record its
     use in CALL_FUSAGE so the register stays live across the call.  */
30610d565efSmrg   if (static_chain_value != 0
30710d565efSmrg       && (TREE_CODE (fndecl_or_type) != FUNCTION_DECL
30810d565efSmrg 	  || DECL_STATIC_CHAIN (fndecl_or_type)))
30910d565efSmrg     {
31010d565efSmrg       rtx chain;
31110d565efSmrg 
31210d565efSmrg       chain = targetm.calls.static_chain (fndecl_or_type, false);
31310d565efSmrg       static_chain_value = convert_memory_address (Pmode, static_chain_value);
31410d565efSmrg 
31510d565efSmrg       emit_move_insn (chain, static_chain_value);
31610d565efSmrg       if (REG_P (chain))
31710d565efSmrg 	{
31810d565efSmrg 	  use_reg (call_fusage, chain);
31910d565efSmrg 	  STATIC_CHAIN_REG_P (chain) = 1;
32010d565efSmrg 	}
32110d565efSmrg     }
32210d565efSmrg 
32310d565efSmrg   return funexp;
32410d565efSmrg }
32510d565efSmrg 
32610d565efSmrg /* Generate instructions to call function FUNEXP,
32710d565efSmrg    and optionally pop the results.
32810d565efSmrg    The CALL_INSN is the first insn generated.
32910d565efSmrg 
33010d565efSmrg    FNDECL is the declaration node of the function.  This is given to the
33110d565efSmrg    hook TARGET_RETURN_POPS_ARGS to determine whether this function pops
33210d565efSmrg    its own args.
33310d565efSmrg 
33410d565efSmrg    FUNTYPE is the data type of the function.  This is given to the hook
33510d565efSmrg    TARGET_RETURN_POPS_ARGS to determine whether this function pops its
33610d565efSmrg    own args.  We used to allow an identifier for library functions, but
33710d565efSmrg    that doesn't work when the return type is an aggregate type and the
33810d565efSmrg    calling convention says that the pointer to this aggregate is to be
33910d565efSmrg    popped by the callee.
34010d565efSmrg 
34110d565efSmrg    STACK_SIZE is the number of bytes of arguments on the stack,
34210d565efSmrg    ROUNDED_STACK_SIZE is that number rounded up to
34310d565efSmrg    PREFERRED_STACK_BOUNDARY; zero if the size is variable.  This is
34410d565efSmrg    both to put into the call insn and to generate explicit popping
34510d565efSmrg    code if necessary.
34610d565efSmrg 
34710d565efSmrg    STRUCT_VALUE_SIZE is the number of bytes wanted in a structure value.
34810d565efSmrg    It is zero if this call doesn't want a structure value.
34910d565efSmrg 
35010d565efSmrg    NEXT_ARG_REG is the rtx that results from executing
351*ec02198aSmrg      targetm.calls.function_arg (&args_so_far,
352*ec02198aSmrg 				 function_arg_info::end_marker ());
35310d565efSmrg    just after all the args have had their registers assigned.
35410d565efSmrg    This could be whatever you like, but normally it is the first
35510d565efSmrg    arg-register beyond those used for args in this call,
35610d565efSmrg    or 0 if all the arg-registers are used in this call.
35710d565efSmrg    It is passed on to `gen_call' so you can put this info in the call insn.
35810d565efSmrg 
35910d565efSmrg    VALREG is a hard register in which a value is returned,
36010d565efSmrg    or 0 if the call does not return a value.
36110d565efSmrg 
36210d565efSmrg    OLD_INHIBIT_DEFER_POP is the value that `inhibit_defer_pop' had before
36310d565efSmrg    the args to this call were processed.
36410d565efSmrg    We restore `inhibit_defer_pop' to that value.
36510d565efSmrg 
36610d565efSmrg    CALL_FUSAGE is either empty or an EXPR_LIST of USE expressions that
36710d565efSmrg    denote registers used by the called function.  */
36810d565efSmrg 
36910d565efSmrg static void
emit_call_1(rtx funexp,tree fntree ATTRIBUTE_UNUSED,tree fndecl ATTRIBUTE_UNUSED,tree funtype ATTRIBUTE_UNUSED,poly_int64 stack_size ATTRIBUTE_UNUSED,poly_int64 rounded_stack_size,poly_int64 struct_value_size ATTRIBUTE_UNUSED,rtx next_arg_reg ATTRIBUTE_UNUSED,rtx valreg,int old_inhibit_defer_pop,rtx call_fusage,int ecf_flags,cumulative_args_t args_so_far ATTRIBUTE_UNUSED)37010d565efSmrg emit_call_1 (rtx funexp, tree fntree ATTRIBUTE_UNUSED, tree fndecl ATTRIBUTE_UNUSED,
37110d565efSmrg 	     tree funtype ATTRIBUTE_UNUSED,
372c7a68eb7Smrg 	     poly_int64 stack_size ATTRIBUTE_UNUSED,
373c7a68eb7Smrg 	     poly_int64 rounded_stack_size,
374c7a68eb7Smrg 	     poly_int64 struct_value_size ATTRIBUTE_UNUSED,
37510d565efSmrg 	     rtx next_arg_reg ATTRIBUTE_UNUSED, rtx valreg,
37610d565efSmrg 	     int old_inhibit_defer_pop, rtx call_fusage, int ecf_flags,
37710d565efSmrg 	     cumulative_args_t args_so_far ATTRIBUTE_UNUSED)
37810d565efSmrg {
379c7a68eb7Smrg   rtx rounded_stack_size_rtx = gen_int_mode (rounded_stack_size, Pmode);
38010d565efSmrg   rtx call, funmem, pat;
38110d565efSmrg   int already_popped = 0;
382c7a68eb7Smrg   poly_int64 n_popped = 0;
38310d565efSmrg 
38410d565efSmrg   /* Sibling call patterns never pop arguments (no sibcall(_value)_pop
38510d565efSmrg      patterns exist).  Any popping that the callee does on return will
38610d565efSmrg      be from our caller's frame rather than ours.  */
38710d565efSmrg   if (!(ecf_flags & ECF_SIBCALL))
38810d565efSmrg     {
38910d565efSmrg       n_popped += targetm.calls.return_pops_args (fndecl, funtype, stack_size);
39010d565efSmrg 
39110d565efSmrg #ifdef CALL_POPS_ARGS
39210d565efSmrg       n_popped += CALL_POPS_ARGS (*get_cumulative_args (args_so_far));
39310d565efSmrg #endif
39410d565efSmrg     }
39510d565efSmrg 
39610d565efSmrg   /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
39710d565efSmrg      and we don't want to load it into a register as an optimization,
39810d565efSmrg      because prepare_call_address already did it if it should be done.  */
39910d565efSmrg   if (GET_CODE (funexp) != SYMBOL_REF)
40010d565efSmrg     funexp = memory_address (FUNCTION_MODE, funexp);
40110d565efSmrg 
40210d565efSmrg   funmem = gen_rtx_MEM (FUNCTION_MODE, funexp);
40310d565efSmrg   if (fndecl && TREE_CODE (fndecl) == FUNCTION_DECL)
40410d565efSmrg     {
40510d565efSmrg       tree t = fndecl;
40610d565efSmrg 
40710d565efSmrg       /* Although a built-in FUNCTION_DECL and its non-__builtin
40810d565efSmrg 	 counterpart compare equal and get a shared mem_attrs, they
40910d565efSmrg 	 produce different dump output in compare-debug compilations,
41010d565efSmrg 	 if an entry gets garbage collected in one compilation, then
41110d565efSmrg 	 adds a different (but equivalent) entry, while the other
41210d565efSmrg 	 doesn't run the garbage collector at the same spot and then
41310d565efSmrg 	 shares the mem_attr with the equivalent entry. */
41410d565efSmrg       if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL)
41510d565efSmrg 	{
41610d565efSmrg 	  tree t2 = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
41710d565efSmrg 	  if (t2)
41810d565efSmrg 	    t = t2;
41910d565efSmrg 	}
42010d565efSmrg 
42110d565efSmrg 	set_mem_expr (funmem, t);
42210d565efSmrg     }
42310d565efSmrg   else if (fntree)
42410d565efSmrg     set_mem_expr (funmem, build_simple_mem_ref (CALL_EXPR_FN (fntree)));
42510d565efSmrg 
42610d565efSmrg   if (ecf_flags & ECF_SIBCALL)
42710d565efSmrg     {
42810d565efSmrg       if (valreg)
42910d565efSmrg 	pat = targetm.gen_sibcall_value (valreg, funmem,
43010d565efSmrg 					 rounded_stack_size_rtx,
43110d565efSmrg 					 next_arg_reg, NULL_RTX);
43210d565efSmrg       else
43310d565efSmrg 	pat = targetm.gen_sibcall (funmem, rounded_stack_size_rtx,
434c7a68eb7Smrg 				   next_arg_reg,
435c7a68eb7Smrg 				   gen_int_mode (struct_value_size, Pmode));
43610d565efSmrg     }
43710d565efSmrg   /* If the target has "call" or "call_value" insns, then prefer them
43810d565efSmrg      if no arguments are actually popped.  If the target does not have
43910d565efSmrg      "call" or "call_value" insns, then we must use the popping versions
44010d565efSmrg      even if the call has no arguments to pop.  */
441c7a68eb7Smrg   else if (maybe_ne (n_popped, 0)
44210d565efSmrg 	   || !(valreg
44310d565efSmrg 		? targetm.have_call_value ()
44410d565efSmrg 		: targetm.have_call ()))
44510d565efSmrg     {
446c7a68eb7Smrg       rtx n_pop = gen_int_mode (n_popped, Pmode);
44710d565efSmrg 
44810d565efSmrg       /* If this subroutine pops its own args, record that in the call insn
44910d565efSmrg 	 if possible, for the sake of frame pointer elimination.  */
45010d565efSmrg 
45110d565efSmrg       if (valreg)
45210d565efSmrg 	pat = targetm.gen_call_value_pop (valreg, funmem,
45310d565efSmrg 					  rounded_stack_size_rtx,
45410d565efSmrg 					  next_arg_reg, n_pop);
45510d565efSmrg       else
45610d565efSmrg 	pat = targetm.gen_call_pop (funmem, rounded_stack_size_rtx,
45710d565efSmrg 				    next_arg_reg, n_pop);
45810d565efSmrg 
45910d565efSmrg       already_popped = 1;
46010d565efSmrg     }
46110d565efSmrg   else
46210d565efSmrg     {
46310d565efSmrg       if (valreg)
46410d565efSmrg 	pat = targetm.gen_call_value (valreg, funmem, rounded_stack_size_rtx,
46510d565efSmrg 				      next_arg_reg, NULL_RTX);
46610d565efSmrg       else
46710d565efSmrg 	pat = targetm.gen_call (funmem, rounded_stack_size_rtx, next_arg_reg,
468c7a68eb7Smrg 				gen_int_mode (struct_value_size, Pmode));
46910d565efSmrg     }
47010d565efSmrg   emit_insn (pat);
47110d565efSmrg 
47210d565efSmrg   /* Find the call we just emitted.  */
47310d565efSmrg   rtx_call_insn *call_insn = last_call_insn ();
47410d565efSmrg 
47510d565efSmrg   /* Some targets create a fresh MEM instead of reusing the one provided
47610d565efSmrg      above.  Set its MEM_EXPR.  */
47710d565efSmrg   call = get_call_rtx_from (call_insn);
47810d565efSmrg   if (call
47910d565efSmrg       && MEM_EXPR (XEXP (call, 0)) == NULL_TREE
48010d565efSmrg       && MEM_EXPR (funmem) != NULL_TREE)
48110d565efSmrg     set_mem_expr (XEXP (call, 0), MEM_EXPR (funmem));
48210d565efSmrg 
48310d565efSmrg   /* Put the register usage information there.  */
48410d565efSmrg   add_function_usage_to (call_insn, call_fusage);
48510d565efSmrg 
48610d565efSmrg   /* If this is a const call, then set the insn's unchanging bit.  */
48710d565efSmrg   if (ecf_flags & ECF_CONST)
48810d565efSmrg     RTL_CONST_CALL_P (call_insn) = 1;
48910d565efSmrg 
49010d565efSmrg   /* If this is a pure call, then set the insn's unchanging bit.  */
49110d565efSmrg   if (ecf_flags & ECF_PURE)
49210d565efSmrg     RTL_PURE_CALL_P (call_insn) = 1;
49310d565efSmrg 
49410d565efSmrg   /* If this is a const or pure call that can loop, record that fact.  */
49510d565efSmrg   if (ecf_flags & ECF_LOOPING_CONST_OR_PURE)
49610d565efSmrg     RTL_LOOPING_CONST_OR_PURE_CALL_P (call_insn) = 1;
49710d565efSmrg 
49810d565efSmrg   /* Create a nothrow REG_EH_REGION note, if needed.  */
49910d565efSmrg   make_reg_eh_region_note (call_insn, ecf_flags, 0);
50010d565efSmrg 
50110d565efSmrg   if (ecf_flags & ECF_NORETURN)
50210d565efSmrg     add_reg_note (call_insn, REG_NORETURN, const0_rtx);
50310d565efSmrg 
  /* Setjmp-like calls: note the insn and record that this function
     contains such a call.  */
50410d565efSmrg   if (ecf_flags & ECF_RETURNS_TWICE)
50510d565efSmrg     {
50610d565efSmrg       add_reg_note (call_insn, REG_SETJMP, const0_rtx);
50710d565efSmrg       cfun->calls_setjmp = 1;
50810d565efSmrg     }
50910d565efSmrg 
51010d565efSmrg   SIBLING_CALL_P (call_insn) = ((ecf_flags & ECF_SIBCALL) != 0);
51110d565efSmrg 
51210d565efSmrg   /* Restore this now, so that we do defer pops for this call's args
51310d565efSmrg      if the context of the call as a whole permits.  */
51410d565efSmrg   inhibit_defer_pop = old_inhibit_defer_pop;
51510d565efSmrg 
516c7a68eb7Smrg   if (maybe_ne (n_popped, 0))
51710d565efSmrg     {
51810d565efSmrg       if (!already_popped)
51910d565efSmrg 	CALL_INSN_FUNCTION_USAGE (call_insn)
52010d565efSmrg 	  = gen_rtx_EXPR_LIST (VOIDmode,
52110d565efSmrg 			       gen_rtx_CLOBBER (VOIDmode, stack_pointer_rtx),
52210d565efSmrg 			       CALL_INSN_FUNCTION_USAGE (call_insn));
52310d565efSmrg       rounded_stack_size -= n_popped;
524c7a68eb7Smrg       rounded_stack_size_rtx = gen_int_mode (rounded_stack_size, Pmode);
52510d565efSmrg       stack_pointer_delta -= n_popped;
52610d565efSmrg 
527c7a68eb7Smrg       add_args_size_note (call_insn, stack_pointer_delta);
52810d565efSmrg 
52910d565efSmrg       /* If popping is needed, stack realign must use DRAP  */
53010d565efSmrg       if (SUPPORTS_STACK_ALIGNMENT)
53110d565efSmrg         crtl->need_drap = true;
53210d565efSmrg     }
53310d565efSmrg   /* For noreturn calls when not accumulating outgoing args force
53410d565efSmrg      REG_ARGS_SIZE note to prevent crossjumping of calls with different
53510d565efSmrg      args sizes.  */
53610d565efSmrg   else if (!ACCUMULATE_OUTGOING_ARGS && (ecf_flags & ECF_NORETURN) != 0)
537c7a68eb7Smrg     add_args_size_note (call_insn, stack_pointer_delta);
53810d565efSmrg 
53910d565efSmrg   if (!ACCUMULATE_OUTGOING_ARGS)
54010d565efSmrg     {
54110d565efSmrg       /* If returning from the subroutine does not automatically pop the args,
54210d565efSmrg 	 we need an instruction to pop them sooner or later.
54310d565efSmrg 	 Perhaps do it now; perhaps just record how much space to pop later.
54410d565efSmrg 
54510d565efSmrg 	 If returning from the subroutine does pop the args, indicate that the
54610d565efSmrg 	 stack pointer will be changed.  */
54710d565efSmrg 
548c7a68eb7Smrg       if (maybe_ne (rounded_stack_size, 0))
54910d565efSmrg 	{
55010d565efSmrg 	  if (ecf_flags & ECF_NORETURN)
55110d565efSmrg 	    /* Just pretend we did the pop.  */
55210d565efSmrg 	    stack_pointer_delta -= rounded_stack_size;
55310d565efSmrg 	  else if (flag_defer_pop && inhibit_defer_pop == 0
55410d565efSmrg 	      && ! (ecf_flags & (ECF_CONST | ECF_PURE)))
55510d565efSmrg 	    pending_stack_adjust += rounded_stack_size;
55610d565efSmrg 	  else
55710d565efSmrg 	    adjust_stack (rounded_stack_size_rtx);
55810d565efSmrg 	}
55910d565efSmrg     }
56010d565efSmrg   /* When we accumulate outgoing args, we must avoid any stack manipulations.
56110d565efSmrg      Restore the stack pointer to its original value now.  Usually
56210d565efSmrg      ACCUMULATE_OUTGOING_ARGS targets don't get here, but there are exceptions.
56310d565efSmrg      On  i386 ACCUMULATE_OUTGOING_ARGS can be enabled on demand, and
56410d565efSmrg      popping variants of functions exist as well.
56510d565efSmrg 
56610d565efSmrg      ??? We may optimize similar to defer_pop above, but it is
56710d565efSmrg      probably not worthwhile.
56810d565efSmrg 
56910d565efSmrg      ??? It will be worthwhile to enable combine_stack_adjustments even for
57010d565efSmrg      such machines.  */
571c7a68eb7Smrg   else if (maybe_ne (n_popped, 0))
572c7a68eb7Smrg     anti_adjust_stack (gen_int_mode (n_popped, Pmode));
57310d565efSmrg }
57410d565efSmrg 
57510d565efSmrg /* Determine if the function identified by FNDECL is one with
57610d565efSmrg    special properties we wish to know about.  Modify FLAGS accordingly.
57710d565efSmrg 
57810d565efSmrg    For example, if the function might return more than one time (setjmp), then
57910d565efSmrg    set ECF_RETURNS_TWICE.
58010d565efSmrg 
58110d565efSmrg    Set ECF_MAY_BE_ALLOCA for any memory allocation function that might allocate
58210d565efSmrg    space from the stack such as alloca.  */
58310d565efSmrg 
static int
special_function_p (const_tree fndecl, int flags)
{
  tree name_decl = DECL_NAME (fndecl);

  /* Screen out declarations that cannot possibly match by name: the
     longest name recognized below, allowing for a "__" prefix, is
     "__sigsetjmp" (11 characters).  */
  if (maybe_special_function_p (fndecl)
      && IDENTIFIER_LENGTH (name_decl) <= 11)
    {
      const char *name = IDENTIFIER_POINTER (name_decl);
      const char *tname = name;

      /* We assume that alloca will always be called by name.  It
	 makes no sense to pass it as a pointer-to-function to
	 anything that does not understand its behavior.  */
      if (IDENTIFIER_LENGTH (name_decl) == 6
	  && name[0] == 'a'
	  && ! strcmp (name, "alloca"))
	flags |= ECF_MAY_BE_ALLOCA;

      /* Disregard prefix _ or __.  TNAME points past the prefix,
	 NAME still points at the full identifier.  */
      if (name[0] == '_')
	{
	  if (name[1] == '_')
	    tname += 2;
	  else
	    tname += 1;
	}

      /* ECF_RETURNS_TWICE is safe even for -ffreestanding.
	 Note that setjmp/sigsetjmp are matched with the prefix
	 stripped (TNAME), while savectx/vfork/getcontext are
	 matched against the unstripped NAME.  */
      if (! strcmp (tname, "setjmp")
	  || ! strcmp (tname, "sigsetjmp")
	  || ! strcmp (name, "savectx")
	  || ! strcmp (name, "vfork")
	  || ! strcmp (name, "getcontext"))
	flags |= ECF_RETURNS_TWICE;
    }

  /* The builtin forms of alloca are recognized by function code
     rather than by name, independently of the name check above.  */
  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (fndecl)))
    flags |= ECF_MAY_BE_ALLOCA;

  return flags;
}
62710d565efSmrg 
62810d565efSmrg /* Similar to special_function_p; return a set of ERF_ flags for the
62910d565efSmrg    function FNDECL.  */
63010d565efSmrg static int
decl_return_flags(tree fndecl)63110d565efSmrg decl_return_flags (tree fndecl)
63210d565efSmrg {
63310d565efSmrg   tree attr;
63410d565efSmrg   tree type = TREE_TYPE (fndecl);
63510d565efSmrg   if (!type)
63610d565efSmrg     return 0;
63710d565efSmrg 
63810d565efSmrg   attr = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (type));
63910d565efSmrg   if (!attr)
64010d565efSmrg     return 0;
64110d565efSmrg 
64210d565efSmrg   attr = TREE_VALUE (TREE_VALUE (attr));
64310d565efSmrg   if (!attr || TREE_STRING_LENGTH (attr) < 1)
64410d565efSmrg     return 0;
64510d565efSmrg 
64610d565efSmrg   switch (TREE_STRING_POINTER (attr)[0])
64710d565efSmrg     {
64810d565efSmrg     case '1':
64910d565efSmrg     case '2':
65010d565efSmrg     case '3':
65110d565efSmrg     case '4':
65210d565efSmrg       return ERF_RETURNS_ARG | (TREE_STRING_POINTER (attr)[0] - '1');
65310d565efSmrg 
65410d565efSmrg     case 'm':
65510d565efSmrg       return ERF_NOALIAS;
65610d565efSmrg 
65710d565efSmrg     case '.':
65810d565efSmrg     default:
65910d565efSmrg       return 0;
66010d565efSmrg     }
66110d565efSmrg }
66210d565efSmrg 
66310d565efSmrg /* Return nonzero when FNDECL represents a call to setjmp.  */
66410d565efSmrg 
66510d565efSmrg int
setjmp_call_p(const_tree fndecl)66610d565efSmrg setjmp_call_p (const_tree fndecl)
66710d565efSmrg {
66810d565efSmrg   if (DECL_IS_RETURNS_TWICE (fndecl))
66910d565efSmrg     return ECF_RETURNS_TWICE;
67010d565efSmrg   return special_function_p (fndecl, 0) & ECF_RETURNS_TWICE;
67110d565efSmrg }
67210d565efSmrg 
67310d565efSmrg 
67410d565efSmrg /* Return true if STMT may be an alloca call.  */
67510d565efSmrg 
67610d565efSmrg bool
gimple_maybe_alloca_call_p(const gimple * stmt)67710d565efSmrg gimple_maybe_alloca_call_p (const gimple *stmt)
67810d565efSmrg {
67910d565efSmrg   tree fndecl;
68010d565efSmrg 
68110d565efSmrg   if (!is_gimple_call (stmt))
68210d565efSmrg     return false;
68310d565efSmrg 
68410d565efSmrg   fndecl = gimple_call_fndecl (stmt);
68510d565efSmrg   if (fndecl && (special_function_p (fndecl, 0) & ECF_MAY_BE_ALLOCA))
68610d565efSmrg     return true;
68710d565efSmrg 
68810d565efSmrg   return false;
68910d565efSmrg }
69010d565efSmrg 
69110d565efSmrg /* Return true if STMT is a builtin alloca call.  */
69210d565efSmrg 
69310d565efSmrg bool
gimple_alloca_call_p(const gimple * stmt)69410d565efSmrg gimple_alloca_call_p (const gimple *stmt)
69510d565efSmrg {
69610d565efSmrg   tree fndecl;
69710d565efSmrg 
69810d565efSmrg   if (!is_gimple_call (stmt))
69910d565efSmrg     return false;
70010d565efSmrg 
70110d565efSmrg   fndecl = gimple_call_fndecl (stmt);
7020fc04c29Smrg   if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
70310d565efSmrg     switch (DECL_FUNCTION_CODE (fndecl))
70410d565efSmrg       {
705c7a68eb7Smrg       CASE_BUILT_IN_ALLOCA:
706c7a68eb7Smrg 	return gimple_call_num_args (stmt) > 0;
70710d565efSmrg       default:
70810d565efSmrg 	break;
70910d565efSmrg       }
71010d565efSmrg 
71110d565efSmrg   return false;
71210d565efSmrg }
71310d565efSmrg 
71410d565efSmrg /* Return true when exp contains a builtin alloca call.  */
71510d565efSmrg 
71610d565efSmrg bool
alloca_call_p(const_tree exp)71710d565efSmrg alloca_call_p (const_tree exp)
71810d565efSmrg {
71910d565efSmrg   tree fndecl;
72010d565efSmrg   if (TREE_CODE (exp) == CALL_EXPR
72110d565efSmrg       && (fndecl = get_callee_fndecl (exp))
72210d565efSmrg       && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
72310d565efSmrg     switch (DECL_FUNCTION_CODE (fndecl))
72410d565efSmrg       {
725c7a68eb7Smrg       CASE_BUILT_IN_ALLOCA:
72610d565efSmrg         return true;
72710d565efSmrg       default:
72810d565efSmrg 	break;
72910d565efSmrg       }
73010d565efSmrg 
73110d565efSmrg   return false;
73210d565efSmrg }
73310d565efSmrg 
73410d565efSmrg /* Return TRUE if FNDECL is either a TM builtin or a TM cloned
73510d565efSmrg    function.  Return FALSE otherwise.  */
73610d565efSmrg 
static bool
is_tm_builtin (const_tree fndecl)
{
  if (fndecl == NULL)
    return false;

  /* TM clones of user functions count as TM functions.  */
  if (decl_is_tm_clone (fndecl))
    return true;

  /* Otherwise, check for the transactional-memory builtins proper.  */
  if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      switch (DECL_FUNCTION_CODE (fndecl))
	{
	/* Transaction control and irrevocability.  */
	case BUILT_IN_TM_COMMIT:
	case BUILT_IN_TM_COMMIT_EH:
	case BUILT_IN_TM_ABORT:
	case BUILT_IN_TM_IRREVOCABLE:
	case BUILT_IN_TM_GETTMCLONE_IRR:
	/* Transactional memory-block operations.  */
	case BUILT_IN_TM_MEMCPY:
	case BUILT_IN_TM_MEMMOVE:
	case BUILT_IN_TM_MEMSET:
	/* Transactional stores, one group per access size/type.  */
	CASE_BUILT_IN_TM_STORE (1):
	CASE_BUILT_IN_TM_STORE (2):
	CASE_BUILT_IN_TM_STORE (4):
	CASE_BUILT_IN_TM_STORE (8):
	CASE_BUILT_IN_TM_STORE (FLOAT):
	CASE_BUILT_IN_TM_STORE (DOUBLE):
	CASE_BUILT_IN_TM_STORE (LDOUBLE):
	CASE_BUILT_IN_TM_STORE (M64):
	CASE_BUILT_IN_TM_STORE (M128):
	CASE_BUILT_IN_TM_STORE (M256):
	/* Transactional loads, same grouping.  */
	CASE_BUILT_IN_TM_LOAD (1):
	CASE_BUILT_IN_TM_LOAD (2):
	CASE_BUILT_IN_TM_LOAD (4):
	CASE_BUILT_IN_TM_LOAD (8):
	CASE_BUILT_IN_TM_LOAD (FLOAT):
	CASE_BUILT_IN_TM_LOAD (DOUBLE):
	CASE_BUILT_IN_TM_LOAD (LDOUBLE):
	CASE_BUILT_IN_TM_LOAD (M64):
	CASE_BUILT_IN_TM_LOAD (M128):
	CASE_BUILT_IN_TM_LOAD (M256):
	/* Transaction logging.  */
	case BUILT_IN_TM_LOG:
	case BUILT_IN_TM_LOG_1:
	case BUILT_IN_TM_LOG_2:
	case BUILT_IN_TM_LOG_4:
	case BUILT_IN_TM_LOG_8:
	case BUILT_IN_TM_LOG_FLOAT:
	case BUILT_IN_TM_LOG_DOUBLE:
	case BUILT_IN_TM_LOG_LDOUBLE:
	case BUILT_IN_TM_LOG_M64:
	case BUILT_IN_TM_LOG_M128:
	case BUILT_IN_TM_LOG_M256:
	  return true;
	default:
	  break;
	}
    }
  return false;
}
79610d565efSmrg 
79710d565efSmrg /* Detect flags (function attributes) from the function decl or type node.  */
79810d565efSmrg 
int
flags_from_decl_or_type (const_tree exp)
{
  int flags = 0;

  if (DECL_P (exp))
    {
      /* The function exp may have the `malloc' attribute.  */
      if (DECL_IS_MALLOC (exp))
	flags |= ECF_MALLOC;

      /* The function exp may have the `returns_twice' attribute.  */
      if (DECL_IS_RETURNS_TWICE (exp))
	flags |= ECF_RETURNS_TWICE;

      /* Process the pure and const attributes.  */
      if (TREE_READONLY (exp))
	flags |= ECF_CONST;
      if (DECL_PURE_P (exp))
	flags |= ECF_PURE;
      if (DECL_LOOPING_CONST_OR_PURE_P (exp))
	flags |= ECF_LOOPING_CONST_OR_PURE;

      if (DECL_IS_NOVOPS (exp))
	flags |= ECF_NOVOPS;
      if (lookup_attribute ("leaf", DECL_ATTRIBUTES (exp)))
	flags |= ECF_LEAF;
      if (lookup_attribute ("cold", DECL_ATTRIBUTES (exp)))
	flags |= ECF_COLD;

      if (TREE_NOTHROW (exp))
	flags |= ECF_NOTHROW;

      /* Transactional-memory classification; depends on the CONST and
	 NOVOPS bits computed just above, so this must come after them.  */
      if (flag_tm)
	{
	  if (is_tm_builtin (exp))
	    flags |= ECF_TM_BUILTIN;
	  else if ((flags & (ECF_CONST|ECF_NOVOPS)) != 0
		   || lookup_attribute ("transaction_pure",
					TYPE_ATTRIBUTES (TREE_TYPE (exp))))
	    flags |= ECF_TM_PURE;
	}

      /* Finally merge in name-based special-function bits (setjmp,
	 alloca, ...).  */
      flags = special_function_p (exp, flags);
    }
  else if (TYPE_P (exp))
    {
      /* For a bare function type only the type-level attributes are
	 available.  */
      if (TYPE_READONLY (exp))
	flags |= ECF_CONST;

      if (flag_tm
	  && ((flags & ECF_CONST) != 0
	      || lookup_attribute ("transaction_pure", TYPE_ATTRIBUTES (exp))))
	flags |= ECF_TM_PURE;
    }
  else
    gcc_unreachable ();

  /* TREE_THIS_VOLATILE on a function decl/type means "does not return".
     A const/pure noreturn function must loop, hence the extra bit.  */
  if (TREE_THIS_VOLATILE (exp))
    {
      flags |= ECF_NORETURN;
      if (flags & (ECF_CONST|ECF_PURE))
	flags |= ECF_LOOPING_CONST_OR_PURE;
    }

  return flags;
}
86610d565efSmrg 
86710d565efSmrg /* Detect flags from a CALL_EXPR.  */
86810d565efSmrg 
86910d565efSmrg int
call_expr_flags(const_tree t)87010d565efSmrg call_expr_flags (const_tree t)
87110d565efSmrg {
87210d565efSmrg   int flags;
87310d565efSmrg   tree decl = get_callee_fndecl (t);
87410d565efSmrg 
87510d565efSmrg   if (decl)
87610d565efSmrg     flags = flags_from_decl_or_type (decl);
87710d565efSmrg   else if (CALL_EXPR_FN (t) == NULL_TREE)
87810d565efSmrg     flags = internal_fn_flags (CALL_EXPR_IFN (t));
87910d565efSmrg   else
88010d565efSmrg     {
88110d565efSmrg       tree type = TREE_TYPE (CALL_EXPR_FN (t));
88210d565efSmrg       if (type && TREE_CODE (type) == POINTER_TYPE)
88310d565efSmrg 	flags = flags_from_decl_or_type (TREE_TYPE (type));
88410d565efSmrg       else
88510d565efSmrg 	flags = 0;
88610d565efSmrg       if (CALL_EXPR_BY_DESCRIPTOR (t))
88710d565efSmrg 	flags |= ECF_BY_DESCRIPTOR;
88810d565efSmrg     }
88910d565efSmrg 
89010d565efSmrg   return flags;
89110d565efSmrg }
89210d565efSmrg 
893*ec02198aSmrg /* Return true if ARG should be passed by invisible reference.  */
89410d565efSmrg 
bool
pass_by_reference (CUMULATIVE_ARGS *ca, function_arg_info arg)
{
  /* ARG is taken by value, so the adjustments below affect only the
     copy handed to the target hook, not the caller's argument info.  */
  if (tree type = arg.type)
    {
      /* If this type contains non-trivial constructors, then it is
	 forbidden for the middle-end to create any new copies.  */
      if (TREE_ADDRESSABLE (type))
	return true;

      /* GCC post 3.4 passes *all* variable sized types by reference.  */
      if (!TYPE_SIZE (type) || !poly_int_tree_p (TYPE_SIZE (type)))
	return true;

      /* If a record type should be passed the same as its first (and only)
	 member, use the type and mode of that member.  */
      if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
	{
	  arg.type = TREE_TYPE (first_field (type));
	  arg.mode = TYPE_MODE (arg.type);
	}
    }

  /* The target makes the final decision.  */
  return targetm.calls.pass_by_reference (pack_cumulative_args (ca), arg);
}
92010d565efSmrg 
921*ec02198aSmrg /* Return true if TYPE should be passed by reference when passed to
922*ec02198aSmrg    the "..." arguments of a function.  */
923*ec02198aSmrg 
924*ec02198aSmrg bool
pass_va_arg_by_reference(tree type)925*ec02198aSmrg pass_va_arg_by_reference (tree type)
926*ec02198aSmrg {
927*ec02198aSmrg   return pass_by_reference (NULL, function_arg_info (type, /*named=*/false));
928*ec02198aSmrg }
929*ec02198aSmrg 
930*ec02198aSmrg /* Decide whether ARG, which occurs in the state described by CA,
931*ec02198aSmrg    should be passed by reference.  Return true if so and update
932*ec02198aSmrg    ARG accordingly.  */
933*ec02198aSmrg 
934*ec02198aSmrg bool
apply_pass_by_reference_rules(CUMULATIVE_ARGS * ca,function_arg_info & arg)935*ec02198aSmrg apply_pass_by_reference_rules (CUMULATIVE_ARGS *ca, function_arg_info &arg)
936*ec02198aSmrg {
937*ec02198aSmrg   if (pass_by_reference (ca, arg))
938*ec02198aSmrg     {
939*ec02198aSmrg       arg.type = build_pointer_type (arg.type);
940*ec02198aSmrg       arg.mode = TYPE_MODE (arg.type);
941*ec02198aSmrg       arg.pass_by_reference = true;
942*ec02198aSmrg       return true;
943*ec02198aSmrg     }
944*ec02198aSmrg   return false;
945*ec02198aSmrg }
946*ec02198aSmrg 
947*ec02198aSmrg /* Return true if ARG, which is passed by reference, should be callee
94810d565efSmrg    copied instead of caller copied.  */
94910d565efSmrg 
95010d565efSmrg bool
reference_callee_copied(CUMULATIVE_ARGS * ca,const function_arg_info & arg)951*ec02198aSmrg reference_callee_copied (CUMULATIVE_ARGS *ca, const function_arg_info &arg)
95210d565efSmrg {
953*ec02198aSmrg   if (arg.type && TREE_ADDRESSABLE (arg.type))
95410d565efSmrg     return false;
955*ec02198aSmrg   return targetm.calls.callee_copies (pack_cumulative_args (ca), arg);
95610d565efSmrg }
95710d565efSmrg 
95810d565efSmrg 
95910d565efSmrg /* Precompute all register parameters as described by ARGS, storing values
96010d565efSmrg    into fields within the ARGS array.
96110d565efSmrg 
96210d565efSmrg    NUM_ACTUALS indicates the total number elements in the ARGS array.
96310d565efSmrg 
96410d565efSmrg    Set REG_PARM_SEEN if we encounter a register parameter.  */
96510d565efSmrg 
static void
precompute_register_parameters (int num_actuals, struct arg_data *args,
				int *reg_parm_seen)
{
  int i;

  *reg_parm_seen = 0;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack)
      {
	*reg_parm_seen = 1;

	/* Expand the argument's tree value if it has not been computed
	   yet, keeping any temporaries alive until the call.  */
	if (args[i].value == 0)
	  {
	    push_temp_slots ();
	    args[i].value = expand_normal (args[i].tree_value);
	    preserve_temp_slots (args[i].value);
	    pop_temp_slots ();
	  }

	/* If we are to promote the function arg to a wider mode,
	   do it now.  */

	if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
	  args[i].value
	    = convert_modes (args[i].mode,
			     TYPE_MODE (TREE_TYPE (args[i].tree_value)),
			     args[i].value, args[i].unsignedp);

	/* If the value is a non-legitimate constant, force it into a
	   pseudo now.  TLS symbols sometimes need a call to resolve.  */
	if (CONSTANT_P (args[i].value)
	    && !targetm.legitimate_constant_p (args[i].mode, args[i].value))
	  args[i].value = force_reg (args[i].mode, args[i].value);

	/* If we're going to have to load the value by parts, pull the
	   parts into pseudos.  The part extraction process can involve
	   non-trivial computation.  */
	if (GET_CODE (args[i].reg) == PARALLEL)
	  {
	    tree type = TREE_TYPE (args[i].tree_value);
	    args[i].parallel_value
	      = emit_group_load_into_temps (args[i].reg, args[i].value,
					    type, int_size_in_bytes (type));
	  }

	/* If the value is expensive, and we are inside an appropriately
	   short loop, put the value into a pseudo and then put the pseudo
	   into the hard reg.

	   For small register classes, also do this if this call uses
	   register parameters.  This is to avoid reload conflicts while
	   loading the parameters registers.  */

	else if ((! (REG_P (args[i].value)
		     || (GET_CODE (args[i].value) == SUBREG
			 && REG_P (SUBREG_REG (args[i].value)))))
		 && args[i].mode != BLKmode
		 && (set_src_cost (args[i].value, args[i].mode,
				   optimize_insn_for_speed_p ())
		     > COSTS_N_INSNS (1))
		 && ((*reg_parm_seen
		      && targetm.small_register_classes_for_mode_p (args[i].mode))
		     || optimize))
	  args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
      }
}
103410d565efSmrg 
103510d565efSmrg #ifdef REG_PARM_STACK_SPACE
103610d565efSmrg 
103710d565efSmrg   /* The argument list is the property of the called routine and it
103810d565efSmrg      may clobber it.  If the fixed area has been used for previous
103910d565efSmrg      parameters, we must save and restore it.  */
104010d565efSmrg 
static rtx
save_fixed_argument_area (int reg_parm_stack_space, rtx argblock, int *low_to_save, int *high_to_save)
{
  unsigned int low;
  unsigned int high;

  /* Compute the boundary of the area that needs to be saved, if any.  */
  high = reg_parm_stack_space;
  if (ARGS_GROW_DOWNWARD)
    high += 1;

  if (high > highest_outgoing_arg_in_use)
    high = highest_outgoing_arg_in_use;

  /* Find the first slot in [0, HIGH) that is in use; if none is, fall
     through and return NULL_RTX (nothing needs saving).  */
  for (low = 0; low < high; low++)
    if (stack_usage_map[low] != 0 || low >= stack_usage_watermark)
      {
	int num_to_save;
	machine_mode save_mode;
	int delta;
	rtx addr;
	rtx stack_area;
	rtx save_area;

	/* Trim unused slots off the top so [LOW, HIGH] is the minimal
	   in-use range.  Note this decrements HIGH before each test.  */
	while (stack_usage_map[--high] == 0)
	  ;

	*low_to_save = low;
	*high_to_save = high;

	num_to_save = high - low + 1;

	/* If we don't have the required alignment, must do this
	   in BLKmode.  */
	scalar_int_mode imode;
	if (int_mode_for_size (num_to_save * BITS_PER_UNIT, 1).exists (&imode)
	    && (low & (MIN (GET_MODE_SIZE (imode),
			    BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)) == 0)
	  save_mode = imode;
	else
	  save_mode = BLKmode;

	/* Address of the saved region relative to ARGBLOCK depends on
	   the direction arguments grow.  */
	if (ARGS_GROW_DOWNWARD)
	  delta = -high;
	else
	  delta = low;

	addr = plus_constant (Pmode, argblock, delta);
	stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));

	set_mem_align (stack_area, PARM_BOUNDARY);
	if (save_mode == BLKmode)
	  {
	    /* Too large or misaligned for a register: save to a stack
	       temporary with a block move.  */
	    save_area = assign_stack_temp (BLKmode, num_to_save);
	    emit_block_move (validize_mem (save_area), stack_area,
			     GEN_INT (num_to_save), BLOCK_OP_CALL_PARM);
	  }
	else
	  {
	    save_area = gen_reg_rtx (save_mode);
	    emit_move_insn (save_area, stack_area);
	  }

	return save_area;
      }

  return NULL_RTX;
}
110910d565efSmrg 
111010d565efSmrg static void
restore_fixed_argument_area(rtx save_area,rtx argblock,int high_to_save,int low_to_save)111110d565efSmrg restore_fixed_argument_area (rtx save_area, rtx argblock, int high_to_save, int low_to_save)
111210d565efSmrg {
111310d565efSmrg   machine_mode save_mode = GET_MODE (save_area);
111410d565efSmrg   int delta;
111510d565efSmrg   rtx addr, stack_area;
111610d565efSmrg 
111710d565efSmrg   if (ARGS_GROW_DOWNWARD)
111810d565efSmrg     delta = -high_to_save;
111910d565efSmrg   else
112010d565efSmrg     delta = low_to_save;
112110d565efSmrg 
112210d565efSmrg   addr = plus_constant (Pmode, argblock, delta);
112310d565efSmrg   stack_area = gen_rtx_MEM (save_mode, memory_address (save_mode, addr));
112410d565efSmrg   set_mem_align (stack_area, PARM_BOUNDARY);
112510d565efSmrg 
112610d565efSmrg   if (save_mode != BLKmode)
112710d565efSmrg     emit_move_insn (stack_area, save_area);
112810d565efSmrg   else
112910d565efSmrg     emit_block_move (stack_area, validize_mem (save_area),
113010d565efSmrg 		     GEN_INT (high_to_save - low_to_save + 1),
113110d565efSmrg 		     BLOCK_OP_CALL_PARM);
113210d565efSmrg }
113310d565efSmrg #endif /* REG_PARM_STACK_SPACE */
113410d565efSmrg 
113510d565efSmrg /* If any elements in ARGS refer to parameters that are to be passed in
113610d565efSmrg    registers, but not in memory, and whose alignment does not permit a
113710d565efSmrg    direct copy into registers.  Copy the values into a group of pseudos
113810d565efSmrg    which we will later copy into the appropriate hard registers.
113910d565efSmrg 
114010d565efSmrg    Pseudos for each unaligned argument will be stored into the array
114110d565efSmrg    args[argnum].aligned_regs.  The caller is responsible for deallocating
114210d565efSmrg    the aligned_regs array if it is nonzero.  */
114310d565efSmrg 
static void
store_unaligned_arguments_into_pseudos (struct arg_data *args, int num_actuals)
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack
	&& GET_CODE (args[i].reg) != PARALLEL
	&& args[i].mode == BLKmode
	&& MEM_P (args[i].value)
	&& (MEM_ALIGN (args[i].value)
	    < (unsigned int) MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
      {
	int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
	int endian_correction = 0;

	/* A partial argument only needs registers for the part not
	   passed on the stack; otherwise cover the whole value,
	   rounding up to whole words.  */
	if (args[i].partial)
	  {
	    gcc_assert (args[i].partial % UNITS_PER_WORD == 0);
	    args[i].n_aligned_regs = args[i].partial / UNITS_PER_WORD;
	  }
	else
	  {
	    args[i].n_aligned_regs
	      = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
	  }

	args[i].aligned_regs = XNEWVEC (rtx, args[i].n_aligned_regs);

	/* Structures smaller than a word are normally aligned to the
	   least significant byte.  On a BYTES_BIG_ENDIAN machine,
	   this means we must skip the empty high order bytes when
	   calculating the bit offset.  */
	if (bytes < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
	    && (BLOCK_REG_PADDING (args[i].mode,
				   TREE_TYPE (args[i].tree_value), 1)
		== PAD_DOWNWARD)
#else
	    && BYTES_BIG_ENDIAN
#endif
	    )
	  endian_correction = BITS_PER_WORD - bytes * BITS_PER_UNIT;

	/* Copy the value word by word into freshly created pseudos.  */
	for (j = 0; j < args[i].n_aligned_regs; j++)
	  {
	    rtx reg = gen_reg_rtx (word_mode);
	    rtx word = operand_subword_force (args[i].value, j, BLKmode);
	    int bitsize = MIN (bytes * BITS_PER_UNIT, BITS_PER_WORD);

	    args[i].aligned_regs[j] = reg;
	    word = extract_bit_field (word, bitsize, 0, 1, NULL_RTX,
				      word_mode, word_mode, false, NULL);

	    /* There is no need to restrict this code to loading items
	       in TYPE_ALIGN sized hunks.  The bitfield instructions can
	       load up entire word sized registers efficiently.

	       ??? This may not be needed anymore.
	       We use to emit a clobber here but that doesn't let later
	       passes optimize the instructions we emit.  By storing 0 into
	       the register later passes know the first AND to zero out the
	       bitfield being set in the register is unnecessary.  The store
	       of 0 will be deleted as will at least the first AND.  */

	    emit_move_insn (reg, const0_rtx);

	    bytes -= bitsize / BITS_PER_UNIT;
	    store_bit_field (reg, bitsize, endian_correction, 0, 0,
			     word_mode, word, false);
	  }
      }
}
121710d565efSmrg 
121810d565efSmrg /* The limit set by -Walloc-larger-than=.  */
121910d565efSmrg static GTY(()) tree alloc_object_size_limit;
122010d565efSmrg 
122110d565efSmrg /* Initialize ALLOC_OBJECT_SIZE_LIMIT based on the -Walloc-size-larger-than=
122210d565efSmrg    setting if the option is specified, or to the maximum object size if it
122310d565efSmrg    is not.  Return the initialized value.  */
122410d565efSmrg 
122510d565efSmrg static tree
alloc_max_size(void)122610d565efSmrg alloc_max_size (void)
122710d565efSmrg {
122810d565efSmrg   if (alloc_object_size_limit)
122910d565efSmrg     return alloc_object_size_limit;
123010d565efSmrg 
12310fc04c29Smrg   HOST_WIDE_INT limit = warn_alloc_size_limit;
12320fc04c29Smrg   if (limit == HOST_WIDE_INT_MAX)
12330fc04c29Smrg     limit = tree_to_shwi (TYPE_MAX_VALUE (ptrdiff_type_node));
123410d565efSmrg 
12350fc04c29Smrg   alloc_object_size_limit = build_int_cst (size_type_node, limit);
1236c7a68eb7Smrg 
123710d565efSmrg   return alloc_object_size_limit;
123810d565efSmrg }
123910d565efSmrg 
124010d565efSmrg /* Return true when EXP's range can be determined and set RANGE[] to it
   after adjusting it if necessary so that EXP represents a valid size
1242c7a68eb7Smrg    of object, or a valid size argument to an allocation function declared
1243c7a68eb7Smrg    with attribute alloc_size (whose argument may be signed), or to a string
1244c7a68eb7Smrg    manipulation function like memset.  When ALLOW_ZERO is true, allow
1245c7a68eb7Smrg    returning a range of [0, 0] for a size in an anti-range [1, N] where
1246c7a68eb7Smrg    N > PTRDIFF_MAX.  A zero range is a (nearly) invalid argument to
1247c7a68eb7Smrg    allocation functions like malloc but it is a valid argument to
1248c7a68eb7Smrg    functions like memset.  */
124910d565efSmrg 
bool
get_size_range (tree exp, tree range[2], bool allow_zero /* = false */)
{
  if (!exp)
    return false;

  if (tree_fits_uhwi_p (exp))
    {
      /* EXP is a constant.  */
      range[0] = range[1] = exp;
      return true;
    }

  tree exptype = TREE_TYPE (exp);
  bool integral = INTEGRAL_TYPE_P (exptype);

  wide_int min, max;
  enum value_range_kind range_type;

  /* Only integral expressions can have a usable value range.  */
  if (integral)
    range_type = determine_value_range (exp, &min, &max);
  else
    range_type = VR_VARYING;

  if (range_type == VR_VARYING)
    {
      if (integral)
	{
	  /* Use the full range of the type of the expression when
	     no value range information is available.  */
	  range[0] = TYPE_MIN_VALUE (exptype);
	  range[1] = TYPE_MAX_VALUE (exptype);
	  return true;
	}

      range[0] = NULL_TREE;
      range[1] = NULL_TREE;
      return false;
    }

  unsigned expprec = TYPE_PRECISION (exptype);

  bool signed_p = !TYPE_UNSIGNED (exptype);

  /* An anti-range [MIN, MAX] excludes those values; rewrite it into
     an ordinary range of plausible sizes.  */
  if (range_type == VR_ANTI_RANGE)
    {
      if (signed_p)
	{
	  if (wi::les_p (max, 0))
	    {
	      /* EXP is not in a strictly negative range.  That means
		 it must be in some (not necessarily strictly) positive
		 range which includes zero.  Since in signed to unsigned
		 conversions negative values end up converted to large
		 positive values, and otherwise they are not valid sizes,
		 the resulting range is in both cases [0, TYPE_MAX].  */
	      min = wi::zero (expprec);
	      max = wi::to_wide (TYPE_MAX_VALUE (exptype));
	    }
	  else if (wi::les_p (min - 1, 0))
	    {
	      /* EXP is not in a negative-positive range.  That means EXP
		 is either negative, or greater than max.  Since negative
		 sizes are invalid make the range [MAX + 1, TYPE_MAX].  */
	      min = max + 1;
	      max = wi::to_wide (TYPE_MAX_VALUE (exptype));
	    }
	  else
	    {
	      /* The excluded span is entirely positive: valid sizes lie
		 below it, giving [0, MIN - 1].  */
	      max = min - 1;
	      min = wi::zero (expprec);
	    }
	}
      else if (wi::eq_p (0, min - 1))
	{
	  /* EXP is unsigned and not in the range [1, MAX].  That means
	     it's either zero or greater than MAX.  Even though 0 would
	     normally be detected by -Walloc-zero, unless ALLOW_ZERO
	     is true, set the range to [MAX, TYPE_MAX] so that when MAX
	     is greater than the limit the whole range is diagnosed.  */
	  if (allow_zero)
	    min = max = wi::zero (expprec);
	  else
	    {
	      min = max + 1;
	      max = wi::to_wide (TYPE_MAX_VALUE (exptype));
	    }
	}
      else
	{
	  /* Unsigned anti-range not starting at 1: valid sizes lie
	     below the excluded span, giving [0, MIN - 1].  */
	  max = min - 1;
	  min = wi::zero (expprec);
	}
    }

  range[0] = wide_int_to_tree (exptype, min);
  range[1] = wide_int_to_tree (exptype, max);

  return true;
}
135010d565efSmrg 
135110d565efSmrg /* Diagnose a call EXP to function FN decorated with attribute alloc_size
135210d565efSmrg    whose argument numbers given by IDX with values given by ARGS exceed
135310d565efSmrg    the maximum object size or cause an unsigned oveflow (wrapping) when
13540fc04c29Smrg    multiplied.  FN is null when EXP is a call via a function pointer.
13550fc04c29Smrg    When ARGS[0] is null the function does nothing.  ARGS[1] may be null
13560fc04c29Smrg    for functions like malloc, and non-null for those like calloc that
13570fc04c29Smrg    are decorated with a two-argument attribute alloc_size.  */
135810d565efSmrg 
135910d565efSmrg void
maybe_warn_alloc_args_overflow(tree fn,tree exp,tree args[2],int idx[2])136010d565efSmrg maybe_warn_alloc_args_overflow (tree fn, tree exp, tree args[2], int idx[2])
136110d565efSmrg {
136210d565efSmrg   /* The range each of the (up to) two arguments is known to be in.  */
136310d565efSmrg   tree argrange[2][2] = { { NULL_TREE, NULL_TREE }, { NULL_TREE, NULL_TREE } };
136410d565efSmrg 
136510d565efSmrg   /* Maximum object size set by -Walloc-size-larger-than= or SIZE_MAX / 2.  */
136610d565efSmrg   tree maxobjsize = alloc_max_size ();
136710d565efSmrg 
136810d565efSmrg   location_t loc = EXPR_LOCATION (exp);
136910d565efSmrg 
13700fc04c29Smrg   tree fntype = fn ? TREE_TYPE (fn) : TREE_TYPE (TREE_TYPE (exp));
137110d565efSmrg   bool warned = false;
137210d565efSmrg 
137310d565efSmrg   /* Validate each argument individually.  */
137410d565efSmrg   for (unsigned i = 0; i != 2 && args[i]; ++i)
137510d565efSmrg     {
137610d565efSmrg       if (TREE_CODE (args[i]) == INTEGER_CST)
137710d565efSmrg 	{
137810d565efSmrg 	  argrange[i][0] = args[i];
137910d565efSmrg 	  argrange[i][1] = args[i];
138010d565efSmrg 
138110d565efSmrg 	  if (tree_int_cst_lt (args[i], integer_zero_node))
138210d565efSmrg 	    {
138310d565efSmrg 	      warned = warning_at (loc, OPT_Walloc_size_larger_than_,
138410d565efSmrg 				   "%Kargument %i value %qE is negative",
138510d565efSmrg 				   exp, idx[i] + 1, args[i]);
138610d565efSmrg 	    }
138710d565efSmrg 	  else if (integer_zerop (args[i]))
138810d565efSmrg 	    {
138910d565efSmrg 	      /* Avoid issuing -Walloc-zero for allocation functions other
139010d565efSmrg 		 than __builtin_alloca that are declared with attribute
139110d565efSmrg 		 returns_nonnull because there's no portability risk.  This
139210d565efSmrg 		 avoids warning for such calls to libiberty's xmalloc and
139310d565efSmrg 		 friends.
139410d565efSmrg 		 Also avoid issuing the warning for calls to function named
139510d565efSmrg 		 "alloca".  */
1396*ec02198aSmrg 	      if (fn && fndecl_built_in_p (fn, BUILT_IN_ALLOCA)
1397*ec02198aSmrg 		  ? IDENTIFIER_LENGTH (DECL_NAME (fn)) != 6
1398*ec02198aSmrg 		  : !lookup_attribute ("returns_nonnull",
1399*ec02198aSmrg 				       TYPE_ATTRIBUTES (fntype)))
140010d565efSmrg 		warned = warning_at (loc, OPT_Walloc_zero,
140110d565efSmrg 				     "%Kargument %i value is zero",
140210d565efSmrg 				     exp, idx[i] + 1);
140310d565efSmrg 	    }
140410d565efSmrg 	  else if (tree_int_cst_lt (maxobjsize, args[i]))
140510d565efSmrg 	    {
140610d565efSmrg 	      /* G++ emits calls to ::operator new[](SIZE_MAX) in C++98
140710d565efSmrg 		 mode and with -fno-exceptions as a way to indicate array
140810d565efSmrg 		 size overflow.  There's no good way to detect C++98 here
140910d565efSmrg 		 so avoid diagnosing these calls for all C++ modes.  */
141010d565efSmrg 	      if (i == 0
14110fc04c29Smrg 		  && fn
141210d565efSmrg 		  && !args[1]
141310d565efSmrg 		  && lang_GNU_CXX ()
1414*ec02198aSmrg 		  && DECL_IS_OPERATOR_NEW_P (fn)
141510d565efSmrg 		  && integer_all_onesp (args[i]))
141610d565efSmrg 		continue;
141710d565efSmrg 
141810d565efSmrg 	      warned = warning_at (loc, OPT_Walloc_size_larger_than_,
141910d565efSmrg 				   "%Kargument %i value %qE exceeds "
142010d565efSmrg 				   "maximum object size %E",
142110d565efSmrg 				   exp, idx[i] + 1, args[i], maxobjsize);
142210d565efSmrg 	    }
142310d565efSmrg 	}
142410d565efSmrg       else if (TREE_CODE (args[i]) == SSA_NAME
142510d565efSmrg 	       && get_size_range (args[i], argrange[i]))
142610d565efSmrg 	{
142710d565efSmrg 	  /* Verify that the argument's range is not negative (including
142810d565efSmrg 	     upper bound of zero).  */
142910d565efSmrg 	  if (tree_int_cst_lt (argrange[i][0], integer_zero_node)
143010d565efSmrg 	      && tree_int_cst_le (argrange[i][1], integer_zero_node))
143110d565efSmrg 	    {
143210d565efSmrg 	      warned = warning_at (loc, OPT_Walloc_size_larger_than_,
143310d565efSmrg 				   "%Kargument %i range [%E, %E] is negative",
143410d565efSmrg 				   exp, idx[i] + 1,
143510d565efSmrg 				   argrange[i][0], argrange[i][1]);
143610d565efSmrg 	    }
143710d565efSmrg 	  else if (tree_int_cst_lt (maxobjsize, argrange[i][0]))
143810d565efSmrg 	    {
143910d565efSmrg 	      warned = warning_at (loc, OPT_Walloc_size_larger_than_,
144010d565efSmrg 				   "%Kargument %i range [%E, %E] exceeds "
144110d565efSmrg 				   "maximum object size %E",
144210d565efSmrg 				   exp, idx[i] + 1,
144310d565efSmrg 				   argrange[i][0], argrange[i][1],
144410d565efSmrg 				   maxobjsize);
144510d565efSmrg 	    }
144610d565efSmrg 	}
144710d565efSmrg     }
144810d565efSmrg 
144910d565efSmrg   if (!argrange[0])
145010d565efSmrg     return;
145110d565efSmrg 
145210d565efSmrg   /* For a two-argument alloc_size, validate the product of the two
145310d565efSmrg      arguments if both of their values or ranges are known.  */
145410d565efSmrg   if (!warned && tree_fits_uhwi_p (argrange[0][0])
145510d565efSmrg       && argrange[1][0] && tree_fits_uhwi_p (argrange[1][0])
145610d565efSmrg       && !integer_onep (argrange[0][0])
145710d565efSmrg       && !integer_onep (argrange[1][0]))
145810d565efSmrg     {
145910d565efSmrg       /* Check for overflow in the product of a function decorated with
146010d565efSmrg 	 attribute alloc_size (X, Y).  */
146110d565efSmrg       unsigned szprec = TYPE_PRECISION (size_type_node);
146210d565efSmrg       wide_int x = wi::to_wide (argrange[0][0], szprec);
146310d565efSmrg       wide_int y = wi::to_wide (argrange[1][0], szprec);
146410d565efSmrg 
14650fc04c29Smrg       wi::overflow_type vflow;
146610d565efSmrg       wide_int prod = wi::umul (x, y, &vflow);
146710d565efSmrg 
146810d565efSmrg       if (vflow)
146910d565efSmrg 	warned = warning_at (loc, OPT_Walloc_size_larger_than_,
147010d565efSmrg 			     "%Kproduct %<%E * %E%> of arguments %i and %i "
147110d565efSmrg 			     "exceeds %<SIZE_MAX%>",
147210d565efSmrg 			     exp, argrange[0][0], argrange[1][0],
147310d565efSmrg 			     idx[0] + 1, idx[1] + 1);
147410d565efSmrg       else if (wi::ltu_p (wi::to_wide (maxobjsize, szprec), prod))
147510d565efSmrg 	warned = warning_at (loc, OPT_Walloc_size_larger_than_,
147610d565efSmrg 			     "%Kproduct %<%E * %E%> of arguments %i and %i "
147710d565efSmrg 			     "exceeds maximum object size %E",
147810d565efSmrg 			     exp, argrange[0][0], argrange[1][0],
147910d565efSmrg 			     idx[0] + 1, idx[1] + 1,
148010d565efSmrg 			     maxobjsize);
148110d565efSmrg 
148210d565efSmrg       if (warned)
148310d565efSmrg 	{
148410d565efSmrg 	  /* Print the full range of each of the two arguments to make
148510d565efSmrg 	     it clear when it is, in fact, in a range and not constant.  */
148610d565efSmrg 	  if (argrange[0][0] != argrange [0][1])
148710d565efSmrg 	    inform (loc, "argument %i in the range [%E, %E]",
148810d565efSmrg 		    idx[0] + 1, argrange[0][0], argrange[0][1]);
148910d565efSmrg 	  if (argrange[1][0] != argrange [1][1])
149010d565efSmrg 	    inform (loc, "argument %i in the range [%E, %E]",
149110d565efSmrg 		    idx[1] + 1, argrange[1][0], argrange[1][1]);
149210d565efSmrg 	}
149310d565efSmrg     }
149410d565efSmrg 
14950fc04c29Smrg   if (warned && fn)
149610d565efSmrg     {
149710d565efSmrg       location_t fnloc = DECL_SOURCE_LOCATION (fn);
149810d565efSmrg 
149910d565efSmrg       if (DECL_IS_BUILTIN (fn))
150010d565efSmrg 	inform (loc,
150110d565efSmrg 		"in a call to built-in allocation function %qD", fn);
150210d565efSmrg       else
150310d565efSmrg 	inform (fnloc,
150410d565efSmrg 		"in a call to allocation function %qD declared here", fn);
150510d565efSmrg     }
150610d565efSmrg }
150710d565efSmrg 
1508c7a68eb7Smrg /* If EXPR refers to a character array or pointer declared attribute
1509c7a68eb7Smrg    nonstring return a decl for that array or pointer and set *REF to
1510c7a68eb7Smrg    the referenced enclosing object or pointer.  Otherwise returns
1511c7a68eb7Smrg    null.  */
1512c7a68eb7Smrg 
1513c7a68eb7Smrg tree
get_attr_nonstring_decl(tree expr,tree * ref)1514c7a68eb7Smrg get_attr_nonstring_decl (tree expr, tree *ref)
1515c7a68eb7Smrg {
1516c7a68eb7Smrg   tree decl = expr;
15170fc04c29Smrg   tree var = NULL_TREE;
1518c7a68eb7Smrg   if (TREE_CODE (decl) == SSA_NAME)
1519c7a68eb7Smrg     {
1520c7a68eb7Smrg       gimple *def = SSA_NAME_DEF_STMT (decl);
1521c7a68eb7Smrg 
1522c7a68eb7Smrg       if (is_gimple_assign (def))
1523c7a68eb7Smrg 	{
1524c7a68eb7Smrg 	  tree_code code = gimple_assign_rhs_code (def);
1525c7a68eb7Smrg 	  if (code == ADDR_EXPR
1526c7a68eb7Smrg 	      || code == COMPONENT_REF
1527c7a68eb7Smrg 	      || code == VAR_DECL)
1528c7a68eb7Smrg 	    decl = gimple_assign_rhs1 (def);
1529c7a68eb7Smrg 	}
15300fc04c29Smrg       else
15310fc04c29Smrg 	var = SSA_NAME_VAR (decl);
1532c7a68eb7Smrg     }
1533c7a68eb7Smrg 
1534c7a68eb7Smrg   if (TREE_CODE (decl) == ADDR_EXPR)
1535c7a68eb7Smrg     decl = TREE_OPERAND (decl, 0);
1536c7a68eb7Smrg 
15370fc04c29Smrg   /* To simplify calling code, store the referenced DECL regardless of
15380fc04c29Smrg      the attribute determined below, but avoid storing the SSA_NAME_VAR
15390fc04c29Smrg      obtained above (it's not useful for dataflow purposes).  */
1540c7a68eb7Smrg   if (ref)
1541c7a68eb7Smrg     *ref = decl;
1542c7a68eb7Smrg 
15430fc04c29Smrg   /* Use the SSA_NAME_VAR that was determined above to see if it's
15440fc04c29Smrg      declared nonstring.  Otherwise drill down into the referenced
15450fc04c29Smrg      DECL.  */
15460fc04c29Smrg   if (var)
15470fc04c29Smrg     decl = var;
15480fc04c29Smrg   else if (TREE_CODE (decl) == ARRAY_REF)
1549c7a68eb7Smrg     decl = TREE_OPERAND (decl, 0);
1550c7a68eb7Smrg   else if (TREE_CODE (decl) == COMPONENT_REF)
1551c7a68eb7Smrg     decl = TREE_OPERAND (decl, 1);
1552c7a68eb7Smrg   else if (TREE_CODE (decl) == MEM_REF)
1553c7a68eb7Smrg     return get_attr_nonstring_decl (TREE_OPERAND (decl, 0), ref);
1554c7a68eb7Smrg 
1555c7a68eb7Smrg   if (DECL_P (decl)
1556c7a68eb7Smrg       && lookup_attribute ("nonstring", DECL_ATTRIBUTES (decl)))
1557c7a68eb7Smrg     return decl;
1558c7a68eb7Smrg 
1559c7a68eb7Smrg   return NULL_TREE;
1560c7a68eb7Smrg }
1561c7a68eb7Smrg 
/* Warn about passing a non-string array/pointer to a function that
   expects a nul-terminated string argument.  FNDECL is the called
   function (only normal built-ins are considered) and EXP the
   CALL_EXPR.  Emits -Wstringop-overflow diagnostics.  */

void
maybe_warn_nonstring_arg (tree fndecl, tree exp)
{
  /* Only normal built-ins (the string/memory functions) are checked.  */
  if (!fndecl || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
    return;

  if (TREE_NO_WARNING (exp) || !warn_stringop_overflow)
    return;

  /* Avoid clearly invalid calls (more checking done below).  */
  unsigned nargs = call_expr_nargs (exp);
  if (!nargs)
    return;

  /* The bound argument to a bounded string function like strncpy.  */
  tree bound = NULL_TREE;

  /* The longest known or possible string argument to one of the comparison
     functions.  If the length is less than the bound it is used instead.
     Since the length is only used for warning and not for code generation
     disable strict mode in the calls to get_range_strlen below.  */
  tree maxlen = NULL_TREE;

  /* It's safe to call "bounded" string functions with a non-string
     argument since the functions provide an explicit bound for this
     purpose.  The exception is strncat where the bound may refer to
     either the destination or the source.  */
  int fncode = DECL_FUNCTION_CODE (fndecl);
  switch (fncode)
    {
    case BUILT_IN_STRCMP:
    case BUILT_IN_STRNCMP:
    case BUILT_IN_STRNCASECMP:
      {
	/* For these, if one argument refers to one or more of a set
	   of string constants or arrays of known size, determine
	   the range of their known or possible lengths and use it
	   conservatively as the bound for the unbounded function,
	   and to adjust the range of the bound of the bounded ones.
	   Stop as soon as a constant length has been found.  */
	for (unsigned argno = 0;
	     argno < MIN (nargs, 2)
	       && !(maxlen && TREE_CODE (maxlen) == INTEGER_CST); argno++)
	  {
	    tree arg = CALL_EXPR_ARG (exp, argno);
	    if (!get_attr_nonstring_decl (arg))
	      {
		c_strlen_data lendata = { };
		/* Set MAXBOUND to an arbitrary non-null non-integer
		   node as a request to have it set to the length of
		   the longest string in a PHI.  */
		lendata.maxbound = arg;
		get_range_strlen (arg, &lendata, /* eltsize = */ 1);
		maxlen = lendata.maxbound;
	      }
	  }
      }
      /* Fall through.  */

    case BUILT_IN_STRNCAT:
    case BUILT_IN_STPNCPY:
    case BUILT_IN_STRNCPY:
      /* The bound is the third argument for these functions.  */
      if (nargs > 2)
	bound = CALL_EXPR_ARG (exp, 2);
      break;

    case BUILT_IN_STRNDUP:
      if (nargs > 1)
	bound = CALL_EXPR_ARG (exp, 1);
      break;

    case BUILT_IN_STRNLEN:
      {
	tree arg = CALL_EXPR_ARG (exp, 0);
	if (!get_attr_nonstring_decl (arg))
	  {
	    c_strlen_data lendata = { };
	    /* Set MAXBOUND to an arbitrary non-null non-integer
	       node as a request to have it set to the length of
	       the longest string in a PHI.  */
	    lendata.maxbound = arg;
	    get_range_strlen (arg, &lendata, /* eltsize = */ 1);
	    maxlen = lendata.maxbound;
	  }
	if (nargs > 1)
	  bound = CALL_EXPR_ARG (exp, 1);
	break;
      }

    default:
      break;
    }

  /* Determine the range of the bound argument (if specified).  */
  tree bndrng[2] = { NULL_TREE, NULL_TREE };
  if (bound)
    {
      STRIP_NOPS (bound);
      get_size_range (bound, bndrng);
    }

  location_t loc = EXPR_LOCATION (exp);

  if (bndrng[0])
    {
      /* Diagnose excessive bound prior the adjustment below and
	 regardless of attribute nonstring.  */
      tree maxobjsize = max_object_size ();
      if (tree_int_cst_lt (maxobjsize, bndrng[0]))
	{
	  if (tree_int_cst_equal (bndrng[0], bndrng[1]))
	    warning_at (loc, OPT_Wstringop_overflow_,
			"%K%qD specified bound %E "
			"exceeds maximum object size %E",
			exp, fndecl, bndrng[0], maxobjsize);
	  else
	    warning_at (loc, OPT_Wstringop_overflow_,
			"%K%qD specified bound [%E, %E] "
			"exceeds maximum object size %E",
			exp, fndecl, bndrng[0], bndrng[1], maxobjsize);
	  return;
	}
    }

  /* MAXLEN of all-ones means "length unknown"; skip in that case.  */
  if (maxlen && !integer_all_onesp (maxlen))
    {
      /* Add one for the nul.  */
      maxlen = const_binop (PLUS_EXPR, TREE_TYPE (maxlen), maxlen,
			    size_one_node);

      if (!bndrng[0])
	{
	  /* Conservatively use the upper bound of the lengths for
	     both the lower and the upper bound of the operation.
	     BOUND set to void_type_node marks the bound as synthetic
	     (not supplied by the call) for the checks below.  */
	  bndrng[0] = maxlen;
	  bndrng[1] = maxlen;
	  bound = void_type_node;
	}
      else if (maxlen)
	{
	  /* Replace the bound on the operation with the upper bound
	     of the length of the string if the latter is smaller.  */
	  if (tree_int_cst_lt (maxlen, bndrng[0]))
	    bndrng[0] = maxlen;
	  else if (tree_int_cst_lt (maxlen, bndrng[1]))
	    bndrng[1] = maxlen;
	}
    }

  /* Iterate over the built-in function's formal arguments and check
     each const char* against the actual argument.  If the actual
     argument is declared attribute non-string issue a warning unless
     the argument's maximum length is bounded.  */
  function_args_iterator it;
  function_args_iter_init (&it, TREE_TYPE (fndecl));

  for (unsigned argno = 0; ; ++argno, function_args_iter_next (&it))
    {
      /* Avoid iterating past the declared argument in a call
	 to function declared without a prototype.  */
      if (argno >= nargs)
	break;

      tree argtype = function_args_iter_cond (&it);
      if (!argtype)
	break;

      /* Only pointer-to-const-char formals are of interest.  */
      if (TREE_CODE (argtype) != POINTER_TYPE)
	continue;

      argtype = TREE_TYPE (argtype);

      if (TREE_CODE (argtype) != INTEGER_TYPE
	  || !TYPE_READONLY (argtype))
	continue;

      argtype = TYPE_MAIN_VARIANT (argtype);
      if (argtype != char_type_node)
	continue;

      tree callarg = CALL_EXPR_ARG (exp, argno);
      if (TREE_CODE (callarg) == ADDR_EXPR)
	callarg = TREE_OPERAND (callarg, 0);

      /* See if the destination is declared with attribute "nonstring".  */
      tree decl = get_attr_nonstring_decl (callarg);
      if (!decl)
	continue;

      /* The maximum number of array elements accessed.  */
      offset_int wibnd = 0;

      if (argno && fncode == BUILT_IN_STRNCAT)
	{
	  /* See if the bound in strncat is derived from the length
	     of the strlen of the destination (as it's expected to be).
	     If so, reset BOUND and FNCODE to trigger a warning.  */
	  tree dstarg = CALL_EXPR_ARG (exp, 0);
	  if (is_strlen_related_p (dstarg, bound))
	    {
	      /* The bound applies to the destination, not to the source,
		 so reset these to trigger a warning without mentioning
		 the bound.  */
	      bound = NULL;
	      fncode = 0;
	    }
	  else if (bndrng[1])
	    /* Use the upper bound of the range for strncat.  */
	    wibnd = wi::to_offset (bndrng[1]);
	}
      else if (bndrng[0])
	/* Use the lower bound of the range for functions other than
	   strncat.  */
	wibnd = wi::to_offset (bndrng[0]);

      /* Determine the size of the argument array if it is one.  */
      offset_int asize = wibnd;
      bool known_size = false;
      tree type = TREE_TYPE (decl);

      /* Determine the array size.  For arrays of unknown bound and
	 pointers reset BOUND to trigger the appropriate warning.  */
      if (TREE_CODE (type) == ARRAY_TYPE)
	{
	  if (tree arrbnd = TYPE_DOMAIN (type))
	    {
	      if ((arrbnd = TYPE_MAX_VALUE (arrbnd)))
		{
		  /* Array size is the upper bound of the domain + 1.  */
		  asize = wi::to_offset (arrbnd) + 1;
		  known_size = true;
		}
	    }
	  else if (bound == void_type_node)
	    bound = NULL_TREE;
	}
      else if (bound == void_type_node)
	bound = NULL_TREE;

      /* In a call to strncat with a bound in a range whose lower but
	 not upper bound is less than the array size, reset ASIZE to
	 be the same as the bound and the other variable to trigger
	 the apprpriate warning below.  */
      if (fncode == BUILT_IN_STRNCAT
	  && bndrng[0] != bndrng[1]
	  && wi::ltu_p (wi::to_offset (bndrng[0]), asize)
	  && (!known_size
	      || wi::ltu_p (asize, wibnd)))
	{
	  asize = wibnd;
	  bound = NULL_TREE;
	  fncode = 0;
	}

      bool warned = false;

      /* Group the warning and the note below into one diagnostic.  */
      auto_diagnostic_group d;
      if (wi::ltu_p (asize, wibnd))
	{
	  if (bndrng[0] == bndrng[1])
	    warned = warning_at (loc, OPT_Wstringop_overflow_,
				 "%qD argument %i declared attribute "
				 "%<nonstring%> is smaller than the specified "
				 "bound %wu",
				 fndecl, argno + 1, wibnd.to_uhwi ());
	  else if (wi::ltu_p (asize, wi::to_offset (bndrng[0])))
	    warned = warning_at (loc, OPT_Wstringop_overflow_,
				 "%qD argument %i declared attribute "
				 "%<nonstring%> is smaller than "
				 "the specified bound [%E, %E]",
				 fndecl, argno + 1, bndrng[0], bndrng[1]);
	  else
	    warned = warning_at (loc, OPT_Wstringop_overflow_,
				 "%qD argument %i declared attribute "
				 "%<nonstring%> may be smaller than "
				 "the specified bound [%E, %E]",
				 fndecl, argno + 1, bndrng[0], bndrng[1]);
	}
      else if (fncode == BUILT_IN_STRNCAT)
	; /* Avoid warning for calls to strncat() when the bound
	     is equal to the size of the non-string argument.  */
      else if (!bound)
	warned = warning_at (loc, OPT_Wstringop_overflow_,
			     "%qD argument %i declared attribute %<nonstring%>",
			     fndecl, argno + 1);

      if (warned)
	inform (DECL_SOURCE_LOCATION (decl),
		"argument %qD declared here", decl);
    }
}
1854c7a68eb7Smrg 
185510d565efSmrg /* Issue an error if CALL_EXPR was flagged as requiring
185610d565efSmrg    tall-call optimization.  */
185710d565efSmrg 
185810d565efSmrg static void
maybe_complain_about_tail_call(tree call_expr,const char * reason)185910d565efSmrg maybe_complain_about_tail_call (tree call_expr, const char *reason)
186010d565efSmrg {
186110d565efSmrg   gcc_assert (TREE_CODE (call_expr) == CALL_EXPR);
186210d565efSmrg   if (!CALL_EXPR_MUST_TAIL_CALL (call_expr))
186310d565efSmrg     return;
186410d565efSmrg 
186510d565efSmrg   error_at (EXPR_LOCATION (call_expr), "cannot tail-call: %s", reason);
186610d565efSmrg }
186710d565efSmrg 
/* Used to define rdwr_map below.  Integer hash traits for argument
   numbers; -1 appears to serve as the empty-slot marker per int_hash's
   template parameters — TODO confirm against int_hash's definition.  */
struct rdwr_access_hash: int_hash<int, -1> { };

/* A mapping between argument number corresponding to attribute access
   mode (read_only, write_only, or read_write) and operands.  */
typedef hash_map<rdwr_access_hash, attr_access> rdwr_map;
1874*ec02198aSmrg 
/* Initialize a mapping for a call to function FNDECL declared with
   attribute access.  Each attribute positional operand inserts one
   entry into the mapping with the operand number as the key.  */

static void
init_attr_rdwr_indices (rdwr_map *rwm, tree fntype)
{
  if (!fntype)
    return;

  /* Walk every "access" attribute on the function type; a function
     may carry more than one.  */
  for (tree access = TYPE_ATTRIBUTES (fntype);
       (access = lookup_attribute ("access", access));
       access = TREE_CHAIN (access))
    {
      /* The TREE_VALUE of an attribute is a TREE_LIST whose TREE_VALUE
	 is the attribute argument's value.  */
      tree mode = TREE_VALUE (access);
      gcc_assert (TREE_CODE (mode) == TREE_LIST);
      mode = TREE_VALUE (mode);
      gcc_assert (TREE_CODE (mode) == STRING_CST);

      /* The string encodes one or more records, each a mode character
	 ('r', 'w', or anything else meaning read_write) followed by
	 the decimal pointer-argument number and an optional
	 ",<size-argument-number>" — presumably produced by the front
	 end when the attribute is parsed; verify against the
	 producer.  */
      const char *modestr = TREE_STRING_POINTER (mode);
      for (const char *m = modestr; *m; )
	{
	  attr_access acc = { };

	  switch (*m)
	    {
	    case 'r': acc.mode = acc.read_only; break;
	    case 'w': acc.mode = acc.write_only; break;
	    default: acc.mode = acc.read_write; break;
	    }

	  /* Parse the pointer operand number following the mode
	     character.  */
	  char *end;
	  acc.ptrarg = strtoul (++m, &end, 10);
	  m = end;
	  if (*m == ',')
	    {
	      acc.sizarg = strtoul (++m, &end, 10);
	      m = end;
	    }
	  else
	    /* No size operand specified; UINT_MAX marks it absent.  */
	    acc.sizarg = UINT_MAX;

	  acc.ptr = NULL_TREE;
	  acc.size = NULL_TREE;

	  /* Unconditionally add an entry for the required pointer
	     operand of the attribute, and one for the optional size
	     operand when it's specified.  */
	  rwm->put (acc.ptrarg, acc);
	  if (acc.sizarg != UINT_MAX)
	    rwm->put (acc.sizarg, acc);
	}
    }
}
1931*ec02198aSmrg 
1932*ec02198aSmrg /* Returns the type of the argument ARGNO to function with type FNTYPE
1933*ec02198aSmrg    or null when the typoe cannot be determined or no such argument exists.  */
1934*ec02198aSmrg 
1935*ec02198aSmrg static tree
fntype_argno_type(tree fntype,unsigned argno)1936*ec02198aSmrg fntype_argno_type (tree fntype, unsigned argno)
1937*ec02198aSmrg {
1938*ec02198aSmrg   if (!prototype_p (fntype))
1939*ec02198aSmrg     return NULL_TREE;
1940*ec02198aSmrg 
1941*ec02198aSmrg   tree argtype;
1942*ec02198aSmrg   function_args_iterator it;
1943*ec02198aSmrg   FOREACH_FUNCTION_ARGS (fntype, argtype, it)
1944*ec02198aSmrg     if (argno-- == 0)
1945*ec02198aSmrg       return argtype;
1946*ec02198aSmrg 
1947*ec02198aSmrg   return NULL_TREE;
1948*ec02198aSmrg }
1949*ec02198aSmrg 
1950*ec02198aSmrg /* Helper to append the "rdwr" attribute specification described
1951*ec02198aSmrg    by ACCESS to the array ATTRSTR with size STRSIZE.  Used in
1952*ec02198aSmrg    diagnostics.  */
1953*ec02198aSmrg 
1954*ec02198aSmrg static inline void
append_attrname(const std::pair<int,attr_access> & access,char * attrstr,size_t strsize)1955*ec02198aSmrg append_attrname (const std::pair<int, attr_access> &access,
1956*ec02198aSmrg 		 char *attrstr, size_t strsize)
1957*ec02198aSmrg {
1958*ec02198aSmrg   /* Append the relevant attribute to the string.  This (deliberately)
1959*ec02198aSmrg      appends the attribute pointer operand even when none was specified.  */
1960*ec02198aSmrg   size_t len = strlen (attrstr);
1961*ec02198aSmrg 
1962*ec02198aSmrg   const char *atname
1963*ec02198aSmrg     = (access.second.mode == attr_access::read_only
1964*ec02198aSmrg        ? "read_only"
1965*ec02198aSmrg        : (access.second.mode == attr_access::write_only
1966*ec02198aSmrg 	  ? "write_only" : "read_write"));
1967*ec02198aSmrg 
1968*ec02198aSmrg   const char *sep = len ? ", " : "";
1969*ec02198aSmrg 
1970*ec02198aSmrg   if (access.second.sizarg == UINT_MAX)
1971*ec02198aSmrg     snprintf (attrstr + len, strsize - len,
1972*ec02198aSmrg 	      "%s%s (%i)", sep, atname,
1973*ec02198aSmrg 	      access.second.ptrarg + 1);
1974*ec02198aSmrg   else
1975*ec02198aSmrg     snprintf (attrstr + len, strsize - len,
1976*ec02198aSmrg 	      "%s%s (%i, %i)", sep, atname,
1977*ec02198aSmrg 	      access.second.ptrarg + 1, access.second.sizarg + 1);
1978*ec02198aSmrg }
1979*ec02198aSmrg 
1980*ec02198aSmrg /* Iterate over attribute access read-only, read-write, and write-only
1981*ec02198aSmrg    arguments and diagnose past-the-end accesses and related problems
1982*ec02198aSmrg    in the function call EXP.  */
1983*ec02198aSmrg 
1984*ec02198aSmrg static void
maybe_warn_rdwr_sizes(rdwr_map * rwm,tree fndecl,tree fntype,tree exp)1985*ec02198aSmrg maybe_warn_rdwr_sizes (rdwr_map *rwm, tree fndecl, tree fntype, tree exp)
1986*ec02198aSmrg {
1987*ec02198aSmrg   /* A string describing the attributes that the warnings issued by this
1988*ec02198aSmrg      function apply to.  Used to print one informational note per function
1989*ec02198aSmrg      call, rather than one per warning.  That reduces clutter.  */
1990*ec02198aSmrg   char attrstr[80];
1991*ec02198aSmrg   attrstr[0] = 0;
1992*ec02198aSmrg 
1993*ec02198aSmrg   for (rdwr_map::iterator it = rwm->begin (); it != rwm->end (); ++it)
1994*ec02198aSmrg     {
1995*ec02198aSmrg       std::pair<int, attr_access> access = *it;
1996*ec02198aSmrg 
1997*ec02198aSmrg       /* Get the function call arguments corresponding to the attribute's
1998*ec02198aSmrg 	 positional arguments.  When both arguments have been specified
1999*ec02198aSmrg 	 there will be two entries in *RWM, one for each.  They are
2000*ec02198aSmrg 	 cross-referenced by their respective argument numbers in
2001*ec02198aSmrg 	 ACCESS.PTRARG and ACCESS.SIZARG.  */
2002*ec02198aSmrg       const int ptridx = access.second.ptrarg;
2003*ec02198aSmrg       const int sizidx = access.second.sizarg;
2004*ec02198aSmrg 
2005*ec02198aSmrg       gcc_assert (ptridx != -1);
2006*ec02198aSmrg       gcc_assert (access.first == ptridx || access.first == sizidx);
2007*ec02198aSmrg 
2008*ec02198aSmrg       /* The pointer is set to null for the entry corresponding to
2009*ec02198aSmrg 	 the size argument.  Skip it.  It's handled when the entry
2010*ec02198aSmrg 	 corresponding to the pointer argument comes up.  */
2011*ec02198aSmrg       if (!access.second.ptr)
2012*ec02198aSmrg 	continue;
2013*ec02198aSmrg 
2014*ec02198aSmrg       tree argtype = fntype_argno_type (fntype, ptridx);
2015*ec02198aSmrg       argtype = TREE_TYPE (argtype);
2016*ec02198aSmrg 
2017*ec02198aSmrg       tree size;
2018*ec02198aSmrg       if (sizidx == -1)
2019*ec02198aSmrg 	{
2020*ec02198aSmrg 	  /* If only the pointer attribute operand was specified
2021*ec02198aSmrg 	     and not size, set SIZE to the size of one element of
2022*ec02198aSmrg 	     the pointed to type to detect smaller objects (null
2023*ec02198aSmrg 	     pointers are diagnosed in this case only if
2024*ec02198aSmrg 	     the pointer is also declared with attribute nonnull.  */
2025*ec02198aSmrg 	  size = size_one_node;
2026*ec02198aSmrg 	}
2027*ec02198aSmrg       else
2028*ec02198aSmrg 	size = rwm->get (sizidx)->size;
2029*ec02198aSmrg 
2030*ec02198aSmrg       tree ptr = access.second.ptr;
2031*ec02198aSmrg       tree sizrng[2] = { size_zero_node, build_all_ones_cst (sizetype) };
2032*ec02198aSmrg       if (get_size_range (size, sizrng, true)
2033*ec02198aSmrg 	  && tree_int_cst_sgn (sizrng[0]) < 0
2034*ec02198aSmrg 	  && tree_int_cst_sgn (sizrng[1]) < 0)
2035*ec02198aSmrg 	{
2036*ec02198aSmrg 	  /* Warn about negative sizes.  */
2037*ec02198aSmrg 	  bool warned = false;
2038*ec02198aSmrg 	  location_t loc = EXPR_LOCATION (exp);
2039*ec02198aSmrg 	  if (tree_int_cst_equal (sizrng[0], sizrng[1]))
2040*ec02198aSmrg 	    warned = warning_at (loc, OPT_Wstringop_overflow_,
2041*ec02198aSmrg 				 "%Kargument %i value %E is negative",
2042*ec02198aSmrg 				 exp, sizidx + 1, size);
2043*ec02198aSmrg 	  else
2044*ec02198aSmrg 	    warned = warning_at (loc, OPT_Wstringop_overflow_,
2045*ec02198aSmrg 				 "%Kargument %i range [%E, %E] is negative",
2046*ec02198aSmrg 				 exp, sizidx + 1, sizrng[0], sizrng[1]);
2047*ec02198aSmrg 	  if (warned)
2048*ec02198aSmrg 	    {
2049*ec02198aSmrg 	      append_attrname (access, attrstr, sizeof attrstr);
2050*ec02198aSmrg 	      /* Avoid warning again for the same attribute.  */
2051*ec02198aSmrg 	      continue;
2052*ec02198aSmrg 	    }
2053*ec02198aSmrg 	}
2054*ec02198aSmrg 
2055*ec02198aSmrg       if (tree_int_cst_sgn (sizrng[0]) >= 0)
2056*ec02198aSmrg 	{
2057*ec02198aSmrg 	  if (COMPLETE_TYPE_P (argtype))
2058*ec02198aSmrg 	    {
2059*ec02198aSmrg 	      /* Multiple SIZE by the size of the type the pointer
2060*ec02198aSmrg 		 argument points to.  If it's incomplete the size
2061*ec02198aSmrg 		 is used as is.  */
2062*ec02198aSmrg 	      size = NULL_TREE;
2063*ec02198aSmrg 	      if (tree argsize = TYPE_SIZE_UNIT (argtype))
2064*ec02198aSmrg 		if (TREE_CODE (argsize) == INTEGER_CST)
2065*ec02198aSmrg 		  {
2066*ec02198aSmrg 		    const int prec = TYPE_PRECISION (sizetype);
2067*ec02198aSmrg 		    wide_int minsize = wi::to_wide (sizrng[0], prec);
2068*ec02198aSmrg 		    minsize *= wi::to_wide (argsize, prec);
2069*ec02198aSmrg 		    size = wide_int_to_tree (sizetype, minsize);
2070*ec02198aSmrg 		  }
2071*ec02198aSmrg 	    }
2072*ec02198aSmrg 	}
2073*ec02198aSmrg       else
2074*ec02198aSmrg 	size = NULL_TREE;
2075*ec02198aSmrg 
2076*ec02198aSmrg       if (sizidx >= 0
2077*ec02198aSmrg 	  && integer_zerop (ptr)
2078*ec02198aSmrg 	  && tree_int_cst_sgn (sizrng[0]) > 0)
2079*ec02198aSmrg 	{
2080*ec02198aSmrg 	  /* Warn about null pointers with positive sizes.  This is
2081*ec02198aSmrg 	     different from also declaring the pointer argument with
2082*ec02198aSmrg 	     attribute nonnull when the function accepts null pointers
2083*ec02198aSmrg 	     only when the corresponding size is zero.  */
2084*ec02198aSmrg 	  bool warned = false;
2085*ec02198aSmrg 	  location_t loc = EXPR_LOCATION (exp);
2086*ec02198aSmrg 	  if (tree_int_cst_equal (sizrng[0], sizrng[1]))
2087*ec02198aSmrg 	    warned = warning_at (loc, OPT_Wnonnull,
2088*ec02198aSmrg 				 "%Kargument %i is null but the corresponding "
2089*ec02198aSmrg 				 "size argument %i value is %E",
2090*ec02198aSmrg 				 exp, ptridx + 1, sizidx + 1, size);
2091*ec02198aSmrg 	  else
2092*ec02198aSmrg 	    warned = warning_at (loc, OPT_Wnonnull,
2093*ec02198aSmrg 				 "%Kargument %i is null but the corresponding "
2094*ec02198aSmrg 				 "size argument %i range is [%E, %E]",
2095*ec02198aSmrg 				 exp, ptridx + 1, sizidx + 1,
2096*ec02198aSmrg 				 sizrng[0], sizrng[1]);
2097*ec02198aSmrg 	  if (warned)
2098*ec02198aSmrg 	    {
2099*ec02198aSmrg 	      append_attrname (access, attrstr, sizeof attrstr);
2100*ec02198aSmrg 	      /* Avoid warning again for the same attribute.  */
2101*ec02198aSmrg 	      continue;
2102*ec02198aSmrg 	    }
2103*ec02198aSmrg 	}
2104*ec02198aSmrg 
2105*ec02198aSmrg       tree objsize = compute_objsize (ptr, 0);
2106*ec02198aSmrg 
2107*ec02198aSmrg       tree srcsize;
2108*ec02198aSmrg       if (access.second.mode == attr_access::write_only)
2109*ec02198aSmrg 	{
2110*ec02198aSmrg 	  /* For a write-only argument there is no source.  */
2111*ec02198aSmrg 	  srcsize = NULL_TREE;
2112*ec02198aSmrg 	}
2113*ec02198aSmrg       else
2114*ec02198aSmrg 	{
2115*ec02198aSmrg 	  /* If the size cannot be determined clear it to keep it from
2116*ec02198aSmrg 	     being taken as real (and excessive).  */
2117*ec02198aSmrg 	  if (objsize && integer_all_onesp (objsize))
2118*ec02198aSmrg 	    objsize = NULL_TREE;
2119*ec02198aSmrg 
2120*ec02198aSmrg 	  /* For read-only and read-write attributes also set the source
2121*ec02198aSmrg 	     size.  */
2122*ec02198aSmrg 	  srcsize = objsize;
2123*ec02198aSmrg 	  if (access.second.mode == attr_access::read_only)
2124*ec02198aSmrg 	    {
2125*ec02198aSmrg 	      /* For a read-only attribute there is no destination so
2126*ec02198aSmrg 		 clear OBJSIZE.  This emits "reading N bytes" kind of
2127*ec02198aSmrg 		 diagnostics instead of the "writing N bytes" kind.  */
2128*ec02198aSmrg 	      objsize = NULL_TREE;
2129*ec02198aSmrg 	    }
2130*ec02198aSmrg 	}
2131*ec02198aSmrg 
2132*ec02198aSmrg       /* Clear the no-warning bit in case it was set in a prior
2133*ec02198aSmrg 	 iteration so that accesses via different arguments are
2134*ec02198aSmrg 	 diagnosed.  */
2135*ec02198aSmrg       TREE_NO_WARNING (exp) = false;
2136*ec02198aSmrg       check_access (exp, NULL_TREE, NULL_TREE, size, /*maxread=*/ NULL_TREE,
2137*ec02198aSmrg 		    srcsize, objsize);
2138*ec02198aSmrg 
2139*ec02198aSmrg       if (TREE_NO_WARNING (exp))
2140*ec02198aSmrg 	/* If check_access issued a warning above, append the relevant
2141*ec02198aSmrg 	   attribute to the string.  */
2142*ec02198aSmrg 	append_attrname (access, attrstr, sizeof attrstr);
2143*ec02198aSmrg     }
2144*ec02198aSmrg 
2145*ec02198aSmrg   if (!*attrstr)
2146*ec02198aSmrg     return;
2147*ec02198aSmrg 
2148*ec02198aSmrg   if (fndecl)
2149*ec02198aSmrg     inform (DECL_SOURCE_LOCATION (fndecl),
2150*ec02198aSmrg 	    "in a call to function %qD declared with attribute %qs",
2151*ec02198aSmrg 	    fndecl, attrstr);
2152*ec02198aSmrg   else
2153*ec02198aSmrg     inform (EXPR_LOCATION (fndecl),
2154*ec02198aSmrg 	    "in a call with type %qT and attribute %qs",
2155*ec02198aSmrg 	    fntype, attrstr);
2156*ec02198aSmrg 
2157*ec02198aSmrg   /* Set the bit in case if was cleared and not set above.  */
2158*ec02198aSmrg   TREE_NO_WARNING (exp) = true;
2159*ec02198aSmrg }
2160*ec02198aSmrg 
/* Fill in ARGS_SIZE and ARGS array based on the parameters found in
   CALL_EXPR EXP.

   NUM_ACTUALS is the total number of parameters.

   N_NAMED_ARGS is the total number of named arguments.

   STRUCT_VALUE_ADDR_VALUE is the implicit argument for a struct return
   value, or null.

   FNDECL is the tree code for the target of this call (if known)

   ARGS_SO_FAR holds state needed by the target to know where to place
   the next argument.

   REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
   for arguments which are passed in registers.

   OLD_STACK_LEVEL is a pointer to an rtx which holds the old stack level
   and may be modified by this routine.

   OLD_PENDING_ADJ, MUST_PREALLOCATE and FLAGS are pointers to integer
   flags which may be modified by this routine.

   MAY_TAILCALL is cleared if we encounter an invisible pass-by-reference
   that requires allocation of stack space.

   CALL_FROM_THUNK_P is true if this call is the jump from a thunk to
   the thunked-to function.  */

static void
initialize_argument_information (int num_actuals ATTRIBUTE_UNUSED,
				 struct arg_data *args,
				 struct args_size *args_size,
				 int n_named_args ATTRIBUTE_UNUSED,
				 tree exp, tree struct_value_addr_value,
				 tree fndecl, tree fntype,
				 cumulative_args_t args_so_far,
				 int reg_parm_stack_space,
				 rtx *old_stack_level,
				 poly_int64_pod *old_pending_adj,
				 int *must_preallocate, int *ecf_flags,
				 bool *may_tailcall, bool call_from_thunk_p)
{
  CUMULATIVE_ARGS *args_so_far_pnt = get_cumulative_args (args_so_far);
  location_t loc = EXPR_LOCATION (exp);

  /* Count arg position in order args appear.  */
  int argpos;

  /* Index into ARGS; runs from the back (see below).  */
  int i;

  args_size->constant = 0;
  args_size->var = 0;

  bitmap_obstack_initialize (NULL);

  /* In this loop, we consider args in the order they are written.
     We fill up ARGS from the back.  */

  i = num_actuals - 1;
  {
    int j = i;
    call_expr_arg_iterator iter;
    tree arg;
    /* NOTE(review): SLOTS is never assigned in this block; it and the
       BITMAP_FREE below appear to be leftovers from removed code.  */
    bitmap slots = NULL;

    /* The implicit struct-return address, if any, occupies the last
       (highest-index) ARGS slot.  */
    if (struct_value_addr_value)
      {
	args[j].tree_value = struct_value_addr_value;
	j--;
      }
    argpos = 0;
    FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
      {
	tree argtype = TREE_TYPE (arg);

	/* A complex argument the target wants split occupies two ARGS
	   slots: its real part followed by its imaginary part.  */
	if (targetm.calls.split_complex_arg
	    && argtype
	    && TREE_CODE (argtype) == COMPLEX_TYPE
	    && targetm.calls.split_complex_arg (argtype))
	  {
	    tree subtype = TREE_TYPE (argtype);
	    args[j].tree_value = build1 (REALPART_EXPR, subtype, arg);
	    j--;
	    args[j].tree_value = build1 (IMAGPART_EXPR, subtype, arg);
	  }
	else
	  args[j].tree_value = arg;
	j--;
	argpos++;
      }

    if (slots)
      BITMAP_FREE (slots);
  }

  bitmap_obstack_release (NULL);

  /* Extract attribute alloc_size from the type of the called expression
     (which could be a function or a function pointer) and if set, store
     the indices of the corresponding arguments in ALLOC_IDX, and then
     the actual argument(s) at those indices in ALLOC_ARGS.  */
  int alloc_idx[2] = { -1, -1 };
  if (tree alloc_size = lookup_attribute ("alloc_size",
					  TYPE_ATTRIBUTES (fntype)))
    {
      /* Attribute operands are 1-based; convert to 0-based indices.  */
      tree args = TREE_VALUE (alloc_size);
      alloc_idx[0] = TREE_INT_CST_LOW (TREE_VALUE (args)) - 1;
      if (TREE_CHAIN (args))
	alloc_idx[1] = TREE_INT_CST_LOW (TREE_VALUE (TREE_CHAIN (args))) - 1;
    }

  /* Array for up to the two attribute alloc_size arguments.  */
  tree alloc_args[] = { NULL_TREE, NULL_TREE };

  /* Map of attribute read_only, write_only, or read_write specifications
     for function arguments.  */
  rdwr_map rdwr_idx;
  init_attr_rdwr_indices (&rdwr_idx, fntype);

  /* I counts args in order (to be) pushed; ARGPOS counts in order written.  */
  for (argpos = 0; argpos < num_actuals; i--, argpos++)
    {
      tree type = TREE_TYPE (args[i].tree_value);
      int unsignedp;

      /* Replace erroneous argument with constant zero.  */
      if (type == error_mark_node || !COMPLETE_TYPE_P (type))
	args[i].tree_value = integer_zero_node, type = integer_type_node;

      /* If TYPE is a transparent union or record, pass things the way
	 we would pass the first field of the union or record.  We have
	 already verified that the modes are the same.  */
      if (RECORD_OR_UNION_TYPE_P (type) && TYPE_TRANSPARENT_AGGR (type))
	type = TREE_TYPE (first_field (type));

      /* Decide where to pass this arg.

	 args[i].reg is nonzero if all or part is passed in registers.

	 args[i].partial is nonzero if part but not all is passed in registers,
	 and the exact value says how many bytes are passed in registers.

	 args[i].pass_on_stack is nonzero if the argument must at least be
	 computed on the stack.  It may then be loaded back into registers
	 if args[i].reg is nonzero.

	 These decisions are driven by the FUNCTION_... macros and must agree
	 with those made by function.c.  */

      /* See if this argument should be passed by invisible reference.  */
      function_arg_info arg (type, argpos < n_named_args);
      if (pass_by_reference (args_so_far_pnt, arg))
	{
	  const bool callee_copies
	    = reference_callee_copied (args_so_far_pnt, arg);
	  tree base;

	  /* If we're compiling a thunk, pass directly the address of an object
	     already in memory, instead of making a copy.  Likewise if we want
	     to make the copy in the callee instead of the caller.  */
	  if ((call_from_thunk_p || callee_copies)
	      && (base = get_base_address (args[i].tree_value))
	      && TREE_CODE (base) != SSA_NAME
	      && (!DECL_P (base) || MEM_P (DECL_RTL (base))))
	    {
	      /* We may have turned the parameter value into an SSA name.
		 Go back to the original parameter so we can take the
		 address.  */
	      if (TREE_CODE (args[i].tree_value) == SSA_NAME)
		{
		  gcc_assert (SSA_NAME_IS_DEFAULT_DEF (args[i].tree_value));
		  args[i].tree_value = SSA_NAME_VAR (args[i].tree_value);
		  gcc_assert (TREE_CODE (args[i].tree_value) == PARM_DECL);
		}
	      /* Argument setup code may have copied the value to register.  We
		 revert that optimization now because the tail call code must
		 use the original location.  */
	      if (TREE_CODE (args[i].tree_value) == PARM_DECL
		  && !MEM_P (DECL_RTL (args[i].tree_value))
		  && DECL_INCOMING_RTL (args[i].tree_value)
		  && MEM_P (DECL_INCOMING_RTL (args[i].tree_value)))
		set_decl_rtl (args[i].tree_value,
			      DECL_INCOMING_RTL (args[i].tree_value));

	      mark_addressable (args[i].tree_value);

	      /* We can't use sibcalls if a callee-copied argument is
		 stored in the current function's frame.  */
	      if (!call_from_thunk_p && DECL_P (base) && !TREE_STATIC (base))
		{
		  *may_tailcall = false;
		  maybe_complain_about_tail_call (exp,
						  "a callee-copied argument is"
						  " stored in the current"
						  " function's frame");
		}

	      args[i].tree_value = build_fold_addr_expr_loc (loc,
							 args[i].tree_value);
	      type = TREE_TYPE (args[i].tree_value);

	      /* Taking the address of a caller object means the call is
		 no longer const (it can observe memory).  */
	      if (*ecf_flags & ECF_CONST)
		*ecf_flags &= ~(ECF_CONST | ECF_LOOPING_CONST_OR_PURE);
	    }
	  else
	    {
	      /* We make a copy of the object and pass the address to the
		 function being called.  */
	      rtx copy;

	      if (!COMPLETE_TYPE_P (type)
		  || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
		  || (flag_stack_check == GENERIC_STACK_CHECK
		      && compare_tree_int (TYPE_SIZE_UNIT (type),
					   STACK_CHECK_MAX_VAR_SIZE) > 0))
		{
		  /* This is a variable-sized object.  Make space on the stack
		     for it.  */
		  rtx size_rtx = expr_size (args[i].tree_value);

		  if (*old_stack_level == 0)
		    {
		      emit_stack_save (SAVE_BLOCK, old_stack_level);
		      *old_pending_adj = pending_stack_adjust;
		      pending_stack_adjust = 0;
		    }

		  /* We can pass TRUE as the 4th argument because we just
		     saved the stack pointer and will restore it right after
		     the call.  */
		  copy = allocate_dynamic_stack_space (size_rtx,
						       TYPE_ALIGN (type),
						       TYPE_ALIGN (type),
						       max_int_size_in_bytes
						       (type),
						       true);
		  copy = gen_rtx_MEM (BLKmode, copy);
		  set_mem_attributes (copy, type, 1);
		}
	      else
		copy = assign_temp (type, 1, 0);

	      store_expr (args[i].tree_value, copy, 0, false, false);

	      /* Just change the const function to pure and then let
		 the next test clear the pure based on
		 callee_copies.  */
	      if (*ecf_flags & ECF_CONST)
		{
		  *ecf_flags &= ~ECF_CONST;
		  *ecf_flags |= ECF_PURE;
		}

	      if (!callee_copies && *ecf_flags & ECF_PURE)
		*ecf_flags &= ~(ECF_PURE | ECF_LOOPING_CONST_OR_PURE);

	      args[i].tree_value
		= build_fold_addr_expr_loc (loc, make_tree (type, copy));
	      type = TREE_TYPE (args[i].tree_value);
	      *may_tailcall = false;
	      maybe_complain_about_tail_call (exp,
					      "argument must be passed"
					      " by copying");
	    }
	  /* Record in ARG that the value is now passed by reference so
	     the target hooks below see the pointer, not the object.  */
	  arg.pass_by_reference = true;
	}

      unsignedp = TYPE_UNSIGNED (type);
      arg.type = type;
      arg.mode
	= promote_function_mode (type, TYPE_MODE (type), &unsignedp,
				 fndecl ? TREE_TYPE (fndecl) : fntype, 0);

      args[i].unsignedp = unsignedp;
      args[i].mode = arg.mode;

      targetm.calls.warn_parameter_passing_abi (args_so_far, type);

      args[i].reg = targetm.calls.function_arg (args_so_far, arg);

      /* NOTE(review): a CONST_INT return from the function_arg hook is
	 treated here as "no register" -- presumably a target-specific
	 marker; confirm against the targets this file supports.  */
      if (args[i].reg && CONST_INT_P (args[i].reg))
	args[i].reg = NULL;

      /* If this is a sibling call and the machine has register windows, the
	 register window has to be unwinded before calling the routine, so
	 arguments have to go into the incoming registers.  */
      if (targetm.calls.function_incoming_arg != targetm.calls.function_arg)
	args[i].tail_call_reg
	  = targetm.calls.function_incoming_arg (args_so_far, arg);
      else
	args[i].tail_call_reg = args[i].reg;

      if (args[i].reg)
	args[i].partial = targetm.calls.arg_partial_bytes (args_so_far, arg);

      args[i].pass_on_stack = targetm.calls.must_pass_in_stack (arg);

      /* If FUNCTION_ARG returned a (parallel [(expr_list (nil) ...) ...]),
	 it means that we are to pass this arg in the register(s) designated
	 by the PARALLEL, but also to pass it in the stack.  */
      if (args[i].reg && GET_CODE (args[i].reg) == PARALLEL
	  && XEXP (XVECEXP (args[i].reg, 0, 0), 0) == 0)
	args[i].pass_on_stack = 1;

      /* If this is an addressable type, we must preallocate the stack
	 since we must evaluate the object into its final location.

	 If this is to be passed in both registers and the stack, it is simpler
	 to preallocate.  */
      if (TREE_ADDRESSABLE (type)
	  || (args[i].pass_on_stack && args[i].reg != 0))
	*must_preallocate = 1;

      /* Compute the stack-size of this argument.  */
      if (args[i].reg == 0 || args[i].partial != 0
	       || reg_parm_stack_space > 0
	       || args[i].pass_on_stack)
	locate_and_pad_parm (arg.mode, type,
#ifdef STACK_PARMS_IN_REG_PARM_AREA
			     1,
#else
			     args[i].reg != 0,
#endif
			     reg_parm_stack_space,
			     args[i].pass_on_stack ? 0 : args[i].partial,
			     fndecl, args_size, &args[i].locate);
#ifdef BLOCK_REG_PADDING
      else
	/* The argument is passed entirely in registers.  See at which
	   end it should be padded.  */
	args[i].locate.where_pad =
	  BLOCK_REG_PADDING (arg.mode, type,
			     int_size_in_bytes (type) <= UNITS_PER_WORD);
#endif

      /* Update ARGS_SIZE, the total stack space for args so far.  */

      args_size->constant += args[i].locate.size.constant;
      if (args[i].locate.size.var)
	ADD_PARM_SIZE (*args_size, args[i].locate.size.var);

      /* Increment ARGS_SO_FAR, which has info about which arg-registers
	 have been used, etc.  */

      /* ??? Traditionally we've passed TYPE_MODE here, instead of the
	 promoted_mode used for function_arg above.  However, the
	 corresponding handling of incoming arguments in function.c
	 does pass the promoted mode.  */
      arg.mode = TYPE_MODE (type);
      targetm.calls.function_arg_advance (args_so_far, arg);

      /* Store argument values for functions decorated with attribute
	 alloc_size.  */
      if (argpos == alloc_idx[0])
	alloc_args[0] = args[i].tree_value;
      else if (argpos == alloc_idx[1])
	alloc_args[1] = args[i].tree_value;

      /* Save the actual argument that corresponds to the access attribute
	 operand for later processing.  */
      if (attr_access *access = rdwr_idx.get (argpos))
	{
	  if (POINTER_TYPE_P (type))
	    {
	      access->ptr = args[i].tree_value;
	      gcc_assert (access->size == NULL_TREE);
	    }
	  else
	    {
	      access->size = args[i].tree_value;
	      gcc_assert (access->ptr == NULL_TREE);
	    }
	}
    }

  if (alloc_args[0])
    {
      /* Check the arguments of functions decorated with attribute
	 alloc_size.  */
      maybe_warn_alloc_args_overflow (fndecl, exp, alloc_args, alloc_idx);
    }

  /* Detect passing non-string arguments to functions expecting
     nul-terminated strings.  */
  maybe_warn_nonstring_arg (fndecl, exp);

  /* Check read_only, write_only, and read_write arguments.  */
  maybe_warn_rdwr_sizes (&rdwr_idx, fndecl, fntype, exp);
}
255210d565efSmrg 
/* Update ARGS_SIZE to contain the total size for the argument block.
   Return the original constant component of the argument block's size.

   REG_PARM_STACK_SPACE holds the number of bytes of stack space reserved
   for arguments passed in registers.  */

static poly_int64
compute_argument_block_size (int reg_parm_stack_space,
			     struct args_size *args_size,
			     tree fndecl ATTRIBUTE_UNUSED,
			     tree fntype ATTRIBUTE_UNUSED,
			     int preferred_stack_boundary ATTRIBUTE_UNUSED)
{
  /* Saved before any rounding/adjustment so the caller can see the raw
     constant component.  */
  poly_int64 unadjusted_args_size = args_size->constant;

  /* For accumulate outgoing args mode we don't need to align, since the frame
     will be already aligned.  Align to STACK_BOUNDARY in order to prevent
     backends from generating misaligned frame sizes.  */
  if (ACCUMULATE_OUTGOING_ARGS && preferred_stack_boundary > STACK_BOUNDARY)
    preferred_stack_boundary = STACK_BOUNDARY;

  /* Compute the actual size of the argument block required.  The variable
     and constant sizes must be combined, the size may have to be rounded,
     and there may be a minimum required size.  */

  if (args_size->var)
    {
      /* Variable-sized block: fold the constant part into the tree
	 expression and do all further arithmetic on trees.  */
      args_size->var = ARGS_SIZE_TREE (*args_size);
      args_size->constant = 0;

      /* Convert the boundary from bits to bytes.  */
      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary > 1)
	{
	  /* We don't handle this case yet.  To handle it correctly we have
	     to add the delta, round and subtract the delta.
	     Currently no machine description requires this support.  */
	  gcc_assert (multiple_p (stack_pointer_delta,
				  preferred_stack_boundary));
	  args_size->var = round_up (args_size->var, preferred_stack_boundary);
	}

      if (reg_parm_stack_space > 0)
	{
	  /* The block must be at least big enough for the register-parm
	     save area.  */
	  args_size->var
	    = size_binop (MAX_EXPR, args_size->var,
			  ssize_int (reg_parm_stack_space));

	  /* The area corresponding to register parameters is not to count in
	     the size of the block we need.  So make the adjustment.  */
	  if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
	    args_size->var
	      = size_binop (MINUS_EXPR, args_size->var,
			    ssize_int (reg_parm_stack_space));
	}
    }
  else
    {
      /* Constant-sized block: round the size (biased by the current
	 stack-pointer delta) up to the preferred boundary.  */
      preferred_stack_boundary /= BITS_PER_UNIT;
      if (preferred_stack_boundary < 1)
	preferred_stack_boundary = 1;
      args_size->constant = (aligned_upper_bound (args_size->constant
						  + stack_pointer_delta,
						  preferred_stack_boundary)
			     - stack_pointer_delta);

      /* Same register-parm save-area treatment as the variable case.  */
      args_size->constant = upper_bound (args_size->constant,
					 reg_parm_stack_space);

      if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
	args_size->constant -= reg_parm_stack_space;
    }
  return unadjusted_args_size;
}
262610d565efSmrg 
262710d565efSmrg /* Precompute parameters as needed for a function call.
262810d565efSmrg 
262910d565efSmrg    FLAGS is mask of ECF_* constants.
263010d565efSmrg 
263110d565efSmrg    NUM_ACTUALS is the number of arguments.
263210d565efSmrg 
263310d565efSmrg    ARGS is an array containing information for each argument; this
263410d565efSmrg    routine fills in the INITIAL_VALUE and VALUE fields for each
263510d565efSmrg    precomputed argument.  */
263610d565efSmrg 
static void
precompute_arguments (int num_actuals, struct arg_data *args)
{
  int i;

  /* If this is a libcall, then precompute all arguments so that we do not
     get extraneous instructions emitted as part of the libcall sequence.  */

  /* If we preallocated the stack space, and some arguments must be passed
     on the stack, then we must precompute any parameter which contains a
     function call which will store arguments on the stack.
     Otherwise, evaluating the parameter may clobber previous parameters
     which have already been stored into the stack.  (we have code to avoid
     such case by saving the outgoing stack arguments, but it results in
     worse code)  */
  if (!ACCUMULATE_OUTGOING_ARGS)
    return;

  for (i = 0; i < num_actuals; i++)
    {
      tree type;
      machine_mode mode;

      /* Only nested calls can store into the argument area while being
	 evaluated; other expressions need no precomputation.  */
      if (TREE_CODE (args[i].tree_value) != CALL_EXPR)
	continue;

      /* If this is an addressable type, we cannot pre-evaluate it.  */
      type = TREE_TYPE (args[i].tree_value);
      gcc_assert (!TREE_ADDRESSABLE (type));

      args[i].initial_value = args[i].value
	= expand_normal (args[i].tree_value);

      /* If the mode the argument is passed in differs from its declared
	 mode, convert the value now.  */
      mode = TYPE_MODE (type);
      if (mode != args[i].mode)
	{
	  int unsignedp = args[i].unsignedp;
	  args[i].value
	    = convert_modes (args[i].mode, mode,
			     args[i].value, args[i].unsignedp);

	  /* CSE will replace this only if it contains args[i].value
	     pseudo, so convert it down to the declared mode using
	     a SUBREG.  */
	  if (REG_P (args[i].value)
	      && GET_MODE_CLASS (args[i].mode) == MODE_INT
	      && promote_mode (type, mode, &unsignedp) != args[i].mode)
	    {
	      args[i].initial_value
		= gen_lowpart_SUBREG (mode, args[i].value);
	      SUBREG_PROMOTED_VAR_P (args[i].initial_value) = 1;
	      SUBREG_PROMOTED_SET (args[i].initial_value, args[i].unsignedp);
	    }
	}
    }
}
269310d565efSmrg 
269410d565efSmrg /* Given the current state of MUST_PREALLOCATE and information about
269510d565efSmrg    arguments to a function call in NUM_ACTUALS, ARGS and ARGS_SIZE,
269610d565efSmrg    compute and return the final value for MUST_PREALLOCATE.  */
269710d565efSmrg 
269810d565efSmrg static int
finalize_must_preallocate(int must_preallocate,int num_actuals,struct arg_data * args,struct args_size * args_size)269910d565efSmrg finalize_must_preallocate (int must_preallocate, int num_actuals,
270010d565efSmrg 			   struct arg_data *args, struct args_size *args_size)
270110d565efSmrg {
270210d565efSmrg   /* See if we have or want to preallocate stack space.
270310d565efSmrg 
270410d565efSmrg      If we would have to push a partially-in-regs parm
270510d565efSmrg      before other stack parms, preallocate stack space instead.
270610d565efSmrg 
270710d565efSmrg      If the size of some parm is not a multiple of the required stack
270810d565efSmrg      alignment, we must preallocate.
270910d565efSmrg 
271010d565efSmrg      If the total size of arguments that would otherwise create a copy in
271110d565efSmrg      a temporary (such as a CALL) is more than half the total argument list
271210d565efSmrg      size, preallocation is faster.
271310d565efSmrg 
271410d565efSmrg      Another reason to preallocate is if we have a machine (like the m88k)
271510d565efSmrg      where stack alignment is required to be maintained between every
271610d565efSmrg      pair of insns, not just when the call is made.  However, we assume here
271710d565efSmrg      that such machines either do not have push insns (and hence preallocation
271810d565efSmrg      would occur anyway) or the problem is taken care of with
271910d565efSmrg      PUSH_ROUNDING.  */
272010d565efSmrg 
272110d565efSmrg   if (! must_preallocate)
272210d565efSmrg     {
272310d565efSmrg       int partial_seen = 0;
2724c7a68eb7Smrg       poly_int64 copy_to_evaluate_size = 0;
272510d565efSmrg       int i;
272610d565efSmrg 
272710d565efSmrg       for (i = 0; i < num_actuals && ! must_preallocate; i++)
272810d565efSmrg 	{
272910d565efSmrg 	  if (args[i].partial > 0 && ! args[i].pass_on_stack)
273010d565efSmrg 	    partial_seen = 1;
273110d565efSmrg 	  else if (partial_seen && args[i].reg == 0)
273210d565efSmrg 	    must_preallocate = 1;
273310d565efSmrg 
273410d565efSmrg 	  if (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
273510d565efSmrg 	      && (TREE_CODE (args[i].tree_value) == CALL_EXPR
273610d565efSmrg 		  || TREE_CODE (args[i].tree_value) == TARGET_EXPR
273710d565efSmrg 		  || TREE_CODE (args[i].tree_value) == COND_EXPR
273810d565efSmrg 		  || TREE_ADDRESSABLE (TREE_TYPE (args[i].tree_value))))
273910d565efSmrg 	    copy_to_evaluate_size
274010d565efSmrg 	      += int_size_in_bytes (TREE_TYPE (args[i].tree_value));
274110d565efSmrg 	}
274210d565efSmrg 
2743c7a68eb7Smrg       if (maybe_ne (args_size->constant, 0)
2744c7a68eb7Smrg 	  && maybe_ge (copy_to_evaluate_size * 2, args_size->constant))
274510d565efSmrg 	must_preallocate = 1;
274610d565efSmrg     }
274710d565efSmrg   return must_preallocate;
274810d565efSmrg }
274910d565efSmrg 
275010d565efSmrg /* If we preallocated stack space, compute the address of each argument
275110d565efSmrg    and store it into the ARGS array.
275210d565efSmrg 
275310d565efSmrg    We need not ensure it is a valid memory address here; it will be
275410d565efSmrg    validized when it is used.
275510d565efSmrg 
275610d565efSmrg    ARGBLOCK is an rtx for the address of the outgoing arguments.  */
275710d565efSmrg 
static void
compute_argument_addresses (struct arg_data *args, rtx argblock, int num_actuals)
{
  if (argblock)
    {
      rtx arg_reg = argblock;
      int i;
      poly_int64 arg_offset = 0;

      /* Split ARGBLOCK into a base and a constant offset so each slot
	 address can be formed with plus_constant below.  */
      if (GET_CODE (argblock) == PLUS)
	{
	  arg_reg = XEXP (argblock, 0);
	  arg_offset = rtx_to_poly_int64 (XEXP (argblock, 1));
	}

      for (i = 0; i < num_actuals; i++)
	{
	  rtx offset = ARGS_SIZE_RTX (args[i].locate.offset);
	  rtx slot_offset = ARGS_SIZE_RTX (args[i].locate.slot_offset);
	  rtx addr;
	  unsigned int align, boundary;
	  poly_uint64 units_on_stack = 0;
	  machine_mode partial_mode = VOIDmode;

	  /* Skip this parm if it will not be passed on the stack.  */
	  if (! args[i].pass_on_stack
	      && args[i].reg != 0
	      && args[i].partial == 0)
	    continue;

	  /* Empty types occupy no argument slot.  */
	  if (TYPE_EMPTY_P (TREE_TYPE (args[i].tree_value)))
	    continue;

	  addr = simplify_gen_binary (PLUS, Pmode, arg_reg, offset);
	  addr = plus_constant (Pmode, addr, arg_offset);

	  if (args[i].partial != 0)
	    {
	      /* Only part of the parameter is being passed on the stack.
		 Generate a simple memory reference of the correct size.  */
	      units_on_stack = args[i].locate.size.constant;
	      poly_uint64 bits_on_stack = units_on_stack * BITS_PER_UNIT;
	      partial_mode = int_mode_for_size (bits_on_stack, 1).else_blk ();
	      args[i].stack = gen_rtx_MEM (partial_mode, addr);
	      set_mem_size (args[i].stack, units_on_stack);
	    }
	  else
	    {
	      args[i].stack = gen_rtx_MEM (args[i].mode, addr);
	      set_mem_attributes (args[i].stack,
				  TREE_TYPE (args[i].tree_value), 1);
	    }
	  align = BITS_PER_UNIT;
	  boundary = args[i].locate.boundary;
	  poly_int64 offset_val;
	  if (args[i].locate.where_pad != PAD_DOWNWARD)
	    align = boundary;
	  else if (poly_int_rtx_p (offset, &offset_val))
	    {
	      /* Padding downward: the known alignment of the constant
		 offset bounds the slot's usable alignment.  */
	      align = least_bit_hwi (boundary);
	      unsigned int offset_align
		= known_alignment (offset_val) * BITS_PER_UNIT;
	      if (offset_align != 0)
		align = MIN (align, offset_align);
	    }
	  set_mem_align (args[i].stack, align);

	  /* Now the same for the full slot (slot_offset rather than
	     offset).  */
	  addr = simplify_gen_binary (PLUS, Pmode, arg_reg, slot_offset);
	  addr = plus_constant (Pmode, addr, arg_offset);

	  if (args[i].partial != 0)
	    {
	      /* Only part of the parameter is being passed on the stack.
		 Generate a simple memory reference of the correct size.
	       */
	      args[i].stack_slot = gen_rtx_MEM (partial_mode, addr);
	      set_mem_size (args[i].stack_slot, units_on_stack);
	    }
	  else
	    {
	      args[i].stack_slot = gen_rtx_MEM (args[i].mode, addr);
	      set_mem_attributes (args[i].stack_slot,
				  TREE_TYPE (args[i].tree_value), 1);
	    }
	  set_mem_align (args[i].stack_slot, args[i].locate.boundary);

	  /* Function incoming arguments may overlap with sibling call
	     outgoing arguments and we cannot allow reordering of reads
	     from function arguments with stores to outgoing arguments
	     of sibling calls.  */
	  set_mem_alias_set (args[i].stack, 0);
	  set_mem_alias_set (args[i].stack_slot, 0);
	}
    }
}
285310d565efSmrg 
285410d565efSmrg /* Given a FNDECL and EXP, return an rtx suitable for use as a target address
285510d565efSmrg    in a call instruction.
285610d565efSmrg 
285710d565efSmrg    FNDECL is the tree node for the target function.  For an indirect call
285810d565efSmrg    FNDECL will be NULL_TREE.
285910d565efSmrg 
286010d565efSmrg    ADDR is the operand 0 of CALL_EXPR for this call.  */
286110d565efSmrg 
286210d565efSmrg static rtx
rtx_for_function_call(tree fndecl,tree addr)286310d565efSmrg rtx_for_function_call (tree fndecl, tree addr)
286410d565efSmrg {
286510d565efSmrg   rtx funexp;
286610d565efSmrg 
286710d565efSmrg   /* Get the function to call, in the form of RTL.  */
286810d565efSmrg   if (fndecl)
286910d565efSmrg     {
287010d565efSmrg       if (!TREE_USED (fndecl) && fndecl != current_function_decl)
287110d565efSmrg 	TREE_USED (fndecl) = 1;
287210d565efSmrg 
287310d565efSmrg       /* Get a SYMBOL_REF rtx for the function address.  */
287410d565efSmrg       funexp = XEXP (DECL_RTL (fndecl), 0);
287510d565efSmrg     }
287610d565efSmrg   else
287710d565efSmrg     /* Generate an rtx (probably a pseudo-register) for the address.  */
287810d565efSmrg     {
287910d565efSmrg       push_temp_slots ();
288010d565efSmrg       funexp = expand_normal (addr);
288110d565efSmrg       pop_temp_slots ();	/* FUNEXP can't be BLKmode.  */
288210d565efSmrg     }
288310d565efSmrg   return funexp;
288410d565efSmrg }
288510d565efSmrg 
2886c7a68eb7Smrg /* Return the static chain for this function, if any.  */
2887c7a68eb7Smrg 
2888c7a68eb7Smrg rtx
rtx_for_static_chain(const_tree fndecl_or_type,bool incoming_p)2889c7a68eb7Smrg rtx_for_static_chain (const_tree fndecl_or_type, bool incoming_p)
2890c7a68eb7Smrg {
2891c7a68eb7Smrg   if (DECL_P (fndecl_or_type) && !DECL_STATIC_CHAIN (fndecl_or_type))
2892c7a68eb7Smrg     return NULL;
2893c7a68eb7Smrg 
2894c7a68eb7Smrg   return targetm.calls.static_chain (fndecl_or_type, incoming_p);
2895c7a68eb7Smrg }
2896c7a68eb7Smrg 
/* Internal state for internal_arg_pointer_based_exp and its helpers.  */
static struct
{
  /* Last insn that has been scanned by internal_arg_pointer_based_exp_scan,
     or NULL_RTX if none has been scanned yet.  */
  rtx_insn *scan_start;
  /* Vector indexed by REGNO - FIRST_PSEUDO_REGISTER, recording if a pseudo is
     based on crtl->args.internal_arg_pointer.  The element is NULL_RTX if the
     pseudo isn't based on it, a CONST_INT offset if the pseudo is based on it
     with fixed offset, or PC if this is with variable or unknown offset.  */
  vec<rtx> cache;
} internal_arg_pointer_exp_state;

/* Forward declaration; the scan helper below and this function are
   mutually recursive.  */
static rtx internal_arg_pointer_based_exp (const_rtx, bool);
291110d565efSmrg 
291210d565efSmrg /* Helper function for internal_arg_pointer_based_exp.  Scan insns in
291310d565efSmrg    the tail call sequence, starting with first insn that hasn't been
291410d565efSmrg    scanned yet, and note for each pseudo on the LHS whether it is based
291510d565efSmrg    on crtl->args.internal_arg_pointer or not, and what offset from that
291610d565efSmrg    that pointer it has.  */
291710d565efSmrg 
static void
internal_arg_pointer_based_exp_scan (void)
{
  rtx_insn *insn, *scan_start = internal_arg_pointer_exp_state.scan_start;

  /* Resume the scan after the last insn processed, or start from the
     beginning on the first call.  */
  if (scan_start == NULL_RTX)
    insn = get_insns ();
  else
    insn = NEXT_INSN (scan_start);

  while (insn)
    {
      rtx set = single_set (insn);
      if (set && REG_P (SET_DEST (set)) && !HARD_REGISTER_P (SET_DEST (set)))
	{
	  rtx val = NULL_RTX;
	  unsigned int idx = REGNO (SET_DEST (set)) - FIRST_PSEUDO_REGISTER;
	  /* Punt on pseudos set multiple times.  */
	  if (idx < internal_arg_pointer_exp_state.cache.length ()
	      && (internal_arg_pointer_exp_state.cache[idx]
		  != NULL_RTX))
	    val = pc_rtx;
	  else
	    val = internal_arg_pointer_based_exp (SET_SRC (set), false);
	  if (val != NULL_RTX)
	    {
	      /* Grow the cache on demand; new slots are cleared to
		 NULL_RTX (= not arg-pointer based).  */
	      if (idx >= internal_arg_pointer_exp_state.cache.length ())
		internal_arg_pointer_exp_state.cache
		  .safe_grow_cleared (idx + 1);
	      internal_arg_pointer_exp_state.cache[idx] = val;
	    }
	}
      /* Remember the last insn so the next call resumes after it.  */
      if (NEXT_INSN (insn) == NULL_RTX)
	scan_start = insn;
      insn = NEXT_INSN (insn);
    }

  internal_arg_pointer_exp_state.scan_start = scan_start;
}
295710d565efSmrg 
295810d565efSmrg /* Compute whether RTL is based on crtl->args.internal_arg_pointer.  Return
295910d565efSmrg    NULL_RTX if RTL isn't based on it, a CONST_INT offset if RTL is based on
296010d565efSmrg    it with fixed offset, or PC if this is with variable or unknown offset.
296110d565efSmrg    TOPLEVEL is true if the function is invoked at the topmost level.  */
296210d565efSmrg 
static rtx
internal_arg_pointer_based_exp (const_rtx rtl, bool toplevel)
{
  /* Constants are never based on the internal arg pointer.  */
  if (CONSTANT_P (rtl))
    return NULL_RTX;

  /* The arg pointer itself: based with offset 0.  */
  if (rtl == crtl->args.internal_arg_pointer)
    return const0_rtx;

  if (REG_P (rtl) && HARD_REGISTER_P (rtl))
    return NULL_RTX;

  /* (PLUS base const): recurse on the base and fold the offset in.  */
  poly_int64 offset;
  if (GET_CODE (rtl) == PLUS && poly_int_rtx_p (XEXP (rtl, 1), &offset))
    {
      rtx val = internal_arg_pointer_based_exp (XEXP (rtl, 0), toplevel);
      if (val == NULL_RTX || val == pc_rtx)
	return val;
      return plus_constant (Pmode, val, offset);
    }

  /* When called at the topmost level, scan pseudo assignments in between the
     last scanned instruction in the tail call sequence and the latest insn
     in that sequence.  */
  if (toplevel)
    internal_arg_pointer_based_exp_scan ();

  /* Pseudos: answer from the cache filled in by the scan above.  */
  if (REG_P (rtl))
    {
      unsigned int idx = REGNO (rtl) - FIRST_PSEUDO_REGISTER;
      if (idx < internal_arg_pointer_exp_state.cache.length ())
	return internal_arg_pointer_exp_state.cache[idx];

      return NULL_RTX;
    }

  /* Anything else: if any embedded pseudo is arg-pointer based, the
     overall offset is unknown.  Don't look inside MEMs — a load from an
     arg-pointer-based address is not itself arg-pointer based.  */
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, rtl, NONCONST)
    {
      const_rtx x = *iter;
      if (REG_P (x) && internal_arg_pointer_based_exp (x, false) != NULL_RTX)
	return pc_rtx;
      if (MEM_P (x))
	iter.skip_subrtxes ();
    }

  return NULL_RTX;
}
301110d565efSmrg 
3012c7a68eb7Smrg /* Return true if SIZE bytes starting from address ADDR might overlap an
3013c7a68eb7Smrg    already-clobbered argument area.  This function is used to determine
3014c7a68eb7Smrg    if we should give up a sibcall.  */
301510d565efSmrg 
static bool
mem_might_overlap_already_clobbered_arg_p (rtx addr, poly_uint64 size)
{
  poly_int64 i;
  unsigned HOST_WIDE_INT start, end;
  rtx val;

  /* Fast exit: nothing in the argument area has been clobbered yet.  */
  if (bitmap_empty_p (stored_args_map)
      && stored_args_watermark == HOST_WIDE_INT_M1U)
    return false;
  val = internal_arg_pointer_based_exp (addr, true);
  if (val == NULL_RTX)
    /* Not based on the arg pointer at all: cannot overlap.  */
    return false;
  else if (!poly_int_rtx_p (val, &i))
    /* Based on the arg pointer with variable/unknown offset:
       conservatively assume overlap.  */
    return true;

  if (known_eq (size, 0U))
    return false;

  /* Translate the offset into the coordinate system used by
     stored_args_map.  */
  if (STACK_GROWS_DOWNWARD)
    i -= crtl->args.pretend_args_size;
  else
    i += crtl->args.pretend_args_size;

  if (ARGS_GROW_DOWNWARD)
    i = -i - size;

  /* We can ignore any references to the function's pretend args,
     which at this point would manifest as negative values of I.  */
  if (known_le (i, 0) && known_le (size, poly_uint64 (-i)))
    return false;

  start = maybe_lt (i, 0) ? 0 : constant_lower_bound (i);
  if (!(i + size).is_constant (&end))
    end = HOST_WIDE_INT_M1U;

  /* Anything at or above the watermark may be clobbered.  */
  if (end > stored_args_watermark)
    return true;

  /* Otherwise consult the per-byte clobber bitmap.  */
  end = MIN (end, SBITMAP_SIZE (stored_args_map));
  for (unsigned HOST_WIDE_INT k = start; k < end; ++k)
    if (bitmap_bit_p (stored_args_map, k))
      return true;

  return false;
}
306210d565efSmrg 
306310d565efSmrg /* Do the register loads required for any wholly-register parms or any
306410d565efSmrg    parms which are passed both on the stack and in a register.  Their
306510d565efSmrg    expressions were already evaluated.
306610d565efSmrg 
306710d565efSmrg    Mark all register-parms as living through the call, putting these USE
306810d565efSmrg    insns in the CALL_INSN_FUNCTION_USAGE field.
306910d565efSmrg 
307010d565efSmrg    When IS_SIBCALL, perform the check_sibcall_argument_overlap
307110d565efSmrg    checking, setting *SIBCALL_FAILURE if appropriate.  */
307210d565efSmrg 
static void
load_register_parameters (struct arg_data *args, int num_actuals,
			  rtx *call_fusage, int flags, int is_sibcall,
			  int *sibcall_failure)
{
  int i, j;

  for (i = 0; i < num_actuals; i++)
    {
      /* Sibcalls may use a different register assignment.  */
      rtx reg = ((flags & ECF_SIBCALL)
		 ? args[i].tail_call_reg : args[i].reg);
      if (reg)
	{
	  int partial = args[i].partial;
	  int nregs;
	  poly_int64 size = 0;
	  HOST_WIDE_INT const_size = 0;
	  rtx_insn *before_arg = get_last_insn ();
	  tree type = TREE_TYPE (args[i].tree_value);
	  /* Transparent aggregates are passed as their first field.  */
	  if (RECORD_OR_UNION_TYPE_P (type) && TYPE_TRANSPARENT_AGGR (type))
	    type = TREE_TYPE (first_field (type));
	  /* Set non-negative if we must move a word at a time, even if
	     just one word (e.g, partial == 4 && mode == DFmode).  Set
	     to -1 if we just use a normal move insn.  This value can be
	     zero if the argument is a zero size structure.  */
	  nregs = -1;
	  if (GET_CODE (reg) == PARALLEL)
	    ;
	  else if (partial)
	    {
	      gcc_assert (partial % UNITS_PER_WORD == 0);
	      nregs = partial / UNITS_PER_WORD;
	    }
	  else if (TYPE_MODE (type) == BLKmode)
	    {
	      /* Variable-sized parameters should be described by a
		 PARALLEL instead.  */
	      const_size = int_size_in_bytes (type);
	      gcc_assert (const_size >= 0);
	      nregs = (const_size + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
	      size = const_size;
	    }
	  else
	    size = GET_MODE_SIZE (args[i].mode);

	  /* Handle calls that pass values in multiple non-contiguous
	     locations.  The Irix 6 ABI has examples of this.  */

	  if (GET_CODE (reg) == PARALLEL)
	    emit_group_move (reg, args[i].parallel_value);

	  /* If simple case, just do move.  If normal partial, store_one_arg
	     has already loaded the register for us.  In all other cases,
	     load the register(s) from memory.  */

	  else if (nregs == -1)
	    {
	      emit_move_insn (reg, args[i].value);
#ifdef BLOCK_REG_PADDING
	      /* Handle case where we have a value that needs shifting
		 up to the msb.  eg. a QImode value and we're padding
		 upward on a BYTES_BIG_ENDIAN machine.  */
	      if (args[i].locate.where_pad
		  == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD))
		{
		  gcc_checking_assert (ordered_p (size, UNITS_PER_WORD));
		  if (maybe_lt (size, UNITS_PER_WORD))
		    {
		      rtx x;
		      poly_int64 shift
			= (UNITS_PER_WORD - size) * BITS_PER_UNIT;

		      /* Assigning REG here rather than a temp makes
			 CALL_FUSAGE report the whole reg as used.
			 Strictly speaking, the call only uses SIZE
			 bytes at the msb end, but it doesn't seem worth
			 generating rtl to say that.  */
		      reg = gen_rtx_REG (word_mode, REGNO (reg));
		      x = expand_shift (LSHIFT_EXPR, word_mode,
					reg, shift, reg, 1);
		      if (x != reg)
			emit_move_insn (reg, x);
		    }
		}
#endif
	    }

	  /* If we have pre-computed the values to put in the registers in
	     the case of non-aligned structures, copy them in now.  */

	  else if (args[i].n_aligned_regs != 0)
	    for (j = 0; j < args[i].n_aligned_regs; j++)
	      emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg) + j),
			      args[i].aligned_regs[j]);

	  else if (partial == 0 || args[i].pass_on_stack)
	    {
	      /* SIZE and CONST_SIZE are 0 for partial arguments and
		 the size of a BLKmode type otherwise.  */
	      gcc_checking_assert (known_eq (size, const_size));
	      rtx mem = validize_mem (copy_rtx (args[i].value));

	      /* Check for overlap with already clobbered argument area,
	         providing that this has non-zero size.  */
	      if (is_sibcall
		  && const_size != 0
		  && (mem_might_overlap_already_clobbered_arg_p
		      (XEXP (args[i].value, 0), const_size)))
		*sibcall_failure = 1;

	      if (const_size % UNITS_PER_WORD == 0
		  || MEM_ALIGN (mem) % BITS_PER_WORD == 0)
		move_block_to_reg (REGNO (reg), mem, nregs, args[i].mode);
	      else
		{
		  /* Ragged final word: move all but the last word
		     normally, then extract the remaining bits by hand.  */
		  if (nregs > 1)
		    move_block_to_reg (REGNO (reg), mem, nregs - 1,
				       args[i].mode);
		  rtx dest = gen_rtx_REG (word_mode, REGNO (reg) + nregs - 1);
		  unsigned int bitoff = (nregs - 1) * BITS_PER_WORD;
		  unsigned int bitsize = const_size * BITS_PER_UNIT - bitoff;
		  rtx x = extract_bit_field (mem, bitsize, bitoff, 1, dest,
					     word_mode, word_mode, false,
					     NULL);
		  if (BYTES_BIG_ENDIAN)
		    x = expand_shift (LSHIFT_EXPR, word_mode, x,
				      BITS_PER_WORD - bitsize, dest, 1);
		  if (x != dest)
		    emit_move_insn (dest, x);
		}

	      /* Handle a BLKmode that needs shifting.  */
	      if (nregs == 1 && const_size < UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
		  && args[i].locate.where_pad == PAD_DOWNWARD
#else
		  && BYTES_BIG_ENDIAN
#endif
		  )
		{
		  rtx dest = gen_rtx_REG (word_mode, REGNO (reg));
		  int shift = (UNITS_PER_WORD - const_size) * BITS_PER_UNIT;
		  enum tree_code dir = (BYTES_BIG_ENDIAN
					? RSHIFT_EXPR : LSHIFT_EXPR);
		  rtx x;

		  x = expand_shift (dir, word_mode, dest, shift, dest, 1);
		  if (x != dest)
		    emit_move_insn (dest, x);
		}
	    }

	  /* When a parameter is a block, and perhaps in other cases, it is
	     possible that it did a load from an argument slot that was
	     already clobbered.  */
	  if (is_sibcall
	      && check_sibcall_argument_overlap (before_arg, &args[i], 0))
	    *sibcall_failure = 1;

	  /* Handle calls that pass values in multiple non-contiguous
	     locations.  The Irix 6 ABI has examples of this.  */
	  if (GET_CODE (reg) == PARALLEL)
	    use_group_regs (call_fusage, reg);
	  else if (nregs == -1)
	    use_reg_mode (call_fusage, reg, TYPE_MODE (type));
	  else if (nregs > 0)
	    use_regs (call_fusage, REGNO (reg), nregs);
	}
    }
}
324310d565efSmrg 
/* We need to pop PENDING_STACK_ADJUST bytes.  But, if the arguments
   wouldn't fill up an even multiple of PREFERRED_UNIT_STACK_BOUNDARY
   bytes, then we would need to push some additional bytes to pad the
   arguments.  So, we try to compute an adjust to the stack pointer for an
   amount that will leave the stack under-aligned by UNADJUSTED_ARGS_SIZE
   bytes.  Then, when the arguments are pushed the stack will be perfectly
   aligned.

   Return true if this optimization is possible, storing the adjustment
   in ADJUSTMENT_OUT and setting ARGS_SIZE->CONSTANT to the number of
   bytes that should be popped after the call.  */

static bool
combine_pending_stack_adjustment_and_call (poly_int64_pod *adjustment_out,
					   poly_int64 unadjusted_args_size,
					   struct args_size *args_size,
					   unsigned int preferred_unit_stack_boundary)
{
  /* The number of bytes to pop so that the stack will be
     under-aligned by UNADJUSTED_ARGS_SIZE bytes.  */
  poly_int64 adjustment;
  /* The alignment of the stack after the arguments are pushed, if we
     just pushed the arguments without adjusting the stack here.  */
  unsigned HOST_WIDE_INT unadjusted_alignment;

  /* With polynomial (e.g. SVE-sized) stack offsets the misalignment
     may not be known at compile time; in that case we cannot perform
     the optimization.  */
  if (!known_misalignment (stack_pointer_delta + unadjusted_args_size,
			   preferred_unit_stack_boundary,
			   &unadjusted_alignment))
    return false;

  /* We want to get rid of as many of the PENDING_STACK_ADJUST bytes
     as possible -- leaving just enough left to cancel out the
     UNADJUSTED_ALIGNMENT.  In other words, we want to ensure that the
     PENDING_STACK_ADJUST is non-negative, and congruent to
     -UNADJUSTED_ALIGNMENT modulo the PREFERRED_UNIT_STACK_BOUNDARY.  */

  /* Begin by trying to pop all the bytes.  */
  unsigned HOST_WIDE_INT tmp_misalignment;
  if (!known_misalignment (pending_stack_adjust,
			   preferred_unit_stack_boundary,
			   &tmp_misalignment))
    return false;
  unadjusted_alignment -= tmp_misalignment;
  adjustment = pending_stack_adjust;
  /* Push enough additional bytes that the stack will be aligned
     after the arguments are pushed.  */
  if (preferred_unit_stack_boundary > 1 && unadjusted_alignment)
    adjustment -= preferred_unit_stack_boundary - unadjusted_alignment;

  /* We need to know whether the adjusted argument size
     (UNADJUSTED_ARGS_SIZE - ADJUSTMENT) constitutes an allocation
     or a deallocation.  If the comparison is not decidable at compile
     time (possible with poly_int sizes), give up.  */
  if (!ordered_p (adjustment, unadjusted_args_size))
    return false;

  /* Now, sets ARGS_SIZE->CONSTANT so that we pop the right number of
     bytes after the call.  The right number is the entire
     PENDING_STACK_ADJUST less our ADJUSTMENT plus the amount required
     by the arguments in the first place.  */
  args_size->constant
    = pending_stack_adjust - adjustment + unadjusted_args_size;

  *adjustment_out = adjustment;
  return true;
}
330910d565efSmrg 
331010d565efSmrg /* Scan X expression if it does not dereference any argument slots
331110d565efSmrg    we already clobbered by tail call arguments (as noted in stored_args_map
331210d565efSmrg    bitmap).
331310d565efSmrg    Return nonzero if X expression dereferences such argument slots,
331410d565efSmrg    zero otherwise.  */
331510d565efSmrg 
331610d565efSmrg static int
check_sibcall_argument_overlap_1(rtx x)331710d565efSmrg check_sibcall_argument_overlap_1 (rtx x)
331810d565efSmrg {
331910d565efSmrg   RTX_CODE code;
332010d565efSmrg   int i, j;
332110d565efSmrg   const char *fmt;
332210d565efSmrg 
332310d565efSmrg   if (x == NULL_RTX)
332410d565efSmrg     return 0;
332510d565efSmrg 
332610d565efSmrg   code = GET_CODE (x);
332710d565efSmrg 
332810d565efSmrg   /* We need not check the operands of the CALL expression itself.  */
332910d565efSmrg   if (code == CALL)
333010d565efSmrg     return 0;
333110d565efSmrg 
333210d565efSmrg   if (code == MEM)
3333c7a68eb7Smrg     return (mem_might_overlap_already_clobbered_arg_p
3334c7a68eb7Smrg 	    (XEXP (x, 0), GET_MODE_SIZE (GET_MODE (x))));
333510d565efSmrg 
333610d565efSmrg   /* Scan all subexpressions.  */
333710d565efSmrg   fmt = GET_RTX_FORMAT (code);
333810d565efSmrg   for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
333910d565efSmrg     {
334010d565efSmrg       if (*fmt == 'e')
334110d565efSmrg 	{
334210d565efSmrg 	  if (check_sibcall_argument_overlap_1 (XEXP (x, i)))
334310d565efSmrg 	    return 1;
334410d565efSmrg 	}
334510d565efSmrg       else if (*fmt == 'E')
334610d565efSmrg 	{
334710d565efSmrg 	  for (j = 0; j < XVECLEN (x, i); j++)
334810d565efSmrg 	    if (check_sibcall_argument_overlap_1 (XVECEXP (x, i, j)))
334910d565efSmrg 	      return 1;
335010d565efSmrg 	}
335110d565efSmrg     }
335210d565efSmrg   return 0;
335310d565efSmrg }
335410d565efSmrg 
/* Scan sequence after INSN if it does not dereference any argument slots
   we already clobbered by tail call arguments (as noted in stored_args_map
   bitmap).  If MARK_STORED_ARGS_MAP, add stack slots for ARG to
   stored_args_map bitmap afterwards (when ARG is a register MARK_STORED_ARGS_MAP
   should be 0).  Return nonzero if sequence after INSN dereferences such argument
   slots, zero otherwise.  */

static int
check_sibcall_argument_overlap (rtx_insn *insn, struct arg_data *arg,
				int mark_stored_args_map)
{
  /* [LOW, HIGH) is the byte range of the outgoing-argument area that
     ARG occupies; with poly_int sizes the bounds may not be compile-time
     constants.  */
  poly_uint64 low, high;
  unsigned HOST_WIDE_INT const_low, const_high;

  /* INSN == NULL means "scan from the start of the current sequence";
     otherwise scan everything emitted after INSN.  */
  if (insn == NULL_RTX)
    insn = get_insns ();
  else
    insn = NEXT_INSN (insn);

  /* Stop at the first insn that might load from a clobbered slot.  */
  for (; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
	&& check_sibcall_argument_overlap_1 (PATTERN (insn)))
      break;

  if (mark_stored_args_map)
    {
      /* Normalize the slot offset so that the bitmap indices are
	 non-negative regardless of the direction arguments grow.  */
      if (ARGS_GROW_DOWNWARD)
	low = -arg->locate.slot_offset.constant - arg->locate.size.constant;
      else
	low = arg->locate.slot_offset.constant;
      high = low + arg->locate.size.constant;

      /* If the extent is fully constant, record each byte in the bitmap;
	 otherwise fall back to lowering the conservative watermark below
	 which all slots are treated as clobbered.  */
      const_low = constant_lower_bound (low);
      if (high.is_constant (&const_high))
	for (unsigned HOST_WIDE_INT i = const_low; i < const_high; ++i)
	  bitmap_set_bit (stored_args_map, i);
      else
	stored_args_watermark = MIN (stored_args_watermark, const_low);
    }
  /* Nonzero iff the scan above found an overlapping dereference.  */
  return insn != NULL_RTX;
}
339610d565efSmrg 
339710d565efSmrg /* Given that a function returns a value of mode MODE at the most
339810d565efSmrg    significant end of hard register VALUE, shift VALUE left or right
339910d565efSmrg    as specified by LEFT_P.  Return true if some action was needed.  */
340010d565efSmrg 
340110d565efSmrg bool
shift_return_value(machine_mode mode,bool left_p,rtx value)340210d565efSmrg shift_return_value (machine_mode mode, bool left_p, rtx value)
340310d565efSmrg {
340410d565efSmrg   gcc_assert (REG_P (value) && HARD_REGISTER_P (value));
3405c7a68eb7Smrg   machine_mode value_mode = GET_MODE (value);
3406c7a68eb7Smrg   poly_int64 shift = GET_MODE_BITSIZE (value_mode) - GET_MODE_BITSIZE (mode);
3407c7a68eb7Smrg 
3408c7a68eb7Smrg   if (known_eq (shift, 0))
340910d565efSmrg     return false;
341010d565efSmrg 
341110d565efSmrg   /* Use ashr rather than lshr for right shifts.  This is for the benefit
341210d565efSmrg      of the MIPS port, which requires SImode values to be sign-extended
341310d565efSmrg      when stored in 64-bit registers.  */
3414c7a68eb7Smrg   if (!force_expand_binop (value_mode, left_p ? ashl_optab : ashr_optab,
3415c7a68eb7Smrg 			   value, gen_int_shift_amount (value_mode, shift),
3416c7a68eb7Smrg 			   value, 1, OPTAB_WIDEN))
341710d565efSmrg     gcc_unreachable ();
341810d565efSmrg   return true;
341910d565efSmrg }
342010d565efSmrg 
342110d565efSmrg /* If X is a likely-spilled register value, copy it to a pseudo
342210d565efSmrg    register and return that register.  Return X otherwise.  */
342310d565efSmrg 
342410d565efSmrg static rtx
avoid_likely_spilled_reg(rtx x)342510d565efSmrg avoid_likely_spilled_reg (rtx x)
342610d565efSmrg {
342710d565efSmrg   rtx new_rtx;
342810d565efSmrg 
342910d565efSmrg   if (REG_P (x)
343010d565efSmrg       && HARD_REGISTER_P (x)
343110d565efSmrg       && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (x))))
343210d565efSmrg     {
343310d565efSmrg       /* Make sure that we generate a REG rather than a CONCAT.
343410d565efSmrg 	 Moves into CONCATs can need nontrivial instructions,
343510d565efSmrg 	 and the whole point of this function is to avoid
343610d565efSmrg 	 using the hard register directly in such a situation.  */
343710d565efSmrg       generating_concat_p = 0;
343810d565efSmrg       new_rtx = gen_reg_rtx (GET_MODE (x));
343910d565efSmrg       generating_concat_p = 1;
344010d565efSmrg       emit_move_insn (new_rtx, x);
344110d565efSmrg       return new_rtx;
344210d565efSmrg     }
344310d565efSmrg   return x;
344410d565efSmrg }
344510d565efSmrg 
/* Helper function for expand_call.
   Return false if EXP is not implementable as a sibling call.
   Each rejected case reports its reason through
   maybe_complain_about_tail_call (which errors out for calls marked
   CALL_EXPR_MUST_TAIL_CALL).  */

static bool
can_implement_as_sibling_call_p (tree exp,
				 rtx structure_value_addr,
				 tree funtype,
				 int reg_parm_stack_space ATTRIBUTE_UNUSED,
				 tree fndecl,
				 int flags,
				 tree addr,
				 const args_size &args_size)
{
  /* The target must provide a sibcall epilogue at all.  */
  if (!targetm.have_sibcall_epilogue ())
    {
      maybe_complain_about_tail_call
	(exp,
	 "machine description does not have"
	 " a sibcall_epilogue instruction pattern");
      return false;
    }

  /* Doing sibling call optimization needs some work, since
     structure_value_addr can be allocated on the stack.
     It does not seem worth the effort since few optimizable
     sibling calls will return a structure.  */
  if (structure_value_addr != NULL_RTX)
    {
      maybe_complain_about_tail_call (exp, "callee returns a structure");
      return false;
    }

#ifdef REG_PARM_STACK_SPACE
  /* If outgoing reg parm stack space changes, we cannot do sibcall.  */
  if (OUTGOING_REG_PARM_STACK_SPACE (funtype)
      != OUTGOING_REG_PARM_STACK_SPACE (TREE_TYPE (current_function_decl))
      || (reg_parm_stack_space != REG_PARM_STACK_SPACE (current_function_decl)))
    {
      maybe_complain_about_tail_call (exp,
				      "inconsistent size of stack space"
				      " allocated for arguments which are"
				      " passed in registers");
      return false;
    }
#endif

  /* Check whether the target is able to optimize the call
     into a sibcall.  */
  if (!targetm.function_ok_for_sibcall (fndecl, exp))
    {
      maybe_complain_about_tail_call (exp,
				      "target is not able to optimize the"
				      " call into a sibling call");
      return false;
    }

  /* Functions that do not return exactly once may not be sibcall
     optimized.  */
  if (flags & ECF_RETURNS_TWICE)
    {
      maybe_complain_about_tail_call (exp, "callee returns twice");
      return false;
    }
  if (flags & ECF_NORETURN)
    {
      maybe_complain_about_tail_call (exp, "callee does not return");
      return false;
    }

  /* Calls through a volatile-qualified function type are not
     sibcall candidates.  */
  if (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (addr))))
    {
      maybe_complain_about_tail_call (exp, "volatile function type");
      return false;
    }

  /* If the called function is nested in the current one, it might access
     some of the caller's arguments, but could clobber them beforehand if
     the argument areas are shared.  */
  if (fndecl && decl_function_context (fndecl) == current_function_decl)
    {
      maybe_complain_about_tail_call (exp, "nested function");
      return false;
    }

  /* If this function requires more stack slots than the current
     function, we cannot change it into a sibling call.
     crtl->args.pretend_args_size is not part of the
     stack allocated by our caller.  */
  if (maybe_gt (args_size.constant,
		crtl->args.size - crtl->args.pretend_args_size))
    {
      maybe_complain_about_tail_call (exp,
				      "callee required more stack slots"
				      " than the caller");
      return false;
    }

  /* If the callee pops its own arguments, then it must pop exactly
     the same number of arguments as the current function.  */
  if (maybe_ne (targetm.calls.return_pops_args (fndecl, funtype,
						args_size.constant),
		targetm.calls.return_pops_args (current_function_decl,
						TREE_TYPE
						(current_function_decl),
						crtl->args.size)))
    {
      maybe_complain_about_tail_call (exp,
				      "inconsistent number of"
				      " popped arguments");
      return false;
    }

  /* Finally, give the language frontend a veto.  */
  if (!lang_hooks.decls.ok_for_sibcall (fndecl))
    {
      maybe_complain_about_tail_call (exp, "frontend does not support"
					    " sibling call");
      return false;
    }

  /* All checks passed.  */
  return true;
}
356810d565efSmrg 
3569*ec02198aSmrg /* Update stack alignment when the parameter is passed in the stack
3570*ec02198aSmrg    since the outgoing parameter requires extra alignment on the calling
3571*ec02198aSmrg    function side. */
3572*ec02198aSmrg 
3573*ec02198aSmrg static void
update_stack_alignment_for_call(struct locate_and_pad_arg_data * locate)3574*ec02198aSmrg update_stack_alignment_for_call (struct locate_and_pad_arg_data *locate)
3575*ec02198aSmrg {
3576*ec02198aSmrg   if (crtl->stack_alignment_needed < locate->boundary)
3577*ec02198aSmrg     crtl->stack_alignment_needed = locate->boundary;
3578*ec02198aSmrg   if (crtl->preferred_stack_boundary < locate->boundary)
3579*ec02198aSmrg     crtl->preferred_stack_boundary = locate->boundary;
3580*ec02198aSmrg }
3581*ec02198aSmrg 
358210d565efSmrg /* Generate all the code for a CALL_EXPR exp
358310d565efSmrg    and return an rtx for its value.
358410d565efSmrg    Store the value in TARGET (specified as an rtx) if convenient.
358510d565efSmrg    If the value is stored in TARGET then TARGET is returned.
358610d565efSmrg    If IGNORE is nonzero, then we ignore the value of the function call.  */
358710d565efSmrg 
358810d565efSmrg rtx
expand_call(tree exp,rtx target,int ignore)358910d565efSmrg expand_call (tree exp, rtx target, int ignore)
359010d565efSmrg {
359110d565efSmrg   /* Nonzero if we are currently expanding a call.  */
359210d565efSmrg   static int currently_expanding_call = 0;
359310d565efSmrg 
359410d565efSmrg   /* RTX for the function to be called.  */
359510d565efSmrg   rtx funexp;
359610d565efSmrg   /* Sequence of insns to perform a normal "call".  */
359710d565efSmrg   rtx_insn *normal_call_insns = NULL;
359810d565efSmrg   /* Sequence of insns to perform a tail "call".  */
359910d565efSmrg   rtx_insn *tail_call_insns = NULL;
360010d565efSmrg   /* Data type of the function.  */
360110d565efSmrg   tree funtype;
360210d565efSmrg   tree type_arg_types;
360310d565efSmrg   tree rettype;
360410d565efSmrg   /* Declaration of the function being called,
360510d565efSmrg      or 0 if the function is computed (not known by name).  */
360610d565efSmrg   tree fndecl = 0;
360710d565efSmrg   /* The type of the function being called.  */
360810d565efSmrg   tree fntype;
360910d565efSmrg   bool try_tail_call = CALL_EXPR_TAILCALL (exp);
361010d565efSmrg   bool must_tail_call = CALL_EXPR_MUST_TAIL_CALL (exp);
361110d565efSmrg   int pass;
361210d565efSmrg 
361310d565efSmrg   /* Register in which non-BLKmode value will be returned,
361410d565efSmrg      or 0 if no value or if value is BLKmode.  */
361510d565efSmrg   rtx valreg;
361610d565efSmrg   /* Address where we should return a BLKmode value;
361710d565efSmrg      0 if value not BLKmode.  */
361810d565efSmrg   rtx structure_value_addr = 0;
361910d565efSmrg   /* Nonzero if that address is being passed by treating it as
362010d565efSmrg      an extra, implicit first parameter.  Otherwise,
362110d565efSmrg      it is passed by being copied directly into struct_value_rtx.  */
362210d565efSmrg   int structure_value_addr_parm = 0;
362310d565efSmrg   /* Holds the value of implicit argument for the struct value.  */
362410d565efSmrg   tree structure_value_addr_value = NULL_TREE;
362510d565efSmrg   /* Size of aggregate value wanted, or zero if none wanted
362610d565efSmrg      or if we are using the non-reentrant PCC calling convention
362710d565efSmrg      or expecting the value in registers.  */
3628c7a68eb7Smrg   poly_int64 struct_value_size = 0;
362910d565efSmrg   /* Nonzero if called function returns an aggregate in memory PCC style,
363010d565efSmrg      by returning the address of where to find it.  */
363110d565efSmrg   int pcc_struct_value = 0;
363210d565efSmrg   rtx struct_value = 0;
363310d565efSmrg 
363410d565efSmrg   /* Number of actual parameters in this call, including struct value addr.  */
363510d565efSmrg   int num_actuals;
363610d565efSmrg   /* Number of named args.  Args after this are anonymous ones
363710d565efSmrg      and they must all go on the stack.  */
363810d565efSmrg   int n_named_args;
363910d565efSmrg   /* Number of complex actual arguments that need to be split.  */
364010d565efSmrg   int num_complex_actuals = 0;
364110d565efSmrg 
364210d565efSmrg   /* Vector of information about each argument.
364310d565efSmrg      Arguments are numbered in the order they will be pushed,
364410d565efSmrg      not the order they are written.  */
364510d565efSmrg   struct arg_data *args;
364610d565efSmrg 
364710d565efSmrg   /* Total size in bytes of all the stack-parms scanned so far.  */
364810d565efSmrg   struct args_size args_size;
364910d565efSmrg   struct args_size adjusted_args_size;
365010d565efSmrg   /* Size of arguments before any adjustments (such as rounding).  */
3651c7a68eb7Smrg   poly_int64 unadjusted_args_size;
365210d565efSmrg   /* Data on reg parms scanned so far.  */
365310d565efSmrg   CUMULATIVE_ARGS args_so_far_v;
365410d565efSmrg   cumulative_args_t args_so_far;
365510d565efSmrg   /* Nonzero if a reg parm has been scanned.  */
365610d565efSmrg   int reg_parm_seen;
365710d565efSmrg   /* Nonzero if this is an indirect function call.  */
365810d565efSmrg 
365910d565efSmrg   /* Nonzero if we must avoid push-insns in the args for this call.
366010d565efSmrg      If stack space is allocated for register parameters, but not by the
366110d565efSmrg      caller, then it is preallocated in the fixed part of the stack frame.
366210d565efSmrg      So the entire argument block must then be preallocated (i.e., we
366310d565efSmrg      ignore PUSH_ROUNDING in that case).  */
366410d565efSmrg 
366510d565efSmrg   int must_preallocate = !PUSH_ARGS;
366610d565efSmrg 
366710d565efSmrg   /* Size of the stack reserved for parameter registers.  */
366810d565efSmrg   int reg_parm_stack_space = 0;
366910d565efSmrg 
367010d565efSmrg   /* Address of space preallocated for stack parms
367110d565efSmrg      (on machines that lack push insns), or 0 if space not preallocated.  */
367210d565efSmrg   rtx argblock = 0;
367310d565efSmrg 
367410d565efSmrg   /* Mask of ECF_ and ERF_ flags.  */
367510d565efSmrg   int flags = 0;
367610d565efSmrg   int return_flags = 0;
367710d565efSmrg #ifdef REG_PARM_STACK_SPACE
367810d565efSmrg   /* Define the boundary of the register parm stack space that needs to be
367910d565efSmrg      saved, if any.  */
368010d565efSmrg   int low_to_save, high_to_save;
368110d565efSmrg   rtx save_area = 0;		/* Place that it is saved */
368210d565efSmrg #endif
368310d565efSmrg 
3684c7a68eb7Smrg   unsigned int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
368510d565efSmrg   char *initial_stack_usage_map = stack_usage_map;
3686c7a68eb7Smrg   unsigned HOST_WIDE_INT initial_stack_usage_watermark = stack_usage_watermark;
368710d565efSmrg   char *stack_usage_map_buf = NULL;
368810d565efSmrg 
3689c7a68eb7Smrg   poly_int64 old_stack_allocated;
369010d565efSmrg 
369110d565efSmrg   /* State variables to track stack modifications.  */
369210d565efSmrg   rtx old_stack_level = 0;
369310d565efSmrg   int old_stack_arg_under_construction = 0;
3694c7a68eb7Smrg   poly_int64 old_pending_adj = 0;
369510d565efSmrg   int old_inhibit_defer_pop = inhibit_defer_pop;
369610d565efSmrg 
369710d565efSmrg   /* Some stack pointer alterations we make are performed via
369810d565efSmrg      allocate_dynamic_stack_space. This modifies the stack_pointer_delta,
369910d565efSmrg      which we then also need to save/restore along the way.  */
3700c7a68eb7Smrg   poly_int64 old_stack_pointer_delta = 0;
370110d565efSmrg 
370210d565efSmrg   rtx call_fusage;
370310d565efSmrg   tree addr = CALL_EXPR_FN (exp);
370410d565efSmrg   int i;
370510d565efSmrg   /* The alignment of the stack, in bits.  */
370610d565efSmrg   unsigned HOST_WIDE_INT preferred_stack_boundary;
370710d565efSmrg   /* The alignment of the stack, in bytes.  */
370810d565efSmrg   unsigned HOST_WIDE_INT preferred_unit_stack_boundary;
370910d565efSmrg   /* The static chain value to use for this call.  */
371010d565efSmrg   rtx static_chain_value;
371110d565efSmrg   /* See if this is "nothrow" function call.  */
371210d565efSmrg   if (TREE_NOTHROW (exp))
371310d565efSmrg     flags |= ECF_NOTHROW;
371410d565efSmrg 
371510d565efSmrg   /* See if we can find a DECL-node for the actual function, and get the
371610d565efSmrg      function attributes (flags) from the function decl or type node.  */
371710d565efSmrg   fndecl = get_callee_fndecl (exp);
371810d565efSmrg   if (fndecl)
371910d565efSmrg     {
372010d565efSmrg       fntype = TREE_TYPE (fndecl);
372110d565efSmrg       flags |= flags_from_decl_or_type (fndecl);
372210d565efSmrg       return_flags |= decl_return_flags (fndecl);
372310d565efSmrg     }
372410d565efSmrg   else
372510d565efSmrg     {
372610d565efSmrg       fntype = TREE_TYPE (TREE_TYPE (addr));
372710d565efSmrg       flags |= flags_from_decl_or_type (fntype);
372810d565efSmrg       if (CALL_EXPR_BY_DESCRIPTOR (exp))
372910d565efSmrg 	flags |= ECF_BY_DESCRIPTOR;
373010d565efSmrg     }
373110d565efSmrg   rettype = TREE_TYPE (exp);
373210d565efSmrg 
373310d565efSmrg   struct_value = targetm.calls.struct_value_rtx (fntype, 0);
373410d565efSmrg 
373510d565efSmrg   /* Warn if this value is an aggregate type,
373610d565efSmrg      regardless of which calling convention we are using for it.  */
373710d565efSmrg   if (AGGREGATE_TYPE_P (rettype))
373810d565efSmrg     warning (OPT_Waggregate_return, "function call has aggregate value");
373910d565efSmrg 
374010d565efSmrg   /* If the result of a non looping pure or const function call is
374110d565efSmrg      ignored (or void), and none of its arguments are volatile, we can
374210d565efSmrg      avoid expanding the call and just evaluate the arguments for
374310d565efSmrg      side-effects.  */
374410d565efSmrg   if ((flags & (ECF_CONST | ECF_PURE))
374510d565efSmrg       && (!(flags & ECF_LOOPING_CONST_OR_PURE))
374610d565efSmrg       && (ignore || target == const0_rtx
374710d565efSmrg 	  || TYPE_MODE (rettype) == VOIDmode))
374810d565efSmrg     {
374910d565efSmrg       bool volatilep = false;
375010d565efSmrg       tree arg;
375110d565efSmrg       call_expr_arg_iterator iter;
375210d565efSmrg 
375310d565efSmrg       FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
375410d565efSmrg 	if (TREE_THIS_VOLATILE (arg))
375510d565efSmrg 	  {
375610d565efSmrg 	    volatilep = true;
375710d565efSmrg 	    break;
375810d565efSmrg 	  }
375910d565efSmrg 
376010d565efSmrg       if (! volatilep)
376110d565efSmrg 	{
376210d565efSmrg 	  FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
376310d565efSmrg 	    expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
376410d565efSmrg 	  return const0_rtx;
376510d565efSmrg 	}
376610d565efSmrg     }
376710d565efSmrg 
376810d565efSmrg #ifdef REG_PARM_STACK_SPACE
376910d565efSmrg   reg_parm_stack_space = REG_PARM_STACK_SPACE (!fndecl ? fntype : fndecl);
377010d565efSmrg #endif
377110d565efSmrg 
377210d565efSmrg   if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
377310d565efSmrg       && reg_parm_stack_space > 0 && PUSH_ARGS)
377410d565efSmrg     must_preallocate = 1;
377510d565efSmrg 
377610d565efSmrg   /* Set up a place to return a structure.  */
377710d565efSmrg 
377810d565efSmrg   /* Cater to broken compilers.  */
377910d565efSmrg   if (aggregate_value_p (exp, fntype))
378010d565efSmrg     {
378110d565efSmrg       /* This call returns a big structure.  */
378210d565efSmrg       flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
378310d565efSmrg 
378410d565efSmrg #ifdef PCC_STATIC_STRUCT_RETURN
378510d565efSmrg       {
378610d565efSmrg 	pcc_struct_value = 1;
378710d565efSmrg       }
378810d565efSmrg #else /* not PCC_STATIC_STRUCT_RETURN */
378910d565efSmrg       {
3790c7a68eb7Smrg 	if (!poly_int_tree_p (TYPE_SIZE_UNIT (rettype), &struct_value_size))
3791c7a68eb7Smrg 	  struct_value_size = -1;
379210d565efSmrg 
379310d565efSmrg 	/* Even if it is semantically safe to use the target as the return
379410d565efSmrg 	   slot, it may be not sufficiently aligned for the return type.  */
379510d565efSmrg 	if (CALL_EXPR_RETURN_SLOT_OPT (exp)
379610d565efSmrg 	    && target
379710d565efSmrg 	    && MEM_P (target)
379810d565efSmrg 	    /* If rettype is addressable, we may not create a temporary.
379910d565efSmrg 	       If target is properly aligned at runtime and the compiler
380010d565efSmrg 	       just doesn't know about it, it will work fine, otherwise it
380110d565efSmrg 	       will be UB.  */
380210d565efSmrg 	    && (TREE_ADDRESSABLE (rettype)
380310d565efSmrg 		|| !(MEM_ALIGN (target) < TYPE_ALIGN (rettype)
3804c7a68eb7Smrg 		     && targetm.slow_unaligned_access (TYPE_MODE (rettype),
380510d565efSmrg 						       MEM_ALIGN (target)))))
380610d565efSmrg 	  structure_value_addr = XEXP (target, 0);
380710d565efSmrg 	else
380810d565efSmrg 	  {
380910d565efSmrg 	    /* For variable-sized objects, we must be called with a target
381010d565efSmrg 	       specified.  If we were to allocate space on the stack here,
381110d565efSmrg 	       we would have no way of knowing when to free it.  */
381210d565efSmrg 	    rtx d = assign_temp (rettype, 1, 1);
381310d565efSmrg 	    structure_value_addr = XEXP (d, 0);
381410d565efSmrg 	    target = 0;
381510d565efSmrg 	  }
381610d565efSmrg       }
381710d565efSmrg #endif /* not PCC_STATIC_STRUCT_RETURN */
381810d565efSmrg     }
381910d565efSmrg 
382010d565efSmrg   /* Figure out the amount to which the stack should be aligned.  */
382110d565efSmrg   preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
382210d565efSmrg   if (fndecl)
382310d565efSmrg     {
382410d565efSmrg       struct cgraph_rtl_info *i = cgraph_node::rtl_info (fndecl);
382510d565efSmrg       /* Without automatic stack alignment, we can't increase preferred
382610d565efSmrg 	 stack boundary.  With automatic stack alignment, it is
382710d565efSmrg 	 unnecessary since unless we can guarantee that all callers will
382810d565efSmrg 	 align the outgoing stack properly, callee has to align its
382910d565efSmrg 	 stack anyway.  */
383010d565efSmrg       if (i
383110d565efSmrg 	  && i->preferred_incoming_stack_boundary
383210d565efSmrg 	  && i->preferred_incoming_stack_boundary < preferred_stack_boundary)
383310d565efSmrg 	preferred_stack_boundary = i->preferred_incoming_stack_boundary;
383410d565efSmrg     }
383510d565efSmrg 
383610d565efSmrg   /* Operand 0 is a pointer-to-function; get the type of the function.  */
383710d565efSmrg   funtype = TREE_TYPE (addr);
383810d565efSmrg   gcc_assert (POINTER_TYPE_P (funtype));
383910d565efSmrg   funtype = TREE_TYPE (funtype);
384010d565efSmrg 
384110d565efSmrg   /* Count whether there are actual complex arguments that need to be split
384210d565efSmrg      into their real and imaginary parts.  Munge the type_arg_types
384310d565efSmrg      appropriately here as well.  */
384410d565efSmrg   if (targetm.calls.split_complex_arg)
384510d565efSmrg     {
384610d565efSmrg       call_expr_arg_iterator iter;
384710d565efSmrg       tree arg;
384810d565efSmrg       FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
384910d565efSmrg 	{
385010d565efSmrg 	  tree type = TREE_TYPE (arg);
385110d565efSmrg 	  if (type && TREE_CODE (type) == COMPLEX_TYPE
385210d565efSmrg 	      && targetm.calls.split_complex_arg (type))
385310d565efSmrg 	    num_complex_actuals++;
385410d565efSmrg 	}
385510d565efSmrg       type_arg_types = split_complex_types (TYPE_ARG_TYPES (funtype));
385610d565efSmrg     }
385710d565efSmrg   else
385810d565efSmrg     type_arg_types = TYPE_ARG_TYPES (funtype);
385910d565efSmrg 
386010d565efSmrg   if (flags & ECF_MAY_BE_ALLOCA)
386110d565efSmrg     cfun->calls_alloca = 1;
386210d565efSmrg 
386310d565efSmrg   /* If struct_value_rtx is 0, it means pass the address
386410d565efSmrg      as if it were an extra parameter.  Put the argument expression
386510d565efSmrg      in structure_value_addr_value.  */
386610d565efSmrg   if (structure_value_addr && struct_value == 0)
386710d565efSmrg     {
386810d565efSmrg       /* If structure_value_addr is a REG other than
386910d565efSmrg 	 virtual_outgoing_args_rtx, we can always use it.  If it
387010d565efSmrg 	 is not a REG, we must always copy it into a register.
387110d565efSmrg 	 If it is virtual_outgoing_args_rtx, we must copy it to another
387210d565efSmrg 	 register in some cases.  */
387310d565efSmrg       rtx temp = (!REG_P (structure_value_addr)
387410d565efSmrg 		  || (ACCUMULATE_OUTGOING_ARGS
387510d565efSmrg 		      && stack_arg_under_construction
387610d565efSmrg 		      && structure_value_addr == virtual_outgoing_args_rtx)
387710d565efSmrg 		  ? copy_addr_to_reg (convert_memory_address
387810d565efSmrg 				      (Pmode, structure_value_addr))
387910d565efSmrg 		  : structure_value_addr);
388010d565efSmrg 
388110d565efSmrg       structure_value_addr_value =
388210d565efSmrg 	make_tree (build_pointer_type (TREE_TYPE (funtype)), temp);
38830fc04c29Smrg       structure_value_addr_parm = 1;
388410d565efSmrg     }
388510d565efSmrg 
388610d565efSmrg   /* Count the arguments and set NUM_ACTUALS.  */
388710d565efSmrg   num_actuals =
388810d565efSmrg     call_expr_nargs (exp) + num_complex_actuals + structure_value_addr_parm;
388910d565efSmrg 
389010d565efSmrg   /* Compute number of named args.
389110d565efSmrg      First, do a raw count of the args for INIT_CUMULATIVE_ARGS.  */
389210d565efSmrg 
389310d565efSmrg   if (type_arg_types != 0)
389410d565efSmrg     n_named_args
389510d565efSmrg       = (list_length (type_arg_types)
389610d565efSmrg 	 /* Count the struct value address, if it is passed as a parm.  */
389710d565efSmrg 	 + structure_value_addr_parm);
389810d565efSmrg   else
389910d565efSmrg     /* If we know nothing, treat all args as named.  */
390010d565efSmrg     n_named_args = num_actuals;
390110d565efSmrg 
390210d565efSmrg   /* Start updating where the next arg would go.
390310d565efSmrg 
390410d565efSmrg      On some machines (such as the PA) indirect calls have a different
390510d565efSmrg      calling convention than normal calls.  The fourth argument in
390610d565efSmrg      INIT_CUMULATIVE_ARGS tells the backend if this is an indirect call
390710d565efSmrg      or not.  */
390810d565efSmrg   INIT_CUMULATIVE_ARGS (args_so_far_v, funtype, NULL_RTX, fndecl, n_named_args);
390910d565efSmrg   args_so_far = pack_cumulative_args (&args_so_far_v);
391010d565efSmrg 
391110d565efSmrg   /* Now possibly adjust the number of named args.
391210d565efSmrg      Normally, don't include the last named arg if anonymous args follow.
391310d565efSmrg      We do include the last named arg if
391410d565efSmrg      targetm.calls.strict_argument_naming() returns nonzero.
391510d565efSmrg      (If no anonymous args follow, the result of list_length is actually
391610d565efSmrg      one too large.  This is harmless.)
391710d565efSmrg 
391810d565efSmrg      If targetm.calls.pretend_outgoing_varargs_named() returns
391910d565efSmrg      nonzero, and targetm.calls.strict_argument_naming() returns zero,
392010d565efSmrg      this machine will be able to place unnamed args that were passed
392110d565efSmrg      in registers into the stack.  So treat all args as named.  This
392210d565efSmrg      allows the insns emitting for a specific argument list to be
392310d565efSmrg      independent of the function declaration.
392410d565efSmrg 
392510d565efSmrg      If targetm.calls.pretend_outgoing_varargs_named() returns zero,
392610d565efSmrg      we do not have any reliable way to pass unnamed args in
392710d565efSmrg      registers, so we must force them into memory.  */
392810d565efSmrg 
392910d565efSmrg   if (type_arg_types != 0
393010d565efSmrg       && targetm.calls.strict_argument_naming (args_so_far))
393110d565efSmrg     ;
393210d565efSmrg   else if (type_arg_types != 0
393310d565efSmrg 	   && ! targetm.calls.pretend_outgoing_varargs_named (args_so_far))
393410d565efSmrg     /* Don't include the last named arg.  */
393510d565efSmrg     --n_named_args;
393610d565efSmrg   else
393710d565efSmrg     /* Treat all args as named.  */
393810d565efSmrg     n_named_args = num_actuals;
393910d565efSmrg 
394010d565efSmrg   /* Make a vector to hold all the information about each arg.  */
394110d565efSmrg   args = XCNEWVEC (struct arg_data, num_actuals);
394210d565efSmrg 
394310d565efSmrg   /* Build up entries in the ARGS array, compute the size of the
394410d565efSmrg      arguments into ARGS_SIZE, etc.  */
394510d565efSmrg   initialize_argument_information (num_actuals, args, &args_size,
394610d565efSmrg 				   n_named_args, exp,
394710d565efSmrg 				   structure_value_addr_value, fndecl, fntype,
394810d565efSmrg 				   args_so_far, reg_parm_stack_space,
394910d565efSmrg 				   &old_stack_level, &old_pending_adj,
395010d565efSmrg 				   &must_preallocate, &flags,
395110d565efSmrg 				   &try_tail_call, CALL_FROM_THUNK_P (exp));
395210d565efSmrg 
395310d565efSmrg   if (args_size.var)
395410d565efSmrg     must_preallocate = 1;
395510d565efSmrg 
395610d565efSmrg   /* Now make final decision about preallocating stack space.  */
395710d565efSmrg   must_preallocate = finalize_must_preallocate (must_preallocate,
395810d565efSmrg 						num_actuals, args,
395910d565efSmrg 						&args_size);
396010d565efSmrg 
396110d565efSmrg   /* If the structure value address will reference the stack pointer, we
396210d565efSmrg      must stabilize it.  We don't need to do this if we know that we are
396310d565efSmrg      not going to adjust the stack pointer in processing this call.  */
396410d565efSmrg 
396510d565efSmrg   if (structure_value_addr
396610d565efSmrg       && (reg_mentioned_p (virtual_stack_dynamic_rtx, structure_value_addr)
396710d565efSmrg 	  || reg_mentioned_p (virtual_outgoing_args_rtx,
396810d565efSmrg 			      structure_value_addr))
396910d565efSmrg       && (args_size.var
3970c7a68eb7Smrg 	  || (!ACCUMULATE_OUTGOING_ARGS
3971c7a68eb7Smrg 	      && maybe_ne (args_size.constant, 0))))
397210d565efSmrg     structure_value_addr = copy_to_reg (structure_value_addr);
397310d565efSmrg 
397410d565efSmrg   /* Tail calls can make things harder to debug, and we've traditionally
397510d565efSmrg      pushed these optimizations into -O2.  Don't try if we're already
397610d565efSmrg      expanding a call, as that means we're an argument.  Don't try if
397710d565efSmrg      there's cleanups, as we know there's code to follow the call.  */
397810d565efSmrg   if (currently_expanding_call++ != 0
39790fc04c29Smrg       || (!flag_optimize_sibling_calls && !CALL_FROM_THUNK_P (exp))
398010d565efSmrg       || args_size.var
398110d565efSmrg       || dbg_cnt (tail_call) == false)
398210d565efSmrg     try_tail_call = 0;
398310d565efSmrg 
398421ff1670Smrg   /* Workaround buggy C/C++ wrappers around Fortran routines with
398521ff1670Smrg      character(len=constant) arguments if the hidden string length arguments
398621ff1670Smrg      are passed on the stack; if the callers forget to pass those arguments,
398721ff1670Smrg      attempting to tail call in such routines leads to stack corruption.
398821ff1670Smrg      Avoid tail calls in functions where at least one such hidden string
398921ff1670Smrg      length argument is passed (partially or fully) on the stack in the
399021ff1670Smrg      caller and the callee needs to pass any arguments on the stack.
399121ff1670Smrg      See PR90329.  */
3992c7a68eb7Smrg   if (try_tail_call && maybe_ne (args_size.constant, 0))
399321ff1670Smrg     for (tree arg = DECL_ARGUMENTS (current_function_decl);
399421ff1670Smrg 	 arg; arg = DECL_CHAIN (arg))
399521ff1670Smrg       if (DECL_HIDDEN_STRING_LENGTH (arg) && DECL_INCOMING_RTL (arg))
399621ff1670Smrg 	{
399721ff1670Smrg 	  subrtx_iterator::array_type array;
399821ff1670Smrg 	  FOR_EACH_SUBRTX (iter, array, DECL_INCOMING_RTL (arg), NONCONST)
399921ff1670Smrg 	    if (MEM_P (*iter))
400021ff1670Smrg 	      {
400121ff1670Smrg 		try_tail_call = 0;
400221ff1670Smrg 		break;
400321ff1670Smrg 	      }
400421ff1670Smrg 	}
400521ff1670Smrg 
400610d565efSmrg   /* If the user has marked the function as requiring tail-call
400710d565efSmrg      optimization, attempt it.  */
400810d565efSmrg   if (must_tail_call)
400910d565efSmrg     try_tail_call = 1;
401010d565efSmrg 
401210d565efSmrg   /*  Check the remaining conditions that can make tail call optimization fail.  */
401210d565efSmrg   if (try_tail_call)
401310d565efSmrg     try_tail_call = can_implement_as_sibling_call_p (exp,
401410d565efSmrg 						     structure_value_addr,
401510d565efSmrg 						     funtype,
401610d565efSmrg 						     reg_parm_stack_space,
401710d565efSmrg 						     fndecl,
401810d565efSmrg 						     flags, addr, args_size);
401910d565efSmrg 
402010d565efSmrg   /* Check if caller and callee disagree in promotion of function
402110d565efSmrg      return value.  */
402210d565efSmrg   if (try_tail_call)
402310d565efSmrg     {
402410d565efSmrg       machine_mode caller_mode, caller_promoted_mode;
402510d565efSmrg       machine_mode callee_mode, callee_promoted_mode;
402610d565efSmrg       int caller_unsignedp, callee_unsignedp;
402710d565efSmrg       tree caller_res = DECL_RESULT (current_function_decl);
402810d565efSmrg 
402910d565efSmrg       caller_unsignedp = TYPE_UNSIGNED (TREE_TYPE (caller_res));
403010d565efSmrg       caller_mode = DECL_MODE (caller_res);
403110d565efSmrg       callee_unsignedp = TYPE_UNSIGNED (TREE_TYPE (funtype));
403210d565efSmrg       callee_mode = TYPE_MODE (TREE_TYPE (funtype));
403310d565efSmrg       caller_promoted_mode
403410d565efSmrg 	= promote_function_mode (TREE_TYPE (caller_res), caller_mode,
403510d565efSmrg 				 &caller_unsignedp,
403610d565efSmrg 				 TREE_TYPE (current_function_decl), 1);
403710d565efSmrg       callee_promoted_mode
403810d565efSmrg 	= promote_function_mode (TREE_TYPE (funtype), callee_mode,
403910d565efSmrg 				 &callee_unsignedp,
404010d565efSmrg 				 funtype, 1);
404110d565efSmrg       if (caller_mode != VOIDmode
404210d565efSmrg 	  && (caller_promoted_mode != callee_promoted_mode
404310d565efSmrg 	      || ((caller_mode != caller_promoted_mode
404410d565efSmrg 		   || callee_mode != callee_promoted_mode)
404510d565efSmrg 		  && (caller_unsignedp != callee_unsignedp
4046c7a68eb7Smrg 		      || partial_subreg_p (caller_mode, callee_mode)))))
404710d565efSmrg 	{
404810d565efSmrg 	  try_tail_call = 0;
404910d565efSmrg 	  maybe_complain_about_tail_call (exp,
405010d565efSmrg 					  "caller and callee disagree in"
405110d565efSmrg 					  " promotion of function"
405210d565efSmrg 					  " return value");
405310d565efSmrg 	}
405410d565efSmrg     }
405510d565efSmrg 
405610d565efSmrg   /* Ensure current function's preferred stack boundary is at least
405710d565efSmrg      what we need.  Stack alignment may also increase preferred stack
405810d565efSmrg      boundary.  */
4059*ec02198aSmrg   for (i = 0; i < num_actuals; i++)
4060*ec02198aSmrg     if (reg_parm_stack_space > 0
4061*ec02198aSmrg 	|| args[i].reg == 0
4062*ec02198aSmrg 	|| args[i].partial != 0
4063*ec02198aSmrg 	|| args[i].pass_on_stack)
4064*ec02198aSmrg       update_stack_alignment_for_call (&args[i].locate);
406510d565efSmrg   if (crtl->preferred_stack_boundary < preferred_stack_boundary)
406610d565efSmrg     crtl->preferred_stack_boundary = preferred_stack_boundary;
406710d565efSmrg   else
406810d565efSmrg     preferred_stack_boundary = crtl->preferred_stack_boundary;
406910d565efSmrg 
407010d565efSmrg   preferred_unit_stack_boundary = preferred_stack_boundary / BITS_PER_UNIT;
407110d565efSmrg 
4072*ec02198aSmrg   if (flag_callgraph_info)
4073*ec02198aSmrg     record_final_call (fndecl, EXPR_LOCATION (exp));
4074*ec02198aSmrg 
407510d565efSmrg   /* We want to make two insn chains; one for a sibling call, the other
407610d565efSmrg      for a normal call.  We will select one of the two chains after
407710d565efSmrg      initial RTL generation is complete.  */
407810d565efSmrg   for (pass = try_tail_call ? 0 : 1; pass < 2; pass++)
407910d565efSmrg     {
408010d565efSmrg       int sibcall_failure = 0;
408110d565efSmrg       /* We want to emit any pending stack adjustments before the tail
408210d565efSmrg 	 recursion "call".  That way we know any adjustment after the tail
408310d565efSmrg 	 recursion call can be ignored if we indeed use the tail
408410d565efSmrg 	 call expansion.  */
408510d565efSmrg       saved_pending_stack_adjust save;
408610d565efSmrg       rtx_insn *insns, *before_call, *after_args;
408710d565efSmrg       rtx next_arg_reg;
408810d565efSmrg 
408910d565efSmrg       if (pass == 0)
409010d565efSmrg 	{
409110d565efSmrg 	  /* State variables we need to save and restore between
409210d565efSmrg 	     iterations.  */
409310d565efSmrg 	  save_pending_stack_adjust (&save);
409410d565efSmrg 	}
409510d565efSmrg       if (pass)
409610d565efSmrg 	flags &= ~ECF_SIBCALL;
409710d565efSmrg       else
409810d565efSmrg 	flags |= ECF_SIBCALL;
409910d565efSmrg 
410010d565efSmrg       /* Other state variables that we must reinitialize each time
410110d565efSmrg 	 through the loop (that are not initialized by the loop itself).  */
410210d565efSmrg       argblock = 0;
410310d565efSmrg       call_fusage = 0;
410410d565efSmrg 
410510d565efSmrg       /* Start a new sequence for the normal call case.
410610d565efSmrg 
410710d565efSmrg 	 From this point on, if the sibling call fails, we want to set
410810d565efSmrg 	 sibcall_failure instead of continuing the loop.  */
410910d565efSmrg       start_sequence ();
411010d565efSmrg 
411110d565efSmrg       /* Don't let pending stack adjusts add up to too much.
411210d565efSmrg 	 Also, do all pending adjustments now if there is any chance
411310d565efSmrg 	 this might be a call to alloca or if we are expanding a sibling
411410d565efSmrg 	 call sequence.
411510d565efSmrg 	 Also do the adjustments before a throwing call, otherwise
411610d565efSmrg 	 exception handling can fail; PR 19225. */
4117c7a68eb7Smrg       if (maybe_ge (pending_stack_adjust, 32)
4118c7a68eb7Smrg 	  || (maybe_ne (pending_stack_adjust, 0)
411910d565efSmrg 	      && (flags & ECF_MAY_BE_ALLOCA))
4120c7a68eb7Smrg 	  || (maybe_ne (pending_stack_adjust, 0)
412110d565efSmrg 	      && flag_exceptions && !(flags & ECF_NOTHROW))
412210d565efSmrg 	  || pass == 0)
412310d565efSmrg 	do_pending_stack_adjust ();
412410d565efSmrg 
412510d565efSmrg       /* Precompute any arguments as needed.  */
412610d565efSmrg       if (pass)
412710d565efSmrg 	precompute_arguments (num_actuals, args);
412810d565efSmrg 
412910d565efSmrg       /* Now we are about to start emitting insns that can be deleted
413010d565efSmrg 	 if a libcall is deleted.  */
413110d565efSmrg       if (pass && (flags & ECF_MALLOC))
413210d565efSmrg 	start_sequence ();
413310d565efSmrg 
413410d565efSmrg       if (pass == 0
413510d565efSmrg 	  && crtl->stack_protect_guard
413610d565efSmrg 	  && targetm.stack_protect_runtime_enabled_p ())
413710d565efSmrg 	stack_protect_epilogue ();
413810d565efSmrg 
413910d565efSmrg       adjusted_args_size = args_size;
414010d565efSmrg       /* Compute the actual size of the argument block required.  The variable
414110d565efSmrg 	 and constant sizes must be combined, the size may have to be rounded,
414210d565efSmrg 	 and there may be a minimum required size.  When generating a sibcall
414310d565efSmrg 	 pattern, do not round up, since we'll be re-using whatever space our
414410d565efSmrg 	 caller provided.  */
414510d565efSmrg       unadjusted_args_size
414610d565efSmrg 	= compute_argument_block_size (reg_parm_stack_space,
414710d565efSmrg 				       &adjusted_args_size,
414810d565efSmrg 				       fndecl, fntype,
414910d565efSmrg 				       (pass == 0 ? 0
415010d565efSmrg 					: preferred_stack_boundary));
415110d565efSmrg 
415210d565efSmrg       old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
415310d565efSmrg 
415410d565efSmrg       /* The argument block when performing a sibling call is the
415510d565efSmrg 	 incoming argument block.  */
415610d565efSmrg       if (pass == 0)
415710d565efSmrg 	{
415810d565efSmrg 	  argblock = crtl->args.internal_arg_pointer;
415910d565efSmrg 	  if (STACK_GROWS_DOWNWARD)
416010d565efSmrg 	    argblock
416110d565efSmrg 	      = plus_constant (Pmode, argblock, crtl->args.pretend_args_size);
416210d565efSmrg 	  else
416310d565efSmrg 	    argblock
416410d565efSmrg 	      = plus_constant (Pmode, argblock, -crtl->args.pretend_args_size);
416510d565efSmrg 
4166c7a68eb7Smrg 	  HOST_WIDE_INT map_size = constant_lower_bound (args_size.constant);
4167c7a68eb7Smrg 	  stored_args_map = sbitmap_alloc (map_size);
416810d565efSmrg 	  bitmap_clear (stored_args_map);
4169c7a68eb7Smrg 	  stored_args_watermark = HOST_WIDE_INT_M1U;
417010d565efSmrg 	}
417110d565efSmrg 
417210d565efSmrg       /* If we have no actual push instructions, or shouldn't use them,
417310d565efSmrg 	 make space for all args right now.  */
417410d565efSmrg       else if (adjusted_args_size.var != 0)
417510d565efSmrg 	{
417610d565efSmrg 	  if (old_stack_level == 0)
417710d565efSmrg 	    {
417810d565efSmrg 	      emit_stack_save (SAVE_BLOCK, &old_stack_level);
417910d565efSmrg 	      old_stack_pointer_delta = stack_pointer_delta;
418010d565efSmrg 	      old_pending_adj = pending_stack_adjust;
418110d565efSmrg 	      pending_stack_adjust = 0;
418210d565efSmrg 	      /* stack_arg_under_construction says whether a stack arg is
418310d565efSmrg 		 being constructed at the old stack level.  Pushing the stack
418410d565efSmrg 		 gets a clean outgoing argument block.  */
418510d565efSmrg 	      old_stack_arg_under_construction = stack_arg_under_construction;
418610d565efSmrg 	      stack_arg_under_construction = 0;
418710d565efSmrg 	    }
418810d565efSmrg 	  argblock = push_block (ARGS_SIZE_RTX (adjusted_args_size), 0, 0);
418910d565efSmrg 	  if (flag_stack_usage_info)
419010d565efSmrg 	    current_function_has_unbounded_dynamic_stack_size = 1;
419110d565efSmrg 	}
419210d565efSmrg       else
419310d565efSmrg 	{
419410d565efSmrg 	  /* Note that we must go through the motions of allocating an argument
419510d565efSmrg 	     block even if the size is zero because we may be storing args
419610d565efSmrg 	     in the area reserved for register arguments, which may be part of
419710d565efSmrg 	     the stack frame.  */
419810d565efSmrg 
4199c7a68eb7Smrg 	  poly_int64 needed = adjusted_args_size.constant;
420010d565efSmrg 
420110d565efSmrg 	  /* Store the maximum argument space used.  It will be pushed by
420210d565efSmrg 	     the prologue (if ACCUMULATE_OUTGOING_ARGS, or stack overflow
420310d565efSmrg 	     checking).  */
420410d565efSmrg 
4205c7a68eb7Smrg 	  crtl->outgoing_args_size = upper_bound (crtl->outgoing_args_size,
4206c7a68eb7Smrg 						  needed);
420710d565efSmrg 
420810d565efSmrg 	  if (must_preallocate)
420910d565efSmrg 	    {
421010d565efSmrg 	      if (ACCUMULATE_OUTGOING_ARGS)
421110d565efSmrg 		{
421210d565efSmrg 		  /* Since the stack pointer will never be pushed, it is
421310d565efSmrg 		     possible for the evaluation of a parm to clobber
421410d565efSmrg 		     something we have already written to the stack.
421510d565efSmrg 		     Since most function calls on RISC machines do not use
421610d565efSmrg 		     the stack, this is uncommon, but must work correctly.
421710d565efSmrg 
421810d565efSmrg 		     Therefore, we save any area of the stack that was already
421910d565efSmrg 		     written and that we are using.  Here we set up to do this
422010d565efSmrg 		     by making a new stack usage map from the old one.  The
422110d565efSmrg 		     actual save will be done by store_one_arg.
422210d565efSmrg 
422310d565efSmrg 		     Another approach might be to try to reorder the argument
422410d565efSmrg 		     evaluations to avoid this conflicting stack usage.  */
422510d565efSmrg 
422610d565efSmrg 		  /* Since we will be writing into the entire argument area,
422710d565efSmrg 		     the map must be allocated for its entire size, not just
422810d565efSmrg 		     the part that is the responsibility of the caller.  */
422910d565efSmrg 		  if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
423010d565efSmrg 		    needed += reg_parm_stack_space;
423110d565efSmrg 
4232c7a68eb7Smrg 		  poly_int64 limit = needed;
423310d565efSmrg 		  if (ARGS_GROW_DOWNWARD)
4234c7a68eb7Smrg 		    limit += 1;
4235c7a68eb7Smrg 
4236c7a68eb7Smrg 		  /* For polynomial sizes, this is the maximum possible
4237c7a68eb7Smrg 		     size needed for arguments with a constant size
4238c7a68eb7Smrg 		     and offset.  */
4239c7a68eb7Smrg 		  HOST_WIDE_INT const_limit = constant_lower_bound (limit);
424010d565efSmrg 		  highest_outgoing_arg_in_use
4241c7a68eb7Smrg 		    = MAX (initial_highest_arg_in_use, const_limit);
424210d565efSmrg 
424310d565efSmrg 		  free (stack_usage_map_buf);
424410d565efSmrg 		  stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
424510d565efSmrg 		  stack_usage_map = stack_usage_map_buf;
424610d565efSmrg 
424710d565efSmrg 		  if (initial_highest_arg_in_use)
424810d565efSmrg 		    memcpy (stack_usage_map, initial_stack_usage_map,
424910d565efSmrg 			    initial_highest_arg_in_use);
425010d565efSmrg 
425110d565efSmrg 		  if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
425210d565efSmrg 		    memset (&stack_usage_map[initial_highest_arg_in_use], 0,
425310d565efSmrg 			   (highest_outgoing_arg_in_use
425410d565efSmrg 			    - initial_highest_arg_in_use));
425510d565efSmrg 		  needed = 0;
425610d565efSmrg 
425710d565efSmrg 		  /* The address of the outgoing argument list must not be
425810d565efSmrg 		     copied to a register here, because argblock would be left
425910d565efSmrg 		     pointing to the wrong place after the call to
426010d565efSmrg 		     allocate_dynamic_stack_space below.  */
426110d565efSmrg 
426210d565efSmrg 		  argblock = virtual_outgoing_args_rtx;
426310d565efSmrg 		}
426410d565efSmrg 	      else
426510d565efSmrg 		{
426610d565efSmrg 		  /* Try to reuse some or all of the pending_stack_adjust
426710d565efSmrg 		     to get this space.  */
4268c7a68eb7Smrg 		  if (inhibit_defer_pop == 0
4269c7a68eb7Smrg 		      && (combine_pending_stack_adjustment_and_call
4270c7a68eb7Smrg 			  (&needed,
4271c7a68eb7Smrg 			   unadjusted_args_size,
427210d565efSmrg 			   &adjusted_args_size,
4273c7a68eb7Smrg 			   preferred_unit_stack_boundary)))
4274c7a68eb7Smrg 		    {
427510d565efSmrg 		      /* combine_pending_stack_adjustment_and_call computes
427610d565efSmrg 			 an adjustment before the arguments are allocated.
427710d565efSmrg 			 Account for them and see whether or not the stack
427810d565efSmrg 			 needs to go up or down.  */
427910d565efSmrg 		      needed = unadjusted_args_size - needed;
428010d565efSmrg 
4281c7a68eb7Smrg 		      /* Checked by
4282c7a68eb7Smrg 			 combine_pending_stack_adjustment_and_call.  */
4283c7a68eb7Smrg 		      gcc_checking_assert (ordered_p (needed, 0));
4284c7a68eb7Smrg 		      if (maybe_lt (needed, 0))
428510d565efSmrg 			{
428610d565efSmrg 			  /* We're releasing stack space.  */
428710d565efSmrg 			  /* ??? We can avoid any adjustment at all if we're
428810d565efSmrg 			     already aligned.  FIXME.  */
428910d565efSmrg 			  pending_stack_adjust = -needed;
429010d565efSmrg 			  do_pending_stack_adjust ();
429110d565efSmrg 			  needed = 0;
429210d565efSmrg 			}
429310d565efSmrg 		      else
429410d565efSmrg 			/* We need to allocate space.  We'll do that in
429510d565efSmrg 			   push_block below.  */
429610d565efSmrg 			pending_stack_adjust = 0;
429710d565efSmrg 		    }
429810d565efSmrg 
429910d565efSmrg 		  /* Special case this because overhead of `push_block' in
430010d565efSmrg 		     this case is non-trivial.  */
4301c7a68eb7Smrg 		  if (known_eq (needed, 0))
430210d565efSmrg 		    argblock = virtual_outgoing_args_rtx;
430310d565efSmrg 		  else
430410d565efSmrg 		    {
4305c7a68eb7Smrg 		      rtx needed_rtx = gen_int_mode (needed, Pmode);
4306c7a68eb7Smrg 		      argblock = push_block (needed_rtx, 0, 0);
430710d565efSmrg 		      if (ARGS_GROW_DOWNWARD)
430810d565efSmrg 			argblock = plus_constant (Pmode, argblock, needed);
430910d565efSmrg 		    }
431010d565efSmrg 
431110d565efSmrg 		  /* We only really need to call `copy_to_reg' in the case
431210d565efSmrg 		     where push insns are going to be used to pass ARGBLOCK
431310d565efSmrg 		     to a function call in ARGS.  In that case, the stack
431410d565efSmrg 		     pointer changes value from the allocation point to the
431510d565efSmrg 		     call point, and hence the value of
431610d565efSmrg 		     VIRTUAL_OUTGOING_ARGS_RTX changes as well.  But might
431710d565efSmrg 		     as well always do it.  */
431810d565efSmrg 		  argblock = copy_to_reg (argblock);
431910d565efSmrg 		}
432010d565efSmrg 	    }
432110d565efSmrg 	}
432210d565efSmrg 
432310d565efSmrg       if (ACCUMULATE_OUTGOING_ARGS)
432410d565efSmrg 	{
432510d565efSmrg 	  /* The save/restore code in store_one_arg handles all
432610d565efSmrg 	     cases except one: a constructor call (including a C
432710d565efSmrg 	     function returning a BLKmode struct) to initialize
432810d565efSmrg 	     an argument.  */
432910d565efSmrg 	  if (stack_arg_under_construction)
433010d565efSmrg 	    {
433110d565efSmrg 	      rtx push_size
4332c7a68eb7Smrg 		= (gen_int_mode
4333c7a68eb7Smrg 		   (adjusted_args_size.constant
4334c7a68eb7Smrg 		    + (OUTGOING_REG_PARM_STACK_SPACE (!fndecl ? fntype
4335c7a68eb7Smrg 						      : TREE_TYPE (fndecl))
4336c7a68eb7Smrg 		       ? 0 : reg_parm_stack_space), Pmode));
433710d565efSmrg 	      if (old_stack_level == 0)
433810d565efSmrg 		{
433910d565efSmrg 		  emit_stack_save (SAVE_BLOCK, &old_stack_level);
434010d565efSmrg 		  old_stack_pointer_delta = stack_pointer_delta;
434110d565efSmrg 		  old_pending_adj = pending_stack_adjust;
434210d565efSmrg 		  pending_stack_adjust = 0;
434310d565efSmrg 		  /* stack_arg_under_construction says whether a stack
434410d565efSmrg 		     arg is being constructed at the old stack level.
434510d565efSmrg 		     Pushing the stack gets a clean outgoing argument
434610d565efSmrg 		     block.  */
434710d565efSmrg 		  old_stack_arg_under_construction
434810d565efSmrg 		    = stack_arg_under_construction;
434910d565efSmrg 		  stack_arg_under_construction = 0;
435010d565efSmrg 		  /* Make a new map for the new argument list.  */
435110d565efSmrg 		  free (stack_usage_map_buf);
435210d565efSmrg 		  stack_usage_map_buf = XCNEWVEC (char, highest_outgoing_arg_in_use);
435310d565efSmrg 		  stack_usage_map = stack_usage_map_buf;
435410d565efSmrg 		  highest_outgoing_arg_in_use = 0;
4355c7a68eb7Smrg 		  stack_usage_watermark = HOST_WIDE_INT_M1U;
435610d565efSmrg 		}
435710d565efSmrg 	      /* We can pass TRUE as the last argument because we just
435810d565efSmrg 		 saved the stack pointer and will restore it right after
435910d565efSmrg 		 the call.  */
4360c7a68eb7Smrg 	      allocate_dynamic_stack_space (push_size, 0, BIGGEST_ALIGNMENT,
4361c7a68eb7Smrg 					    -1, true);
436210d565efSmrg 	    }
436310d565efSmrg 
436410d565efSmrg 	  /* If argument evaluation might modify the stack pointer,
436510d565efSmrg 	     copy the address of the argument list to a register.  */
436610d565efSmrg 	  for (i = 0; i < num_actuals; i++)
436710d565efSmrg 	    if (args[i].pass_on_stack)
436810d565efSmrg 	      {
436910d565efSmrg 		argblock = copy_addr_to_reg (argblock);
437010d565efSmrg 		break;
437110d565efSmrg 	      }
437210d565efSmrg 	}
437310d565efSmrg 
437410d565efSmrg       compute_argument_addresses (args, argblock, num_actuals);
437510d565efSmrg 
437610d565efSmrg       /* Stack is properly aligned, pops can't safely be deferred during
437710d565efSmrg 	 the evaluation of the arguments.  */
437810d565efSmrg       NO_DEFER_POP;
437910d565efSmrg 
438010d565efSmrg       /* Precompute all register parameters.  It isn't safe to compute
438110d565efSmrg 	 anything once we have started filling any specific hard regs.
438210d565efSmrg 	 TLS symbols sometimes need a call to resolve.  Precompute
438310d565efSmrg 	 register parameters before any stack pointer manipulation
438410d565efSmrg 	 to avoid unaligned stack in the called function.  */
438510d565efSmrg       precompute_register_parameters (num_actuals, args, &reg_parm_seen);
438610d565efSmrg 
438710d565efSmrg       OK_DEFER_POP;
438810d565efSmrg 
438910d565efSmrg       /* Perform stack alignment before the first push (the last arg).  */
439010d565efSmrg       if (argblock == 0
4391c7a68eb7Smrg 	  && maybe_gt (adjusted_args_size.constant, reg_parm_stack_space)
4392c7a68eb7Smrg 	  && maybe_ne (adjusted_args_size.constant, unadjusted_args_size))
439310d565efSmrg 	{
439410d565efSmrg 	  /* When the stack adjustment is pending, we get better code
439510d565efSmrg 	     by combining the adjustments.  */
4396c7a68eb7Smrg 	  if (maybe_ne (pending_stack_adjust, 0)
4397c7a68eb7Smrg 	      && ! inhibit_defer_pop
4398c7a68eb7Smrg 	      && (combine_pending_stack_adjustment_and_call
4399c7a68eb7Smrg 		  (&pending_stack_adjust,
4400c7a68eb7Smrg 		   unadjusted_args_size,
440110d565efSmrg 		   &adjusted_args_size,
4402c7a68eb7Smrg 		   preferred_unit_stack_boundary)))
440310d565efSmrg 	    do_pending_stack_adjust ();
440410d565efSmrg 	  else if (argblock == 0)
4405c7a68eb7Smrg 	    anti_adjust_stack (gen_int_mode (adjusted_args_size.constant
4406c7a68eb7Smrg 					     - unadjusted_args_size,
4407c7a68eb7Smrg 					     Pmode));
440810d565efSmrg 	}
440910d565efSmrg       /* Now that the stack is properly aligned, pops can't safely
441010d565efSmrg 	 be deferred during the evaluation of the arguments.  */
441110d565efSmrg       NO_DEFER_POP;
441210d565efSmrg 
441310d565efSmrg       /* Record the maximum pushed stack space size.  We need to delay
441410d565efSmrg 	 doing it this far to take into account the optimization done
441510d565efSmrg 	 by combine_pending_stack_adjustment_and_call.  */
441610d565efSmrg       if (flag_stack_usage_info
441710d565efSmrg 	  && !ACCUMULATE_OUTGOING_ARGS
441810d565efSmrg 	  && pass
441910d565efSmrg 	  && adjusted_args_size.var == 0)
442010d565efSmrg 	{
4421c7a68eb7Smrg 	  poly_int64 pushed = (adjusted_args_size.constant
4422c7a68eb7Smrg 			       + pending_stack_adjust);
4423c7a68eb7Smrg 	  current_function_pushed_stack_size
4424c7a68eb7Smrg 	    = upper_bound (current_function_pushed_stack_size, pushed);
442510d565efSmrg 	}
442610d565efSmrg 
442710d565efSmrg       funexp = rtx_for_function_call (fndecl, addr);
442810d565efSmrg 
442910d565efSmrg       if (CALL_EXPR_STATIC_CHAIN (exp))
443010d565efSmrg 	static_chain_value = expand_normal (CALL_EXPR_STATIC_CHAIN (exp));
443110d565efSmrg       else
443210d565efSmrg 	static_chain_value = 0;
443310d565efSmrg 
443410d565efSmrg #ifdef REG_PARM_STACK_SPACE
443510d565efSmrg       /* Save the fixed argument area if it's part of the caller's frame and
443610d565efSmrg 	 is clobbered by argument setup for this call.  */
443710d565efSmrg       if (ACCUMULATE_OUTGOING_ARGS && pass)
443810d565efSmrg 	save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
443910d565efSmrg 					      &low_to_save, &high_to_save);
444010d565efSmrg #endif
444110d565efSmrg 
444210d565efSmrg       /* Now store (and compute if necessary) all non-register parms.
444310d565efSmrg 	 These come before register parms, since they can require block-moves,
444410d565efSmrg 	 which could clobber the registers used for register parms.
444510d565efSmrg 	 Parms which have partial registers are not stored here,
444610d565efSmrg 	 but we do preallocate space here if they want that.  */
444710d565efSmrg 
444810d565efSmrg       for (i = 0; i < num_actuals; i++)
444910d565efSmrg 	{
44500fc04c29Smrg 	  if (args[i].reg == 0 || args[i].pass_on_stack)
445110d565efSmrg 	    {
445210d565efSmrg 	      rtx_insn *before_arg = get_last_insn ();
445310d565efSmrg 
445410d565efSmrg 	      /* We don't allow passing huge (> 2^30 B) arguments
445510d565efSmrg 	         by value.  It would cause an overflow later on.  */
4456c7a68eb7Smrg 	      if (constant_lower_bound (adjusted_args_size.constant)
445710d565efSmrg 		  >= (1 << (HOST_BITS_PER_INT - 2)))
445810d565efSmrg 	        {
445910d565efSmrg 	          sorry ("passing too large argument on stack");
446010d565efSmrg 		  continue;
446110d565efSmrg 		}
446210d565efSmrg 
446310d565efSmrg 	      if (store_one_arg (&args[i], argblock, flags,
446410d565efSmrg 				 adjusted_args_size.var != 0,
446510d565efSmrg 				 reg_parm_stack_space)
446610d565efSmrg 		  || (pass == 0
446710d565efSmrg 		      && check_sibcall_argument_overlap (before_arg,
446810d565efSmrg 							 &args[i], 1)))
446910d565efSmrg 		sibcall_failure = 1;
447010d565efSmrg 	      }
447110d565efSmrg 
447210d565efSmrg 	  if (args[i].stack)
447310d565efSmrg 	    call_fusage
447410d565efSmrg 	      = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[i].tree_value)),
447510d565efSmrg 				   gen_rtx_USE (VOIDmode, args[i].stack),
447610d565efSmrg 				   call_fusage);
447710d565efSmrg 	}
447810d565efSmrg 
447910d565efSmrg       /* If we have a parm that is passed in registers but not in memory
448010d565efSmrg 	 and whose alignment does not permit a direct copy into registers,
448110d565efSmrg 	 make a group of pseudos that correspond to each register that we
448210d565efSmrg 	 will later fill.  */
448310d565efSmrg       if (STRICT_ALIGNMENT)
448410d565efSmrg 	store_unaligned_arguments_into_pseudos (args, num_actuals);
448510d565efSmrg 
448610d565efSmrg       /* Now store any partially-in-registers parm.
448710d565efSmrg 	 This is the last place a block-move can happen.  */
448810d565efSmrg       if (reg_parm_seen)
448910d565efSmrg 	for (i = 0; i < num_actuals; i++)
449010d565efSmrg 	  if (args[i].partial != 0 && ! args[i].pass_on_stack)
449110d565efSmrg 	    {
449210d565efSmrg 	      rtx_insn *before_arg = get_last_insn ();
449310d565efSmrg 
449410d565efSmrg 	     /* On targets with weird calling conventions (e.g. PA) it's
449510d565efSmrg 		hard to ensure that all cases of argument overlap between
449610d565efSmrg 		stack and registers work.  Play it safe and bail out.  */
449710d565efSmrg 	      if (ARGS_GROW_DOWNWARD && !STACK_GROWS_DOWNWARD)
449810d565efSmrg 		{
449910d565efSmrg 		  sibcall_failure = 1;
450010d565efSmrg 		  break;
450110d565efSmrg 		}
450210d565efSmrg 
450310d565efSmrg 	      if (store_one_arg (&args[i], argblock, flags,
450410d565efSmrg 				 adjusted_args_size.var != 0,
450510d565efSmrg 				 reg_parm_stack_space)
450610d565efSmrg 		  || (pass == 0
450710d565efSmrg 		      && check_sibcall_argument_overlap (before_arg,
450810d565efSmrg 							 &args[i], 1)))
450910d565efSmrg 		sibcall_failure = 1;
451010d565efSmrg 	    }
451110d565efSmrg 
451210d565efSmrg       bool any_regs = false;
451310d565efSmrg       for (i = 0; i < num_actuals; i++)
451410d565efSmrg 	if (args[i].reg != NULL_RTX)
451510d565efSmrg 	  {
451610d565efSmrg 	    any_regs = true;
451710d565efSmrg 	    targetm.calls.call_args (args[i].reg, funtype);
451810d565efSmrg 	  }
451910d565efSmrg       if (!any_regs)
452010d565efSmrg 	targetm.calls.call_args (pc_rtx, funtype);
452110d565efSmrg 
452210d565efSmrg       /* Figure out the register where the value, if any, will come back.  */
452310d565efSmrg       valreg = 0;
452410d565efSmrg       if (TYPE_MODE (rettype) != VOIDmode
452510d565efSmrg 	  && ! structure_value_addr)
452610d565efSmrg 	{
452710d565efSmrg 	  if (pcc_struct_value)
452810d565efSmrg 	    valreg = hard_function_value (build_pointer_type (rettype),
452910d565efSmrg 					  fndecl, NULL, (pass == 0));
453010d565efSmrg 	  else
453110d565efSmrg 	    valreg = hard_function_value (rettype, fndecl, fntype,
453210d565efSmrg 					  (pass == 0));
453310d565efSmrg 
453410d565efSmrg 	  /* If VALREG is a PARALLEL whose first member has a zero
453510d565efSmrg 	     offset, use that.  This is for targets such as m68k that
453610d565efSmrg 	     return the same value in multiple places.  */
453710d565efSmrg 	  if (GET_CODE (valreg) == PARALLEL)
453810d565efSmrg 	    {
453910d565efSmrg 	      rtx elem = XVECEXP (valreg, 0, 0);
454010d565efSmrg 	      rtx where = XEXP (elem, 0);
454110d565efSmrg 	      rtx offset = XEXP (elem, 1);
454210d565efSmrg 	      if (offset == const0_rtx
454310d565efSmrg 		  && GET_MODE (where) == GET_MODE (valreg))
454410d565efSmrg 		valreg = where;
454510d565efSmrg 	    }
454610d565efSmrg 	}
454710d565efSmrg 
454810d565efSmrg       /* If register arguments require space on the stack and stack space
454910d565efSmrg 	 was not preallocated, allocate stack space here for arguments
455010d565efSmrg 	 passed in registers.  */
455110d565efSmrg       if (OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl)))
455210d565efSmrg           && !ACCUMULATE_OUTGOING_ARGS
455310d565efSmrg 	  && must_preallocate == 0 && reg_parm_stack_space > 0)
455410d565efSmrg 	anti_adjust_stack (GEN_INT (reg_parm_stack_space));
455510d565efSmrg 
455610d565efSmrg       /* Pass the function the address in which to return a
455710d565efSmrg 	 structure value.  */
455810d565efSmrg       if (pass != 0 && structure_value_addr && ! structure_value_addr_parm)
455910d565efSmrg 	{
456010d565efSmrg 	  structure_value_addr
456110d565efSmrg 	    = convert_memory_address (Pmode, structure_value_addr);
456210d565efSmrg 	  emit_move_insn (struct_value,
456310d565efSmrg 			  force_reg (Pmode,
456410d565efSmrg 				     force_operand (structure_value_addr,
456510d565efSmrg 						    NULL_RTX)));
456610d565efSmrg 
456710d565efSmrg 	  if (REG_P (struct_value))
456810d565efSmrg 	    use_reg (&call_fusage, struct_value);
456910d565efSmrg 	}
457010d565efSmrg 
457110d565efSmrg       after_args = get_last_insn ();
457210d565efSmrg       funexp = prepare_call_address (fndecl ? fndecl : fntype, funexp,
457310d565efSmrg 				     static_chain_value, &call_fusage,
457410d565efSmrg 				     reg_parm_seen, flags);
457510d565efSmrg 
457610d565efSmrg       load_register_parameters (args, num_actuals, &call_fusage, flags,
457710d565efSmrg 				pass == 0, &sibcall_failure);
457810d565efSmrg 
457910d565efSmrg       /* Save a pointer to the last insn before the call, so that we can
458010d565efSmrg 	 later safely search backwards to find the CALL_INSN.  */
458110d565efSmrg       before_call = get_last_insn ();
458210d565efSmrg 
458310d565efSmrg       /* Set up next argument register.  For sibling calls on machines
458410d565efSmrg 	 with register windows this should be the incoming register.  */
458510d565efSmrg       if (pass == 0)
4586*ec02198aSmrg 	next_arg_reg = targetm.calls.function_incoming_arg
4587*ec02198aSmrg 	  (args_so_far, function_arg_info::end_marker ());
458810d565efSmrg       else
4589*ec02198aSmrg 	next_arg_reg = targetm.calls.function_arg
4590*ec02198aSmrg 	  (args_so_far, function_arg_info::end_marker ());
459110d565efSmrg 
459210d565efSmrg       if (pass == 1 && (return_flags & ERF_RETURNS_ARG))
459310d565efSmrg 	{
459410d565efSmrg 	  int arg_nr = return_flags & ERF_RETURN_ARG_MASK;
459510d565efSmrg 	  arg_nr = num_actuals - arg_nr - 1;
459610d565efSmrg 	  if (arg_nr >= 0
459710d565efSmrg 	      && arg_nr < num_actuals
459810d565efSmrg 	      && args[arg_nr].reg
459910d565efSmrg 	      && valreg
460010d565efSmrg 	      && REG_P (valreg)
460110d565efSmrg 	      && GET_MODE (args[arg_nr].reg) == GET_MODE (valreg))
460210d565efSmrg 	  call_fusage
460310d565efSmrg 	    = gen_rtx_EXPR_LIST (TYPE_MODE (TREE_TYPE (args[arg_nr].tree_value)),
460410d565efSmrg 				 gen_rtx_SET (valreg, args[arg_nr].reg),
460510d565efSmrg 				 call_fusage);
460610d565efSmrg 	}
460710d565efSmrg       /* All arguments and registers used for the call must be set up by
460810d565efSmrg 	 now!  */
460910d565efSmrg 
461010d565efSmrg       /* Stack must be properly aligned now.  */
461110d565efSmrg       gcc_assert (!pass
4612c7a68eb7Smrg 		  || multiple_p (stack_pointer_delta,
4613c7a68eb7Smrg 				 preferred_unit_stack_boundary));
461410d565efSmrg 
461510d565efSmrg       /* Generate the actual call instruction.  */
461610d565efSmrg       emit_call_1 (funexp, exp, fndecl, funtype, unadjusted_args_size,
461710d565efSmrg 		   adjusted_args_size.constant, struct_value_size,
461810d565efSmrg 		   next_arg_reg, valreg, old_inhibit_defer_pop, call_fusage,
461910d565efSmrg 		   flags, args_so_far);
462010d565efSmrg 
462110d565efSmrg       if (flag_ipa_ra)
462210d565efSmrg 	{
462310d565efSmrg 	  rtx_call_insn *last;
462410d565efSmrg 	  rtx datum = NULL_RTX;
462510d565efSmrg 	  if (fndecl != NULL_TREE)
462610d565efSmrg 	    {
462710d565efSmrg 	      datum = XEXP (DECL_RTL (fndecl), 0);
462810d565efSmrg 	      gcc_assert (datum != NULL_RTX
462910d565efSmrg 			  && GET_CODE (datum) == SYMBOL_REF);
463010d565efSmrg 	    }
463110d565efSmrg 	  last = last_call_insn ();
463210d565efSmrg 	  add_reg_note (last, REG_CALL_DECL, datum);
463310d565efSmrg 	}
463410d565efSmrg 
463510d565efSmrg       /* If the call setup or the call itself overlaps with anything
463610d565efSmrg 	 of the argument setup we probably clobbered our call address.
463710d565efSmrg 	 In that case we can't do sibcalls.  */
463810d565efSmrg       if (pass == 0
463910d565efSmrg 	  && check_sibcall_argument_overlap (after_args, 0, 0))
464010d565efSmrg 	sibcall_failure = 1;
464110d565efSmrg 
464210d565efSmrg       /* If a non-BLKmode value is returned at the most significant end
464310d565efSmrg 	 of a register, shift the register right by the appropriate amount
464410d565efSmrg 	 and update VALREG accordingly.  BLKmode values are handled by the
464510d565efSmrg 	 group load/store machinery below.  */
464610d565efSmrg       if (!structure_value_addr
464710d565efSmrg 	  && !pcc_struct_value
464810d565efSmrg 	  && TYPE_MODE (rettype) != VOIDmode
464910d565efSmrg 	  && TYPE_MODE (rettype) != BLKmode
465010d565efSmrg 	  && REG_P (valreg)
465110d565efSmrg 	  && targetm.calls.return_in_msb (rettype))
465210d565efSmrg 	{
465310d565efSmrg 	  if (shift_return_value (TYPE_MODE (rettype), false, valreg))
465410d565efSmrg 	    sibcall_failure = 1;
465510d565efSmrg 	  valreg = gen_rtx_REG (TYPE_MODE (rettype), REGNO (valreg));
465610d565efSmrg 	}
465710d565efSmrg 
465810d565efSmrg       if (pass && (flags & ECF_MALLOC))
465910d565efSmrg 	{
466010d565efSmrg 	  rtx temp = gen_reg_rtx (GET_MODE (valreg));
466110d565efSmrg 	  rtx_insn *last, *insns;
466210d565efSmrg 
466310d565efSmrg 	  /* The return value from a malloc-like function is a pointer.  */
466410d565efSmrg 	  if (TREE_CODE (rettype) == POINTER_TYPE)
466510d565efSmrg 	    mark_reg_pointer (temp, MALLOC_ABI_ALIGNMENT);
466610d565efSmrg 
466710d565efSmrg 	  emit_move_insn (temp, valreg);
466810d565efSmrg 
466910d565efSmrg 	  /* The return value from a malloc-like function cannot alias
467010d565efSmrg 	     anything else.  */
467110d565efSmrg 	  last = get_last_insn ();
467210d565efSmrg 	  add_reg_note (last, REG_NOALIAS, temp);
467310d565efSmrg 
467410d565efSmrg 	  /* Write out the sequence.  */
467510d565efSmrg 	  insns = get_insns ();
467610d565efSmrg 	  end_sequence ();
467710d565efSmrg 	  emit_insn (insns);
467810d565efSmrg 	  valreg = temp;
467910d565efSmrg 	}
468010d565efSmrg 
468110d565efSmrg       /* For calls to `setjmp', etc., inform
468210d565efSmrg 	 function.c:setjmp_warnings that it should complain if
468310d565efSmrg 	 nonvolatile values are live.  For functions that cannot
468410d565efSmrg 	 return, inform flow that control does not fall through.  */
468510d565efSmrg 
468610d565efSmrg       if ((flags & ECF_NORETURN) || pass == 0)
468710d565efSmrg 	{
468810d565efSmrg 	  /* The barrier must be emitted
468910d565efSmrg 	     immediately after the CALL_INSN.  Some ports emit more
469010d565efSmrg 	     than just a CALL_INSN above, so we must search for it here.  */
469110d565efSmrg 
469210d565efSmrg 	  rtx_insn *last = get_last_insn ();
469310d565efSmrg 	  while (!CALL_P (last))
469410d565efSmrg 	    {
469510d565efSmrg 	      last = PREV_INSN (last);
469610d565efSmrg 	      /* There was no CALL_INSN?  */
469710d565efSmrg 	      gcc_assert (last != before_call);
469810d565efSmrg 	    }
469910d565efSmrg 
470010d565efSmrg 	  emit_barrier_after (last);
470110d565efSmrg 
470210d565efSmrg 	  /* Stack adjustments after a noreturn call are dead code.
470310d565efSmrg 	     However when NO_DEFER_POP is in effect, we must preserve
470410d565efSmrg 	     stack_pointer_delta.  */
470510d565efSmrg 	  if (inhibit_defer_pop == 0)
470610d565efSmrg 	    {
470710d565efSmrg 	      stack_pointer_delta = old_stack_allocated;
470810d565efSmrg 	      pending_stack_adjust = 0;
470910d565efSmrg 	    }
471010d565efSmrg 	}
471110d565efSmrg 
471210d565efSmrg       /* If value type not void, return an rtx for the value.  */
471310d565efSmrg 
471410d565efSmrg       if (TYPE_MODE (rettype) == VOIDmode
471510d565efSmrg 	  || ignore)
471610d565efSmrg 	target = const0_rtx;
471710d565efSmrg       else if (structure_value_addr)
471810d565efSmrg 	{
471910d565efSmrg 	  if (target == 0 || !MEM_P (target))
472010d565efSmrg 	    {
472110d565efSmrg 	      target
472210d565efSmrg 		= gen_rtx_MEM (TYPE_MODE (rettype),
472310d565efSmrg 			       memory_address (TYPE_MODE (rettype),
472410d565efSmrg 					       structure_value_addr));
472510d565efSmrg 	      set_mem_attributes (target, rettype, 1);
472610d565efSmrg 	    }
472710d565efSmrg 	}
472810d565efSmrg       else if (pcc_struct_value)
472910d565efSmrg 	{
473010d565efSmrg 	  /* This is the special C++ case where we need to
473110d565efSmrg 	     know what the true target was.  We take care to
473210d565efSmrg 	     never use this value more than once in one expression.  */
473310d565efSmrg 	  target = gen_rtx_MEM (TYPE_MODE (rettype),
473410d565efSmrg 				copy_to_reg (valreg));
473510d565efSmrg 	  set_mem_attributes (target, rettype, 1);
473610d565efSmrg 	}
473710d565efSmrg       /* Handle calls that return values in multiple non-contiguous locations.
473810d565efSmrg 	 The Irix 6 ABI has examples of this.  */
473910d565efSmrg       else if (GET_CODE (valreg) == PARALLEL)
474010d565efSmrg 	{
474110d565efSmrg 	  if (target == 0)
474210d565efSmrg 	    target = emit_group_move_into_temps (valreg);
474310d565efSmrg 	  else if (rtx_equal_p (target, valreg))
474410d565efSmrg 	    ;
474510d565efSmrg 	  else if (GET_CODE (target) == PARALLEL)
474610d565efSmrg 	    /* Handle the result of a emit_group_move_into_temps
474710d565efSmrg 	       call in the previous pass.  */
474810d565efSmrg 	    emit_group_move (target, valreg);
474910d565efSmrg 	  else
475010d565efSmrg 	    emit_group_store (target, valreg, rettype,
475110d565efSmrg 			      int_size_in_bytes (rettype));
475210d565efSmrg 	}
475310d565efSmrg       else if (target
475410d565efSmrg 	       && GET_MODE (target) == TYPE_MODE (rettype)
475510d565efSmrg 	       && GET_MODE (target) == GET_MODE (valreg))
475610d565efSmrg 	{
475710d565efSmrg 	  bool may_overlap = false;
475810d565efSmrg 
475910d565efSmrg 	  /* We have to copy a return value in a CLASS_LIKELY_SPILLED hard
476010d565efSmrg 	     reg to a plain register.  */
476110d565efSmrg 	  if (!REG_P (target) || HARD_REGISTER_P (target))
476210d565efSmrg 	    valreg = avoid_likely_spilled_reg (valreg);
476310d565efSmrg 
476410d565efSmrg 	  /* If TARGET is a MEM in the argument area, and we have
476510d565efSmrg 	     saved part of the argument area, then we can't store
476610d565efSmrg 	     directly into TARGET as it may get overwritten when we
476710d565efSmrg 	     restore the argument save area below.  Don't work too
476810d565efSmrg 	     hard though and simply force TARGET to a register if it
476910d565efSmrg 	     is a MEM; the optimizer is quite likely to sort it out.  */
477010d565efSmrg 	  if (ACCUMULATE_OUTGOING_ARGS && pass && MEM_P (target))
477110d565efSmrg 	    for (i = 0; i < num_actuals; i++)
477210d565efSmrg 	      if (args[i].save_area)
477310d565efSmrg 		{
477410d565efSmrg 		  may_overlap = true;
477510d565efSmrg 		  break;
477610d565efSmrg 		}
477710d565efSmrg 
477810d565efSmrg 	  if (may_overlap)
477910d565efSmrg 	    target = copy_to_reg (valreg);
478010d565efSmrg 	  else
478110d565efSmrg 	    {
478210d565efSmrg 	      /* TARGET and VALREG cannot be equal at this point
478310d565efSmrg 		 because the latter would not have
478410d565efSmrg 		 REG_FUNCTION_VALUE_P true, while the former would if
478510d565efSmrg 		 it were referring to the same register.
478610d565efSmrg 
478710d565efSmrg 		 If they refer to the same register, this move will be
478810d565efSmrg 		 a no-op, except when function inlining is being
478910d565efSmrg 		 done.  */
479010d565efSmrg 	      emit_move_insn (target, valreg);
479110d565efSmrg 
479210d565efSmrg 	      /* If we are setting a MEM, this code must be executed.
479310d565efSmrg 		 Since it is emitted after the call insn, sibcall
479410d565efSmrg 		 optimization cannot be performed in that case.  */
479510d565efSmrg 	      if (MEM_P (target))
479610d565efSmrg 		sibcall_failure = 1;
479710d565efSmrg 	    }
479810d565efSmrg 	}
479910d565efSmrg       else
480010d565efSmrg 	target = copy_to_reg (avoid_likely_spilled_reg (valreg));
480110d565efSmrg 
480210d565efSmrg       /* If we promoted this return value, make the proper SUBREG.
480310d565efSmrg          TARGET might be const0_rtx here, so be careful.  */
480410d565efSmrg       if (REG_P (target)
480510d565efSmrg 	  && TYPE_MODE (rettype) != BLKmode
480610d565efSmrg 	  && GET_MODE (target) != TYPE_MODE (rettype))
480710d565efSmrg 	{
480810d565efSmrg 	  tree type = rettype;
480910d565efSmrg 	  int unsignedp = TYPE_UNSIGNED (type);
481010d565efSmrg 	  machine_mode pmode;
481110d565efSmrg 
481210d565efSmrg 	  /* Ensure we promote as expected, and get the new unsignedness.  */
481310d565efSmrg 	  pmode = promote_function_mode (type, TYPE_MODE (type), &unsignedp,
481410d565efSmrg 					 funtype, 1);
481510d565efSmrg 	  gcc_assert (GET_MODE (target) == pmode);
481610d565efSmrg 
4817c7a68eb7Smrg 	  poly_uint64 offset = subreg_lowpart_offset (TYPE_MODE (type),
4818c7a68eb7Smrg 						      GET_MODE (target));
481910d565efSmrg 	  target = gen_rtx_SUBREG (TYPE_MODE (type), target, offset);
482010d565efSmrg 	  SUBREG_PROMOTED_VAR_P (target) = 1;
482110d565efSmrg 	  SUBREG_PROMOTED_SET (target, unsignedp);
482210d565efSmrg 	}
482310d565efSmrg 
482410d565efSmrg       /* If size of args is variable or this was a constructor call for a stack
482510d565efSmrg 	 argument, restore saved stack-pointer value.  */
482610d565efSmrg 
482710d565efSmrg       if (old_stack_level)
482810d565efSmrg 	{
482910d565efSmrg 	  rtx_insn *prev = get_last_insn ();
483010d565efSmrg 
483110d565efSmrg 	  emit_stack_restore (SAVE_BLOCK, old_stack_level);
483210d565efSmrg 	  stack_pointer_delta = old_stack_pointer_delta;
483310d565efSmrg 
483410d565efSmrg 	  fixup_args_size_notes (prev, get_last_insn (), stack_pointer_delta);
483510d565efSmrg 
483610d565efSmrg 	  pending_stack_adjust = old_pending_adj;
483710d565efSmrg 	  old_stack_allocated = stack_pointer_delta - pending_stack_adjust;
483810d565efSmrg 	  stack_arg_under_construction = old_stack_arg_under_construction;
483910d565efSmrg 	  highest_outgoing_arg_in_use = initial_highest_arg_in_use;
484010d565efSmrg 	  stack_usage_map = initial_stack_usage_map;
4841c7a68eb7Smrg 	  stack_usage_watermark = initial_stack_usage_watermark;
484210d565efSmrg 	  sibcall_failure = 1;
484310d565efSmrg 	}
484410d565efSmrg       else if (ACCUMULATE_OUTGOING_ARGS && pass)
484510d565efSmrg 	{
484610d565efSmrg #ifdef REG_PARM_STACK_SPACE
484710d565efSmrg 	  if (save_area)
484810d565efSmrg 	    restore_fixed_argument_area (save_area, argblock,
484910d565efSmrg 					 high_to_save, low_to_save);
485010d565efSmrg #endif
485110d565efSmrg 
485210d565efSmrg 	  /* If we saved any argument areas, restore them.  */
485310d565efSmrg 	  for (i = 0; i < num_actuals; i++)
485410d565efSmrg 	    if (args[i].save_area)
485510d565efSmrg 	      {
485610d565efSmrg 		machine_mode save_mode = GET_MODE (args[i].save_area);
485710d565efSmrg 		rtx stack_area
485810d565efSmrg 		  = gen_rtx_MEM (save_mode,
485910d565efSmrg 				 memory_address (save_mode,
486010d565efSmrg 						 XEXP (args[i].stack_slot, 0)));
486110d565efSmrg 
486210d565efSmrg 		if (save_mode != BLKmode)
486310d565efSmrg 		  emit_move_insn (stack_area, args[i].save_area);
486410d565efSmrg 		else
486510d565efSmrg 		  emit_block_move (stack_area, args[i].save_area,
4866c7a68eb7Smrg 				   (gen_int_mode
4867c7a68eb7Smrg 				    (args[i].locate.size.constant, Pmode)),
486810d565efSmrg 				   BLOCK_OP_CALL_PARM);
486910d565efSmrg 	      }
487010d565efSmrg 
487110d565efSmrg 	  highest_outgoing_arg_in_use = initial_highest_arg_in_use;
487210d565efSmrg 	  stack_usage_map = initial_stack_usage_map;
4873c7a68eb7Smrg 	  stack_usage_watermark = initial_stack_usage_watermark;
487410d565efSmrg 	}
487510d565efSmrg 
487610d565efSmrg       /* If this was alloca, record the new stack level.  */
487710d565efSmrg       if (flags & ECF_MAY_BE_ALLOCA)
487810d565efSmrg 	record_new_stack_level ();
487910d565efSmrg 
488010d565efSmrg       /* Free up storage we no longer need.  */
488110d565efSmrg       for (i = 0; i < num_actuals; ++i)
488210d565efSmrg 	free (args[i].aligned_regs);
488310d565efSmrg 
488410d565efSmrg       targetm.calls.end_call_args ();
488510d565efSmrg 
488610d565efSmrg       insns = get_insns ();
488710d565efSmrg       end_sequence ();
488810d565efSmrg 
488910d565efSmrg       if (pass == 0)
489010d565efSmrg 	{
489110d565efSmrg 	  tail_call_insns = insns;
489210d565efSmrg 
489310d565efSmrg 	  /* Restore the pending stack adjustment now that we have
489410d565efSmrg 	     finished generating the sibling call sequence.  */
489510d565efSmrg 
489610d565efSmrg 	  restore_pending_stack_adjust (&save);
489710d565efSmrg 
489810d565efSmrg 	  /* Prepare arg structure for next iteration.  */
489910d565efSmrg 	  for (i = 0; i < num_actuals; i++)
490010d565efSmrg 	    {
490110d565efSmrg 	      args[i].value = 0;
490210d565efSmrg 	      args[i].aligned_regs = 0;
490310d565efSmrg 	      args[i].stack = 0;
490410d565efSmrg 	    }
490510d565efSmrg 
490610d565efSmrg 	  sbitmap_free (stored_args_map);
490710d565efSmrg 	  internal_arg_pointer_exp_state.scan_start = NULL;
490810d565efSmrg 	  internal_arg_pointer_exp_state.cache.release ();
490910d565efSmrg 	}
491010d565efSmrg       else
491110d565efSmrg 	{
491210d565efSmrg 	  normal_call_insns = insns;
491310d565efSmrg 
491410d565efSmrg 	  /* Verify that we've deallocated all the stack we used.  */
491510d565efSmrg 	  gcc_assert ((flags & ECF_NORETURN)
4916c7a68eb7Smrg 		      || known_eq (old_stack_allocated,
4917c7a68eb7Smrg 				   stack_pointer_delta
4918c7a68eb7Smrg 				   - pending_stack_adjust));
491910d565efSmrg 	}
492010d565efSmrg 
492110d565efSmrg       /* If something prevents making this a sibling call,
492210d565efSmrg 	 zero out the sequence.  */
492310d565efSmrg       if (sibcall_failure)
492410d565efSmrg 	tail_call_insns = NULL;
492510d565efSmrg       else
492610d565efSmrg 	break;
492710d565efSmrg     }
492810d565efSmrg 
492910d565efSmrg   /* If tail call production succeeded, we need to remove REG_EQUIV notes on
493010d565efSmrg      arguments too, as argument area is now clobbered by the call.  */
493110d565efSmrg   if (tail_call_insns)
493210d565efSmrg     {
493310d565efSmrg       emit_insn (tail_call_insns);
493410d565efSmrg       crtl->tail_call_emit = true;
493510d565efSmrg     }
493610d565efSmrg   else
493710d565efSmrg     {
493810d565efSmrg       emit_insn (normal_call_insns);
493910d565efSmrg       if (try_tail_call)
494010d565efSmrg 	/* Ideally we'd emit a message for all of the ways that it could
494110d565efSmrg 	   have failed.  */
494210d565efSmrg 	maybe_complain_about_tail_call (exp, "tail call production failed");
494310d565efSmrg     }
494410d565efSmrg 
494510d565efSmrg   currently_expanding_call--;
494610d565efSmrg 
494710d565efSmrg   free (stack_usage_map_buf);
494810d565efSmrg   free (args);
494910d565efSmrg   return target;
495010d565efSmrg }
495110d565efSmrg 
495210d565efSmrg /* A sibling call sequence invalidates any REG_EQUIV notes made for
495310d565efSmrg    this function's incoming arguments.
495410d565efSmrg 
495510d565efSmrg    At the start of RTL generation we know the only REG_EQUIV notes
495610d565efSmrg    in the rtl chain are those for incoming arguments, so we can look
495710d565efSmrg    for REG_EQUIV notes between the start of the function and the
495810d565efSmrg    NOTE_INSN_FUNCTION_BEG.
495910d565efSmrg 
496010d565efSmrg    This is (slight) overkill.  We could keep track of the highest
496110d565efSmrg    argument we clobber and be more selective in removing notes, but it
496210d565efSmrg    does not seem to be worth the effort.  */
496310d565efSmrg 
496410d565efSmrg void
fixup_tail_calls(void)496510d565efSmrg fixup_tail_calls (void)
496610d565efSmrg {
496710d565efSmrg   rtx_insn *insn;
496810d565efSmrg 
496910d565efSmrg   for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
497010d565efSmrg     {
497110d565efSmrg       rtx note;
497210d565efSmrg 
497310d565efSmrg       /* There are never REG_EQUIV notes for the incoming arguments
497410d565efSmrg 	 after the NOTE_INSN_FUNCTION_BEG note, so stop if we see it.  */
497510d565efSmrg       if (NOTE_P (insn)
497610d565efSmrg 	  && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
497710d565efSmrg 	break;
497810d565efSmrg 
497910d565efSmrg       note = find_reg_note (insn, REG_EQUIV, 0);
498010d565efSmrg       if (note)
498110d565efSmrg 	remove_note (insn, note);
498210d565efSmrg       note = find_reg_note (insn, REG_EQUIV, 0);
498310d565efSmrg       gcc_assert (!note);
498410d565efSmrg     }
498510d565efSmrg }
498610d565efSmrg 
498710d565efSmrg /* Traverse a list of TYPES and expand all complex types into their
498810d565efSmrg    components.  */
498910d565efSmrg static tree
split_complex_types(tree types)499010d565efSmrg split_complex_types (tree types)
499110d565efSmrg {
499210d565efSmrg   tree p;
499310d565efSmrg 
499410d565efSmrg   /* Before allocating memory, check for the common case of no complex.  */
499510d565efSmrg   for (p = types; p; p = TREE_CHAIN (p))
499610d565efSmrg     {
499710d565efSmrg       tree type = TREE_VALUE (p);
499810d565efSmrg       if (TREE_CODE (type) == COMPLEX_TYPE
499910d565efSmrg 	  && targetm.calls.split_complex_arg (type))
500010d565efSmrg 	goto found;
500110d565efSmrg     }
500210d565efSmrg   return types;
500310d565efSmrg 
500410d565efSmrg  found:
500510d565efSmrg   types = copy_list (types);
500610d565efSmrg 
500710d565efSmrg   for (p = types; p; p = TREE_CHAIN (p))
500810d565efSmrg     {
500910d565efSmrg       tree complex_type = TREE_VALUE (p);
501010d565efSmrg 
501110d565efSmrg       if (TREE_CODE (complex_type) == COMPLEX_TYPE
501210d565efSmrg 	  && targetm.calls.split_complex_arg (complex_type))
501310d565efSmrg 	{
501410d565efSmrg 	  tree next, imag;
501510d565efSmrg 
501610d565efSmrg 	  /* Rewrite complex type with component type.  */
501710d565efSmrg 	  TREE_VALUE (p) = TREE_TYPE (complex_type);
501810d565efSmrg 	  next = TREE_CHAIN (p);
501910d565efSmrg 
502010d565efSmrg 	  /* Add another component type for the imaginary part.  */
502110d565efSmrg 	  imag = build_tree_list (NULL_TREE, TREE_VALUE (p));
502210d565efSmrg 	  TREE_CHAIN (p) = imag;
502310d565efSmrg 	  TREE_CHAIN (imag) = next;
502410d565efSmrg 
502510d565efSmrg 	  /* Skip the newly created node.  */
502610d565efSmrg 	  p = TREE_CHAIN (p);
502710d565efSmrg 	}
502810d565efSmrg     }
502910d565efSmrg 
503010d565efSmrg   return types;
503110d565efSmrg }
503210d565efSmrg 
5033c7a68eb7Smrg /* Output a library call to function ORGFUN (a SYMBOL_REF rtx)
5034c7a68eb7Smrg    for a value of mode OUTMODE,
5035c7a68eb7Smrg    with NARGS different arguments, passed as ARGS.
5036c7a68eb7Smrg    Store the return value if RETVAL is nonzero: store it in VALUE if
5037c7a68eb7Smrg    VALUE is nonnull, otherwise pick a convenient location.  In either
5038c7a68eb7Smrg    case return the location of the stored value.
503910d565efSmrg 
5040c7a68eb7Smrg    FN_TYPE should be LCT_NORMAL for `normal' calls, LCT_CONST for
5041c7a68eb7Smrg    `const' calls, LCT_PURE for `pure' calls, or another LCT_ value for
5042c7a68eb7Smrg    other types of library calls.  */
5043c7a68eb7Smrg 
5044c7a68eb7Smrg rtx
emit_library_call_value_1(int retval,rtx orgfun,rtx value,enum libcall_type fn_type,machine_mode outmode,int nargs,rtx_mode_t * args)504510d565efSmrg emit_library_call_value_1 (int retval, rtx orgfun, rtx value,
504610d565efSmrg 			   enum libcall_type fn_type,
5047c7a68eb7Smrg 			   machine_mode outmode, int nargs, rtx_mode_t *args)
504810d565efSmrg {
504910d565efSmrg   /* Total size in bytes of all the stack-parms scanned so far.  */
505010d565efSmrg   struct args_size args_size;
505110d565efSmrg   /* Size of arguments before any adjustments (such as rounding).  */
505210d565efSmrg   struct args_size original_args_size;
505310d565efSmrg   int argnum;
505410d565efSmrg   rtx fun;
505510d565efSmrg   /* Todo, choose the correct decl type of orgfun. Sadly this information
505610d565efSmrg      isn't present here, so we default to native calling abi here.  */
505710d565efSmrg   tree fndecl ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling abi ? */
505810d565efSmrg   tree fntype ATTRIBUTE_UNUSED = NULL_TREE; /* library calls default to host calling abi ? */
505910d565efSmrg   int count;
506010d565efSmrg   rtx argblock = 0;
506110d565efSmrg   CUMULATIVE_ARGS args_so_far_v;
506210d565efSmrg   cumulative_args_t args_so_far;
506310d565efSmrg   struct arg
506410d565efSmrg   {
506510d565efSmrg     rtx value;
506610d565efSmrg     machine_mode mode;
506710d565efSmrg     rtx reg;
506810d565efSmrg     int partial;
506910d565efSmrg     struct locate_and_pad_arg_data locate;
507010d565efSmrg     rtx save_area;
507110d565efSmrg   };
507210d565efSmrg   struct arg *argvec;
507310d565efSmrg   int old_inhibit_defer_pop = inhibit_defer_pop;
507410d565efSmrg   rtx call_fusage = 0;
507510d565efSmrg   rtx mem_value = 0;
507610d565efSmrg   rtx valreg;
507710d565efSmrg   int pcc_struct_value = 0;
5078c7a68eb7Smrg   poly_int64 struct_value_size = 0;
507910d565efSmrg   int flags;
508010d565efSmrg   int reg_parm_stack_space = 0;
5081c7a68eb7Smrg   poly_int64 needed;
508210d565efSmrg   rtx_insn *before_call;
508310d565efSmrg   bool have_push_fusage;
508410d565efSmrg   tree tfom;			/* type_for_mode (outmode, 0) */
508510d565efSmrg 
508610d565efSmrg #ifdef REG_PARM_STACK_SPACE
508710d565efSmrg   /* Define the boundary of the register parm stack space that needs to be
508810d565efSmrg      save, if any.  */
508910d565efSmrg   int low_to_save = 0, high_to_save = 0;
509010d565efSmrg   rtx save_area = 0;            /* Place that it is saved.  */
509110d565efSmrg #endif
509210d565efSmrg 
509310d565efSmrg   /* Size of the stack reserved for parameter registers.  */
5094c7a68eb7Smrg   unsigned int initial_highest_arg_in_use = highest_outgoing_arg_in_use;
509510d565efSmrg   char *initial_stack_usage_map = stack_usage_map;
5096c7a68eb7Smrg   unsigned HOST_WIDE_INT initial_stack_usage_watermark = stack_usage_watermark;
509710d565efSmrg   char *stack_usage_map_buf = NULL;
509810d565efSmrg 
509910d565efSmrg   rtx struct_value = targetm.calls.struct_value_rtx (0, 0);
510010d565efSmrg 
510110d565efSmrg #ifdef REG_PARM_STACK_SPACE
510210d565efSmrg   reg_parm_stack_space = REG_PARM_STACK_SPACE ((tree) 0);
510310d565efSmrg #endif
510410d565efSmrg 
510510d565efSmrg   /* By default, library functions cannot throw.  */
510610d565efSmrg   flags = ECF_NOTHROW;
510710d565efSmrg 
510810d565efSmrg   switch (fn_type)
510910d565efSmrg     {
511010d565efSmrg     case LCT_NORMAL:
511110d565efSmrg       break;
511210d565efSmrg     case LCT_CONST:
511310d565efSmrg       flags |= ECF_CONST;
511410d565efSmrg       break;
511510d565efSmrg     case LCT_PURE:
511610d565efSmrg       flags |= ECF_PURE;
511710d565efSmrg       break;
511810d565efSmrg     case LCT_NORETURN:
511910d565efSmrg       flags |= ECF_NORETURN;
512010d565efSmrg       break;
512110d565efSmrg     case LCT_THROW:
512210d565efSmrg       flags &= ~ECF_NOTHROW;
512310d565efSmrg       break;
512410d565efSmrg     case LCT_RETURNS_TWICE:
512510d565efSmrg       flags = ECF_RETURNS_TWICE;
512610d565efSmrg       break;
512710d565efSmrg     }
512810d565efSmrg   fun = orgfun;
512910d565efSmrg 
513010d565efSmrg   /* Ensure current function's preferred stack boundary is at least
513110d565efSmrg      what we need.  */
513210d565efSmrg   if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
513310d565efSmrg     crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
513410d565efSmrg 
513510d565efSmrg   /* If this kind of value comes back in memory,
513610d565efSmrg      decide where in memory it should come back.  */
513710d565efSmrg   if (outmode != VOIDmode)
513810d565efSmrg     {
513910d565efSmrg       tfom = lang_hooks.types.type_for_mode (outmode, 0);
514010d565efSmrg       if (aggregate_value_p (tfom, 0))
514110d565efSmrg 	{
514210d565efSmrg #ifdef PCC_STATIC_STRUCT_RETURN
514310d565efSmrg 	  rtx pointer_reg
514410d565efSmrg 	    = hard_function_value (build_pointer_type (tfom), 0, 0, 0);
514510d565efSmrg 	  mem_value = gen_rtx_MEM (outmode, pointer_reg);
514610d565efSmrg 	  pcc_struct_value = 1;
514710d565efSmrg 	  if (value == 0)
514810d565efSmrg 	    value = gen_reg_rtx (outmode);
514910d565efSmrg #else /* not PCC_STATIC_STRUCT_RETURN */
515010d565efSmrg 	  struct_value_size = GET_MODE_SIZE (outmode);
515110d565efSmrg 	  if (value != 0 && MEM_P (value))
515210d565efSmrg 	    mem_value = value;
515310d565efSmrg 	  else
515410d565efSmrg 	    mem_value = assign_temp (tfom, 1, 1);
515510d565efSmrg #endif
515610d565efSmrg 	  /* This call returns a big structure.  */
515710d565efSmrg 	  flags &= ~(ECF_CONST | ECF_PURE | ECF_LOOPING_CONST_OR_PURE);
515810d565efSmrg 	}
515910d565efSmrg     }
516010d565efSmrg   else
516110d565efSmrg     tfom = void_type_node;
516210d565efSmrg 
516310d565efSmrg   /* ??? Unfinished: must pass the memory address as an argument.  */
516410d565efSmrg 
516510d565efSmrg   /* Copy all the libcall-arguments out of the varargs data
516610d565efSmrg      and into a vector ARGVEC.
516710d565efSmrg 
516810d565efSmrg      Compute how to pass each argument.  We only support a very small subset
516910d565efSmrg      of the full argument passing conventions to limit complexity here since
517010d565efSmrg      library functions shouldn't have many args.  */
517110d565efSmrg 
517210d565efSmrg   argvec = XALLOCAVEC (struct arg, nargs + 1);
517310d565efSmrg   memset (argvec, 0, (nargs + 1) * sizeof (struct arg));
517410d565efSmrg 
517510d565efSmrg #ifdef INIT_CUMULATIVE_LIBCALL_ARGS
517610d565efSmrg   INIT_CUMULATIVE_LIBCALL_ARGS (args_so_far_v, outmode, fun);
517710d565efSmrg #else
517810d565efSmrg   INIT_CUMULATIVE_ARGS (args_so_far_v, NULL_TREE, fun, 0, nargs);
517910d565efSmrg #endif
518010d565efSmrg   args_so_far = pack_cumulative_args (&args_so_far_v);
518110d565efSmrg 
518210d565efSmrg   args_size.constant = 0;
518310d565efSmrg   args_size.var = 0;
518410d565efSmrg 
518510d565efSmrg   count = 0;
518610d565efSmrg 
518710d565efSmrg   push_temp_slots ();
518810d565efSmrg 
518910d565efSmrg   /* If there's a structure value address to be passed,
519010d565efSmrg      either pass it in the special place, or pass it as an extra argument.  */
519110d565efSmrg   if (mem_value && struct_value == 0 && ! pcc_struct_value)
519210d565efSmrg     {
519310d565efSmrg       rtx addr = XEXP (mem_value, 0);
519410d565efSmrg 
519510d565efSmrg       nargs++;
519610d565efSmrg 
519710d565efSmrg       /* Make sure it is a reasonable operand for a move or push insn.  */
519810d565efSmrg       if (!REG_P (addr) && !MEM_P (addr)
519910d565efSmrg 	  && !(CONSTANT_P (addr)
520010d565efSmrg 	       && targetm.legitimate_constant_p (Pmode, addr)))
520110d565efSmrg 	addr = force_operand (addr, NULL_RTX);
520210d565efSmrg 
520310d565efSmrg       argvec[count].value = addr;
520410d565efSmrg       argvec[count].mode = Pmode;
520510d565efSmrg       argvec[count].partial = 0;
520610d565efSmrg 
5207*ec02198aSmrg       function_arg_info ptr_arg (Pmode, /*named=*/true);
5208*ec02198aSmrg       argvec[count].reg = targetm.calls.function_arg (args_so_far, ptr_arg);
5209*ec02198aSmrg       gcc_assert (targetm.calls.arg_partial_bytes (args_so_far, ptr_arg) == 0);
521010d565efSmrg 
521110d565efSmrg       locate_and_pad_parm (Pmode, NULL_TREE,
521210d565efSmrg #ifdef STACK_PARMS_IN_REG_PARM_AREA
521310d565efSmrg 			   1,
521410d565efSmrg #else
521510d565efSmrg 			   argvec[count].reg != 0,
521610d565efSmrg #endif
521710d565efSmrg 			   reg_parm_stack_space, 0,
521810d565efSmrg 			   NULL_TREE, &args_size, &argvec[count].locate);
521910d565efSmrg 
522010d565efSmrg       if (argvec[count].reg == 0 || argvec[count].partial != 0
522110d565efSmrg 	  || reg_parm_stack_space > 0)
522210d565efSmrg 	args_size.constant += argvec[count].locate.size.constant;
522310d565efSmrg 
5224*ec02198aSmrg       targetm.calls.function_arg_advance (args_so_far, ptr_arg);
522510d565efSmrg 
522610d565efSmrg       count++;
522710d565efSmrg     }
522810d565efSmrg 
5229c7a68eb7Smrg   for (unsigned int i = 0; count < nargs; i++, count++)
523010d565efSmrg     {
5231c7a68eb7Smrg       rtx val = args[i].first;
5232*ec02198aSmrg       function_arg_info arg (args[i].second, /*named=*/true);
523310d565efSmrg       int unsigned_p = 0;
523410d565efSmrg 
523510d565efSmrg       /* We cannot convert the arg value to the mode the library wants here;
523610d565efSmrg 	 must do it earlier where we know the signedness of the arg.  */
5237*ec02198aSmrg       gcc_assert (arg.mode != BLKmode
5238*ec02198aSmrg 		  && (GET_MODE (val) == arg.mode
5239*ec02198aSmrg 		      || GET_MODE (val) == VOIDmode));
524010d565efSmrg 
524110d565efSmrg       /* Make sure it is a reasonable operand for a move or push insn.  */
524210d565efSmrg       if (!REG_P (val) && !MEM_P (val)
5243*ec02198aSmrg 	  && !(CONSTANT_P (val)
5244*ec02198aSmrg 	       && targetm.legitimate_constant_p (arg.mode, val)))
524510d565efSmrg 	val = force_operand (val, NULL_RTX);
524610d565efSmrg 
5247*ec02198aSmrg       if (pass_by_reference (&args_so_far_v, arg))
524810d565efSmrg 	{
524910d565efSmrg 	  rtx slot;
5250*ec02198aSmrg 	  int must_copy = !reference_callee_copied (&args_so_far_v, arg);
525110d565efSmrg 
525210d565efSmrg 	  /* If this was a CONST function, it is now PURE since it now
525310d565efSmrg 	     reads memory.  */
525410d565efSmrg 	  if (flags & ECF_CONST)
525510d565efSmrg 	    {
525610d565efSmrg 	      flags &= ~ECF_CONST;
525710d565efSmrg 	      flags |= ECF_PURE;
525810d565efSmrg 	    }
525910d565efSmrg 
526010d565efSmrg 	  if (MEM_P (val) && !must_copy)
526110d565efSmrg 	    {
526210d565efSmrg 	      tree val_expr = MEM_EXPR (val);
526310d565efSmrg 	      if (val_expr)
526410d565efSmrg 		mark_addressable (val_expr);
526510d565efSmrg 	      slot = val;
526610d565efSmrg 	    }
526710d565efSmrg 	  else
526810d565efSmrg 	    {
5269*ec02198aSmrg 	      slot = assign_temp (lang_hooks.types.type_for_mode (arg.mode, 0),
527010d565efSmrg 				  1, 1);
527110d565efSmrg 	      emit_move_insn (slot, val);
527210d565efSmrg 	    }
527310d565efSmrg 
527410d565efSmrg 	  call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
527510d565efSmrg 					   gen_rtx_USE (VOIDmode, slot),
527610d565efSmrg 					   call_fusage);
527710d565efSmrg 	  if (must_copy)
527810d565efSmrg 	    call_fusage = gen_rtx_EXPR_LIST (VOIDmode,
527910d565efSmrg 					     gen_rtx_CLOBBER (VOIDmode,
528010d565efSmrg 							      slot),
528110d565efSmrg 					     call_fusage);
528210d565efSmrg 
5283*ec02198aSmrg 	  arg.mode = Pmode;
5284*ec02198aSmrg 	  arg.pass_by_reference = true;
528510d565efSmrg 	  val = force_operand (XEXP (slot, 0), NULL_RTX);
528610d565efSmrg 	}
528710d565efSmrg 
5288*ec02198aSmrg       arg.mode = promote_function_mode (NULL_TREE, arg.mode, &unsigned_p,
5289*ec02198aSmrg 					NULL_TREE, 0);
5290*ec02198aSmrg       argvec[count].mode = arg.mode;
5291*ec02198aSmrg       argvec[count].value = convert_modes (arg.mode, GET_MODE (val), val,
5292*ec02198aSmrg 					   unsigned_p);
5293*ec02198aSmrg       argvec[count].reg = targetm.calls.function_arg (args_so_far, arg);
529410d565efSmrg 
529510d565efSmrg       argvec[count].partial
5296*ec02198aSmrg 	= targetm.calls.arg_partial_bytes (args_so_far, arg);
529710d565efSmrg 
529810d565efSmrg       if (argvec[count].reg == 0
529910d565efSmrg 	  || argvec[count].partial != 0
530010d565efSmrg 	  || reg_parm_stack_space > 0)
530110d565efSmrg 	{
5302*ec02198aSmrg 	  locate_and_pad_parm (arg.mode, NULL_TREE,
530310d565efSmrg #ifdef STACK_PARMS_IN_REG_PARM_AREA
530410d565efSmrg 			       1,
530510d565efSmrg #else
530610d565efSmrg 			       argvec[count].reg != 0,
530710d565efSmrg #endif
530810d565efSmrg 			       reg_parm_stack_space, argvec[count].partial,
530910d565efSmrg 			       NULL_TREE, &args_size, &argvec[count].locate);
531010d565efSmrg 	  args_size.constant += argvec[count].locate.size.constant;
531110d565efSmrg 	  gcc_assert (!argvec[count].locate.size.var);
531210d565efSmrg 	}
531310d565efSmrg #ifdef BLOCK_REG_PADDING
531410d565efSmrg       else
531510d565efSmrg 	/* The argument is passed entirely in registers.  See at which
531610d565efSmrg 	   end it should be padded.  */
531710d565efSmrg 	argvec[count].locate.where_pad =
5318*ec02198aSmrg 	  BLOCK_REG_PADDING (arg.mode, NULL_TREE,
5319*ec02198aSmrg 			     known_le (GET_MODE_SIZE (arg.mode),
5320*ec02198aSmrg 				       UNITS_PER_WORD));
532110d565efSmrg #endif
532210d565efSmrg 
5323*ec02198aSmrg       targetm.calls.function_arg_advance (args_so_far, arg);
532410d565efSmrg     }
532510d565efSmrg 
5326*ec02198aSmrg   for (int i = 0; i < nargs; i++)
5327*ec02198aSmrg     if (reg_parm_stack_space > 0
5328*ec02198aSmrg 	|| argvec[i].reg == 0
5329*ec02198aSmrg 	|| argvec[i].partial != 0)
5330*ec02198aSmrg       update_stack_alignment_for_call (&argvec[i].locate);
5331*ec02198aSmrg 
533210d565efSmrg   /* If this machine requires an external definition for library
533310d565efSmrg      functions, write one out.  */
533410d565efSmrg   assemble_external_libcall (fun);
533510d565efSmrg 
533610d565efSmrg   original_args_size = args_size;
5337c7a68eb7Smrg   args_size.constant = (aligned_upper_bound (args_size.constant
5338c7a68eb7Smrg 					     + stack_pointer_delta,
5339c7a68eb7Smrg 					     STACK_BYTES)
534010d565efSmrg 			- stack_pointer_delta);
534110d565efSmrg 
5342c7a68eb7Smrg   args_size.constant = upper_bound (args_size.constant,
534310d565efSmrg 				    reg_parm_stack_space);
534410d565efSmrg 
534510d565efSmrg   if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
534610d565efSmrg     args_size.constant -= reg_parm_stack_space;
534710d565efSmrg 
5348c7a68eb7Smrg   crtl->outgoing_args_size = upper_bound (crtl->outgoing_args_size,
5349c7a68eb7Smrg 					  args_size.constant);
535010d565efSmrg 
535110d565efSmrg   if (flag_stack_usage_info && !ACCUMULATE_OUTGOING_ARGS)
535210d565efSmrg     {
5353c7a68eb7Smrg       poly_int64 pushed = args_size.constant + pending_stack_adjust;
5354c7a68eb7Smrg       current_function_pushed_stack_size
5355c7a68eb7Smrg 	= upper_bound (current_function_pushed_stack_size, pushed);
535610d565efSmrg     }
535710d565efSmrg 
535810d565efSmrg   if (ACCUMULATE_OUTGOING_ARGS)
535910d565efSmrg     {
536010d565efSmrg       /* Since the stack pointer will never be pushed, it is possible for
536110d565efSmrg 	 the evaluation of a parm to clobber something we have already
536210d565efSmrg 	 written to the stack.  Since most function calls on RISC machines
536310d565efSmrg 	 do not use the stack, this is uncommon, but must work correctly.
536410d565efSmrg 
536510d565efSmrg 	 Therefore, we save any area of the stack that was already written
536610d565efSmrg 	 and that we are using.  Here we set up to do this by making a new
536710d565efSmrg 	 stack usage map from the old one.
536810d565efSmrg 
536910d565efSmrg 	 Another approach might be to try to reorder the argument
537010d565efSmrg 	 evaluations to avoid this conflicting stack usage.  */
537110d565efSmrg 
537210d565efSmrg       needed = args_size.constant;
537310d565efSmrg 
537410d565efSmrg       /* Since we will be writing into the entire argument area, the
537510d565efSmrg 	 map must be allocated for its entire size, not just the part that
537610d565efSmrg 	 is the responsibility of the caller.  */
537710d565efSmrg       if (! OUTGOING_REG_PARM_STACK_SPACE ((!fndecl ? fntype : TREE_TYPE (fndecl))))
537810d565efSmrg 	needed += reg_parm_stack_space;
537910d565efSmrg 
5380c7a68eb7Smrg       poly_int64 limit = needed;
538110d565efSmrg       if (ARGS_GROW_DOWNWARD)
5382c7a68eb7Smrg 	limit += 1;
5383c7a68eb7Smrg 
5384c7a68eb7Smrg       /* For polynomial sizes, this is the maximum possible size needed
5385c7a68eb7Smrg 	 for arguments with a constant size and offset.  */
5386c7a68eb7Smrg       HOST_WIDE_INT const_limit = constant_lower_bound (limit);
538710d565efSmrg       highest_outgoing_arg_in_use = MAX (initial_highest_arg_in_use,
5388c7a68eb7Smrg 					 const_limit);
538910d565efSmrg 
539010d565efSmrg       stack_usage_map_buf = XNEWVEC (char, highest_outgoing_arg_in_use);
539110d565efSmrg       stack_usage_map = stack_usage_map_buf;
539210d565efSmrg 
539310d565efSmrg       if (initial_highest_arg_in_use)
539410d565efSmrg 	memcpy (stack_usage_map, initial_stack_usage_map,
539510d565efSmrg 		initial_highest_arg_in_use);
539610d565efSmrg 
539710d565efSmrg       if (initial_highest_arg_in_use != highest_outgoing_arg_in_use)
539810d565efSmrg 	memset (&stack_usage_map[initial_highest_arg_in_use], 0,
539910d565efSmrg 	       highest_outgoing_arg_in_use - initial_highest_arg_in_use);
540010d565efSmrg       needed = 0;
540110d565efSmrg 
540210d565efSmrg       /* We must be careful to use virtual regs before they're instantiated,
540310d565efSmrg 	 and real regs afterwards.  Loop optimization, for example, can create
540410d565efSmrg 	 new libcalls after we've instantiated the virtual regs, and if we
540510d565efSmrg 	 use virtuals anyway, they won't match the rtl patterns.  */
540610d565efSmrg 
540710d565efSmrg       if (virtuals_instantiated)
540810d565efSmrg 	argblock = plus_constant (Pmode, stack_pointer_rtx,
540910d565efSmrg 				  STACK_POINTER_OFFSET);
541010d565efSmrg       else
541110d565efSmrg 	argblock = virtual_outgoing_args_rtx;
541210d565efSmrg     }
541310d565efSmrg   else
541410d565efSmrg     {
541510d565efSmrg       if (!PUSH_ARGS)
5416c7a68eb7Smrg 	argblock = push_block (gen_int_mode (args_size.constant, Pmode), 0, 0);
541710d565efSmrg     }
541810d565efSmrg 
541910d565efSmrg   /* We push args individually in reverse order, perform stack alignment
542010d565efSmrg      before the first push (the last arg).  */
542110d565efSmrg   if (argblock == 0)
5422c7a68eb7Smrg     anti_adjust_stack (gen_int_mode (args_size.constant
5423c7a68eb7Smrg 				     - original_args_size.constant,
5424c7a68eb7Smrg 				     Pmode));
542510d565efSmrg 
542610d565efSmrg   argnum = nargs - 1;
542710d565efSmrg 
542810d565efSmrg #ifdef REG_PARM_STACK_SPACE
542910d565efSmrg   if (ACCUMULATE_OUTGOING_ARGS)
543010d565efSmrg     {
543110d565efSmrg       /* The argument list is the property of the called routine and it
543210d565efSmrg 	 may clobber it.  If the fixed area has been used for previous
543310d565efSmrg 	 parameters, we must save and restore it.  */
543410d565efSmrg       save_area = save_fixed_argument_area (reg_parm_stack_space, argblock,
543510d565efSmrg 					    &low_to_save, &high_to_save);
543610d565efSmrg     }
543710d565efSmrg #endif
543810d565efSmrg 
543910d565efSmrg   /* When expanding a normal call, args are stored in push order,
544010d565efSmrg      which is the reverse of what we have here.  */
544110d565efSmrg   bool any_regs = false;
544210d565efSmrg   for (int i = nargs; i-- > 0; )
544310d565efSmrg     if (argvec[i].reg != NULL_RTX)
544410d565efSmrg       {
544510d565efSmrg 	targetm.calls.call_args (argvec[i].reg, NULL_TREE);
544610d565efSmrg 	any_regs = true;
544710d565efSmrg       }
544810d565efSmrg   if (!any_regs)
544910d565efSmrg     targetm.calls.call_args (pc_rtx, NULL_TREE);
545010d565efSmrg 
545110d565efSmrg   /* Push the args that need to be pushed.  */
545210d565efSmrg 
545310d565efSmrg   have_push_fusage = false;
545410d565efSmrg 
545510d565efSmrg   /* ARGNUM indexes the ARGVEC array in the order in which the arguments
545610d565efSmrg      are to be pushed.  */
545710d565efSmrg   for (count = 0; count < nargs; count++, argnum--)
545810d565efSmrg     {
545910d565efSmrg       machine_mode mode = argvec[argnum].mode;
546010d565efSmrg       rtx val = argvec[argnum].value;
546110d565efSmrg       rtx reg = argvec[argnum].reg;
546210d565efSmrg       int partial = argvec[argnum].partial;
546310d565efSmrg       unsigned int parm_align = argvec[argnum].locate.boundary;
5464c7a68eb7Smrg       poly_int64 lower_bound = 0, upper_bound = 0;
546510d565efSmrg 
546610d565efSmrg       if (! (reg != 0 && partial == 0))
546710d565efSmrg 	{
546810d565efSmrg 	  rtx use;
546910d565efSmrg 
547010d565efSmrg 	  if (ACCUMULATE_OUTGOING_ARGS)
547110d565efSmrg 	    {
547210d565efSmrg 	      /* If this is being stored into a pre-allocated, fixed-size,
547310d565efSmrg 		 stack area, save any previous data at that location.  */
547410d565efSmrg 
547510d565efSmrg 	      if (ARGS_GROW_DOWNWARD)
547610d565efSmrg 		{
547710d565efSmrg 		  /* stack_slot is negative, but we want to index stack_usage_map
547810d565efSmrg 		     with positive values.  */
547910d565efSmrg 		  upper_bound = -argvec[argnum].locate.slot_offset.constant + 1;
548010d565efSmrg 		  lower_bound = upper_bound - argvec[argnum].locate.size.constant;
548110d565efSmrg 		}
548210d565efSmrg 	      else
548310d565efSmrg 		{
548410d565efSmrg 		  lower_bound = argvec[argnum].locate.slot_offset.constant;
548510d565efSmrg 		  upper_bound = lower_bound + argvec[argnum].locate.size.constant;
548610d565efSmrg 		}
548710d565efSmrg 
5488c7a68eb7Smrg 	      if (stack_region_maybe_used_p (lower_bound, upper_bound,
5489c7a68eb7Smrg 					     reg_parm_stack_space))
549010d565efSmrg 		{
549110d565efSmrg 		  /* We need to make a save area.  */
5492c7a68eb7Smrg 		  poly_uint64 size
549310d565efSmrg 		    = argvec[argnum].locate.size.constant * BITS_PER_UNIT;
549410d565efSmrg 		  machine_mode save_mode
5495c7a68eb7Smrg 		    = int_mode_for_size (size, 1).else_blk ();
549610d565efSmrg 		  rtx adr
549710d565efSmrg 		    = plus_constant (Pmode, argblock,
549810d565efSmrg 				     argvec[argnum].locate.offset.constant);
549910d565efSmrg 		  rtx stack_area
550010d565efSmrg 		    = gen_rtx_MEM (save_mode, memory_address (save_mode, adr));
550110d565efSmrg 
550210d565efSmrg 		  if (save_mode == BLKmode)
550310d565efSmrg 		    {
550410d565efSmrg 		      argvec[argnum].save_area
550510d565efSmrg 			= assign_stack_temp (BLKmode,
550610d565efSmrg 					     argvec[argnum].locate.size.constant
550710d565efSmrg 					     );
550810d565efSmrg 
550910d565efSmrg 		      emit_block_move (validize_mem
551010d565efSmrg 				         (copy_rtx (argvec[argnum].save_area)),
551110d565efSmrg 				       stack_area,
5512c7a68eb7Smrg 				       (gen_int_mode
5513c7a68eb7Smrg 					(argvec[argnum].locate.size.constant,
5514c7a68eb7Smrg 					 Pmode)),
551510d565efSmrg 				       BLOCK_OP_CALL_PARM);
551610d565efSmrg 		    }
551710d565efSmrg 		  else
551810d565efSmrg 		    {
551910d565efSmrg 		      argvec[argnum].save_area = gen_reg_rtx (save_mode);
552010d565efSmrg 
552110d565efSmrg 		      emit_move_insn (argvec[argnum].save_area, stack_area);
552210d565efSmrg 		    }
552310d565efSmrg 		}
552410d565efSmrg 	    }
552510d565efSmrg 
552610d565efSmrg 	  emit_push_insn (val, mode, NULL_TREE, NULL_RTX, parm_align,
552710d565efSmrg 			  partial, reg, 0, argblock,
5528c7a68eb7Smrg 			  (gen_int_mode
5529c7a68eb7Smrg 			   (argvec[argnum].locate.offset.constant, Pmode)),
553010d565efSmrg 			  reg_parm_stack_space,
553110d565efSmrg 			  ARGS_SIZE_RTX (argvec[argnum].locate.alignment_pad), false);
553210d565efSmrg 
553310d565efSmrg 	  /* Now mark the segment we just used.  */
553410d565efSmrg 	  if (ACCUMULATE_OUTGOING_ARGS)
5535c7a68eb7Smrg 	    mark_stack_region_used (lower_bound, upper_bound);
553610d565efSmrg 
553710d565efSmrg 	  NO_DEFER_POP;
553810d565efSmrg 
553910d565efSmrg 	  /* Indicate argument access so that alias.c knows that these
554010d565efSmrg 	     values are live.  */
554110d565efSmrg 	  if (argblock)
554210d565efSmrg 	    use = plus_constant (Pmode, argblock,
554310d565efSmrg 				 argvec[argnum].locate.offset.constant);
554410d565efSmrg 	  else if (have_push_fusage)
554510d565efSmrg 	    continue;
554610d565efSmrg 	  else
554710d565efSmrg 	    {
554810d565efSmrg 	      /* When arguments are pushed, trying to tell alias.c where
554910d565efSmrg 		 exactly this argument is won't work, because the
555010d565efSmrg 		 auto-increment causes confusion.  So we merely indicate
555110d565efSmrg 		 that we access something with a known mode somewhere on
555210d565efSmrg 		 the stack.  */
555310d565efSmrg 	      use = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
555410d565efSmrg 				  gen_rtx_SCRATCH (Pmode));
555510d565efSmrg 	      have_push_fusage = true;
555610d565efSmrg 	    }
555710d565efSmrg 	  use = gen_rtx_MEM (argvec[argnum].mode, use);
555810d565efSmrg 	  use = gen_rtx_USE (VOIDmode, use);
555910d565efSmrg 	  call_fusage = gen_rtx_EXPR_LIST (VOIDmode, use, call_fusage);
556010d565efSmrg 	}
556110d565efSmrg     }
556210d565efSmrg 
556310d565efSmrg   argnum = nargs - 1;
556410d565efSmrg 
556510d565efSmrg   fun = prepare_call_address (NULL, fun, NULL, &call_fusage, 0, 0);
556610d565efSmrg 
556710d565efSmrg   /* Now load any reg parms into their regs.  */
556810d565efSmrg 
556910d565efSmrg   /* ARGNUM indexes the ARGVEC array in the order in which the arguments
557010d565efSmrg      are to be pushed.  */
557110d565efSmrg   for (count = 0; count < nargs; count++, argnum--)
557210d565efSmrg     {
557310d565efSmrg       machine_mode mode = argvec[argnum].mode;
557410d565efSmrg       rtx val = argvec[argnum].value;
557510d565efSmrg       rtx reg = argvec[argnum].reg;
557610d565efSmrg       int partial = argvec[argnum].partial;
557710d565efSmrg 
557810d565efSmrg       /* Handle calls that pass values in multiple non-contiguous
557910d565efSmrg 	 locations.  The PA64 has examples of this for library calls.  */
558010d565efSmrg       if (reg != 0 && GET_CODE (reg) == PARALLEL)
558110d565efSmrg 	emit_group_load (reg, val, NULL_TREE, GET_MODE_SIZE (mode));
558210d565efSmrg       else if (reg != 0 && partial == 0)
558310d565efSmrg         {
558410d565efSmrg 	  emit_move_insn (reg, val);
558510d565efSmrg #ifdef BLOCK_REG_PADDING
5586c7a68eb7Smrg 	  poly_int64 size = GET_MODE_SIZE (argvec[argnum].mode);
558710d565efSmrg 
558810d565efSmrg 	  /* Copied from load_register_parameters.  */
558910d565efSmrg 
559010d565efSmrg 	  /* Handle case where we have a value that needs shifting
559110d565efSmrg 	     up to the msb.  eg. a QImode value and we're padding
559210d565efSmrg 	     upward on a BYTES_BIG_ENDIAN machine.  */
5593c7a68eb7Smrg 	  if (known_lt (size, UNITS_PER_WORD)
559410d565efSmrg 	      && (argvec[argnum].locate.where_pad
5595c7a68eb7Smrg 		  == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
559610d565efSmrg 	    {
559710d565efSmrg 	      rtx x;
5598c7a68eb7Smrg 	      poly_int64 shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
559910d565efSmrg 
560010d565efSmrg 	      /* Assigning REG here rather than a temp makes CALL_FUSAGE
560110d565efSmrg 		 report the whole reg as used.  Strictly speaking, the
560210d565efSmrg 		 call only uses SIZE bytes at the msb end, but it doesn't
560310d565efSmrg 		 seem worth generating rtl to say that.  */
560410d565efSmrg 	      reg = gen_rtx_REG (word_mode, REGNO (reg));
560510d565efSmrg 	      x = expand_shift (LSHIFT_EXPR, word_mode, reg, shift, reg, 1);
560610d565efSmrg 	      if (x != reg)
560710d565efSmrg 		emit_move_insn (reg, x);
560810d565efSmrg 	    }
560910d565efSmrg #endif
561010d565efSmrg 	}
561110d565efSmrg 
561210d565efSmrg       NO_DEFER_POP;
561310d565efSmrg     }
561410d565efSmrg 
561510d565efSmrg   /* Any regs containing parms remain in use through the call.  */
561610d565efSmrg   for (count = 0; count < nargs; count++)
561710d565efSmrg     {
561810d565efSmrg       rtx reg = argvec[count].reg;
561910d565efSmrg       if (reg != 0 && GET_CODE (reg) == PARALLEL)
562010d565efSmrg 	use_group_regs (&call_fusage, reg);
562110d565efSmrg       else if (reg != 0)
562210d565efSmrg         {
562310d565efSmrg 	  int partial = argvec[count].partial;
562410d565efSmrg 	  if (partial)
562510d565efSmrg 	    {
562610d565efSmrg 	      int nregs;
562710d565efSmrg               gcc_assert (partial % UNITS_PER_WORD == 0);
562810d565efSmrg 	      nregs = partial / UNITS_PER_WORD;
562910d565efSmrg 	      use_regs (&call_fusage, REGNO (reg), nregs);
563010d565efSmrg 	    }
563110d565efSmrg 	  else
563210d565efSmrg 	    use_reg (&call_fusage, reg);
563310d565efSmrg 	}
563410d565efSmrg     }
563510d565efSmrg 
563610d565efSmrg   /* Pass the function the address in which to return a structure value.  */
563710d565efSmrg   if (mem_value != 0 && struct_value != 0 && ! pcc_struct_value)
563810d565efSmrg     {
563910d565efSmrg       emit_move_insn (struct_value,
564010d565efSmrg 		      force_reg (Pmode,
564110d565efSmrg 				 force_operand (XEXP (mem_value, 0),
564210d565efSmrg 						NULL_RTX)));
564310d565efSmrg       if (REG_P (struct_value))
564410d565efSmrg 	use_reg (&call_fusage, struct_value);
564510d565efSmrg     }
564610d565efSmrg 
564710d565efSmrg   /* Don't allow popping to be deferred, since then
564810d565efSmrg      cse'ing of library calls could delete a call and leave the pop.  */
564910d565efSmrg   NO_DEFER_POP;
565010d565efSmrg   valreg = (mem_value == 0 && outmode != VOIDmode
565110d565efSmrg 	    ? hard_libcall_value (outmode, orgfun) : NULL_RTX);
565210d565efSmrg 
565310d565efSmrg   /* Stack must be properly aligned now.  */
5654c7a68eb7Smrg   gcc_assert (multiple_p (stack_pointer_delta,
5655c7a68eb7Smrg 			  PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT));
565610d565efSmrg 
565710d565efSmrg   before_call = get_last_insn ();
565810d565efSmrg 
5659*ec02198aSmrg   if (flag_callgraph_info)
5660*ec02198aSmrg     record_final_call (SYMBOL_REF_DECL (orgfun), UNKNOWN_LOCATION);
5661*ec02198aSmrg 
566210d565efSmrg   /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
566310d565efSmrg      will set inhibit_defer_pop to that value.  */
566410d565efSmrg   /* The return type is needed to decide how many bytes the function pops.
566510d565efSmrg      Signedness plays no role in that, so for simplicity, we pretend it's
566610d565efSmrg      always signed.  We also assume that the list of arguments passed has
566710d565efSmrg      no impact, so we pretend it is unknown.  */
566810d565efSmrg 
566910d565efSmrg   emit_call_1 (fun, NULL,
567010d565efSmrg 	       get_identifier (XSTR (orgfun, 0)),
567110d565efSmrg 	       build_function_type (tfom, NULL_TREE),
567210d565efSmrg 	       original_args_size.constant, args_size.constant,
567310d565efSmrg 	       struct_value_size,
567410d565efSmrg 	       targetm.calls.function_arg (args_so_far,
5675*ec02198aSmrg 					   function_arg_info::end_marker ()),
567610d565efSmrg 	       valreg,
567710d565efSmrg 	       old_inhibit_defer_pop + 1, call_fusage, flags, args_so_far);
567810d565efSmrg 
567910d565efSmrg   if (flag_ipa_ra)
568010d565efSmrg     {
568110d565efSmrg       rtx datum = orgfun;
568210d565efSmrg       gcc_assert (GET_CODE (datum) == SYMBOL_REF);
568310d565efSmrg       rtx_call_insn *last = last_call_insn ();
568410d565efSmrg       add_reg_note (last, REG_CALL_DECL, datum);
568510d565efSmrg     }
568610d565efSmrg 
568710d565efSmrg   /* Right-shift returned value if necessary.  */
568810d565efSmrg   if (!pcc_struct_value
568910d565efSmrg       && TYPE_MODE (tfom) != BLKmode
569010d565efSmrg       && targetm.calls.return_in_msb (tfom))
569110d565efSmrg     {
569210d565efSmrg       shift_return_value (TYPE_MODE (tfom), false, valreg);
569310d565efSmrg       valreg = gen_rtx_REG (TYPE_MODE (tfom), REGNO (valreg));
569410d565efSmrg     }
569510d565efSmrg 
569610d565efSmrg   targetm.calls.end_call_args ();
569710d565efSmrg 
569810d565efSmrg   /* For calls to `setjmp', etc., inform function.c:setjmp_warnings
569910d565efSmrg      that it should complain if nonvolatile values are live.  For
570010d565efSmrg      functions that cannot return, inform flow that control does not
570110d565efSmrg      fall through.  */
570210d565efSmrg   if (flags & ECF_NORETURN)
570310d565efSmrg     {
570410d565efSmrg       /* The barrier note must be emitted
570510d565efSmrg 	 immediately after the CALL_INSN.  Some ports emit more than
570610d565efSmrg 	 just a CALL_INSN above, so we must search for it here.  */
570710d565efSmrg       rtx_insn *last = get_last_insn ();
570810d565efSmrg       while (!CALL_P (last))
570910d565efSmrg 	{
571010d565efSmrg 	  last = PREV_INSN (last);
571110d565efSmrg 	  /* There was no CALL_INSN?  */
571210d565efSmrg 	  gcc_assert (last != before_call);
571310d565efSmrg 	}
571410d565efSmrg 
571510d565efSmrg       emit_barrier_after (last);
571610d565efSmrg     }
571710d565efSmrg 
571810d565efSmrg   /* Consider that "regular" libcalls, i.e. all of them except for LCT_THROW
571910d565efSmrg      and LCT_RETURNS_TWICE, cannot perform non-local gotos.  */
572010d565efSmrg   if (flags & ECF_NOTHROW)
572110d565efSmrg     {
572210d565efSmrg       rtx_insn *last = get_last_insn ();
572310d565efSmrg       while (!CALL_P (last))
572410d565efSmrg 	{
572510d565efSmrg 	  last = PREV_INSN (last);
572610d565efSmrg 	  /* There was no CALL_INSN?  */
572710d565efSmrg 	  gcc_assert (last != before_call);
572810d565efSmrg 	}
572910d565efSmrg 
573010d565efSmrg       make_reg_eh_region_note_nothrow_nononlocal (last);
573110d565efSmrg     }
573210d565efSmrg 
573310d565efSmrg   /* Now restore inhibit_defer_pop to its actual original value.  */
573410d565efSmrg   OK_DEFER_POP;
573510d565efSmrg 
573610d565efSmrg   pop_temp_slots ();
573710d565efSmrg 
573810d565efSmrg   /* Copy the value to the right place.  */
573910d565efSmrg   if (outmode != VOIDmode && retval)
574010d565efSmrg     {
574110d565efSmrg       if (mem_value)
574210d565efSmrg 	{
574310d565efSmrg 	  if (value == 0)
574410d565efSmrg 	    value = mem_value;
574510d565efSmrg 	  if (value != mem_value)
574610d565efSmrg 	    emit_move_insn (value, mem_value);
574710d565efSmrg 	}
574810d565efSmrg       else if (GET_CODE (valreg) == PARALLEL)
574910d565efSmrg 	{
575010d565efSmrg 	  if (value == 0)
575110d565efSmrg 	    value = gen_reg_rtx (outmode);
575210d565efSmrg 	  emit_group_store (value, valreg, NULL_TREE, GET_MODE_SIZE (outmode));
575310d565efSmrg 	}
575410d565efSmrg       else
575510d565efSmrg 	{
575610d565efSmrg 	  /* Convert to the proper mode if a promotion has been active.  */
575710d565efSmrg 	  if (GET_MODE (valreg) != outmode)
575810d565efSmrg 	    {
575910d565efSmrg 	      int unsignedp = TYPE_UNSIGNED (tfom);
576010d565efSmrg 
576110d565efSmrg 	      gcc_assert (promote_function_mode (tfom, outmode, &unsignedp,
576210d565efSmrg 						 fndecl ? TREE_TYPE (fndecl) : fntype, 1)
576310d565efSmrg 			  == GET_MODE (valreg));
576410d565efSmrg 	      valreg = convert_modes (outmode, GET_MODE (valreg), valreg, 0);
576510d565efSmrg 	    }
576610d565efSmrg 
576710d565efSmrg 	  if (value != 0)
576810d565efSmrg 	    emit_move_insn (value, valreg);
576910d565efSmrg 	  else
577010d565efSmrg 	    value = valreg;
577110d565efSmrg 	}
577210d565efSmrg     }
577310d565efSmrg 
577410d565efSmrg   if (ACCUMULATE_OUTGOING_ARGS)
577510d565efSmrg     {
577610d565efSmrg #ifdef REG_PARM_STACK_SPACE
577710d565efSmrg       if (save_area)
577810d565efSmrg 	restore_fixed_argument_area (save_area, argblock,
577910d565efSmrg 				     high_to_save, low_to_save);
578010d565efSmrg #endif
578110d565efSmrg 
578210d565efSmrg       /* If we saved any argument areas, restore them.  */
578310d565efSmrg       for (count = 0; count < nargs; count++)
578410d565efSmrg 	if (argvec[count].save_area)
578510d565efSmrg 	  {
578610d565efSmrg 	    machine_mode save_mode = GET_MODE (argvec[count].save_area);
578710d565efSmrg 	    rtx adr = plus_constant (Pmode, argblock,
578810d565efSmrg 				     argvec[count].locate.offset.constant);
578910d565efSmrg 	    rtx stack_area = gen_rtx_MEM (save_mode,
579010d565efSmrg 					  memory_address (save_mode, adr));
579110d565efSmrg 
579210d565efSmrg 	    if (save_mode == BLKmode)
579310d565efSmrg 	      emit_block_move (stack_area,
579410d565efSmrg 			       validize_mem
579510d565efSmrg 			         (copy_rtx (argvec[count].save_area)),
5796c7a68eb7Smrg 			       (gen_int_mode
5797c7a68eb7Smrg 				(argvec[count].locate.size.constant, Pmode)),
579810d565efSmrg 			       BLOCK_OP_CALL_PARM);
579910d565efSmrg 	    else
580010d565efSmrg 	      emit_move_insn (stack_area, argvec[count].save_area);
580110d565efSmrg 	  }
580210d565efSmrg 
580310d565efSmrg       highest_outgoing_arg_in_use = initial_highest_arg_in_use;
580410d565efSmrg       stack_usage_map = initial_stack_usage_map;
5805c7a68eb7Smrg       stack_usage_watermark = initial_stack_usage_watermark;
580610d565efSmrg     }
580710d565efSmrg 
580810d565efSmrg   free (stack_usage_map_buf);
580910d565efSmrg 
581010d565efSmrg   return value;
581110d565efSmrg 
581210d565efSmrg }
581310d565efSmrg 
581410d565efSmrg 
581510d565efSmrg /* Store a single argument for a function call
581610d565efSmrg    into the register or memory area where it must be passed.
581710d565efSmrg    *ARG describes the argument value and where to pass it.
581810d565efSmrg 
581910d565efSmrg    ARGBLOCK is the address of the stack-block for all the arguments,
582010d565efSmrg    or 0 on a machine where arguments are pushed individually.
582110d565efSmrg 
582210d565efSmrg    MAY_BE_ALLOCA nonzero says this could be a call to `alloca'
582310d565efSmrg    so must be careful about how the stack is used.
582410d565efSmrg 
582510d565efSmrg    VARIABLE_SIZE nonzero says that this was a variable-sized outgoing
582610d565efSmrg    argument stack.  This is used if ACCUMULATE_OUTGOING_ARGS to indicate
582710d565efSmrg    that we need not worry about saving and restoring the stack.
582810d565efSmrg 
582910d565efSmrg    FNDECL is the declaration of the function we are calling.
583010d565efSmrg 
583110d565efSmrg    Return nonzero if this arg should cause sibcall failure,
583210d565efSmrg    zero otherwise.  */
583310d565efSmrg 
static int
store_one_arg (struct arg_data *arg, rtx argblock, int flags,
	       int variable_size ATTRIBUTE_UNUSED, int reg_parm_stack_space)
{
  tree pval = arg->tree_value;
  rtx reg = 0;
  int partial = 0;
  poly_int64 used = 0;
  /* Byte bounds of the pre-allocated stack region this store writes,
     relative to ARGBLOCK; computed below and fed to
     stack_region_maybe_used_p / mark_stack_region_used.  */
  poly_int64 lower_bound = 0, upper_bound = 0;
  int sibcall_failure = 0;

  /* An erroneous argument was already diagnosed; just report sibcall
     failure to the caller.  */
  if (TREE_CODE (pval) == ERROR_MARK)
    return 1;

  /* Push a new temporary level for any temporaries we make for
     this argument.  */
  push_temp_slots ();

  if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL))
    {
      /* If this is being stored into a pre-allocated, fixed-size, stack area,
	 save any previous data at that location.  */
      if (argblock && ! variable_size && arg->stack)
	{
	  if (ARGS_GROW_DOWNWARD)
	    {
	      /* stack_slot is negative, but we want to index stack_usage_map
		 with positive values.  */
	      if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
		{
		  rtx offset = XEXP (XEXP (arg->stack_slot, 0), 1);
		  upper_bound = -rtx_to_poly_int64 (offset) + 1;
		}
	      else
		upper_bound = 0;

	      lower_bound = upper_bound - arg->locate.size.constant;
	    }
	  else
	    {
	      if (GET_CODE (XEXP (arg->stack_slot, 0)) == PLUS)
		{
		  rtx offset = XEXP (XEXP (arg->stack_slot, 0), 1);
		  lower_bound = rtx_to_poly_int64 (offset);
		}
	      else
		lower_bound = 0;

	      upper_bound = lower_bound + arg->locate.size.constant;
	    }

	  if (stack_region_maybe_used_p (lower_bound, upper_bound,
					 reg_parm_stack_space))
	    {
	      /* We need to make a save area.  Use an integer mode wide
		 enough to hold the whole slot if one exists, else BLKmode.  */
	      poly_uint64 size = arg->locate.size.constant * BITS_PER_UNIT;
	      machine_mode save_mode
		= int_mode_for_size (size, 1).else_blk ();
	      rtx adr = memory_address (save_mode, XEXP (arg->stack_slot, 0));
	      rtx stack_area = gen_rtx_MEM (save_mode, adr);

	      if (save_mode == BLKmode)
		{
		  arg->save_area
		    = assign_temp (TREE_TYPE (arg->tree_value), 1, 1);
		  preserve_temp_slots (arg->save_area);
		  emit_block_move (validize_mem (copy_rtx (arg->save_area)),
				   stack_area,
				   (gen_int_mode
				    (arg->locate.size.constant, Pmode)),
				   BLOCK_OP_CALL_PARM);
		}
	      else
		{
		  arg->save_area = gen_reg_rtx (save_mode);
		  emit_move_insn (arg->save_area, stack_area);
		}
	    }
	}
    }

  /* If this isn't going to be placed on both the stack and in registers,
     set up the register and number of words.  */
  if (! arg->pass_on_stack)
    {
      if (flags & ECF_SIBCALL)
	reg = arg->tail_call_reg;
      else
	reg = arg->reg;
      partial = arg->partial;
    }

  /* Being passed entirely in a register.  We shouldn't be called in
     this case.  */
  gcc_assert (reg == 0 || partial != 0);

  /* If this arg needs special alignment, don't load the registers
     here.  */
  if (arg->n_aligned_regs != 0)
    reg = 0;

  /* If this is being passed partially in a register, we can't evaluate
     it directly into its stack slot.  Otherwise, we can.  */
  if (arg->value == 0)
    {
      /* stack_arg_under_construction is nonzero if a function argument is
	 being evaluated directly into the outgoing argument list and
	 expand_call must take special action to preserve the argument list
	 if it is called recursively.

	 For scalar function arguments stack_usage_map is sufficient to
	 determine which stack slots must be saved and restored.  Scalar
	 arguments in general have pass_on_stack == 0.

	 If this argument is initialized by a function which takes the
	 address of the argument (a C++ constructor or a C function
	 returning a BLKmode structure), then stack_usage_map is
	 insufficient and expand_call must push the stack around the
	 function call.  Such arguments have pass_on_stack == 1.

	 Note that it is always safe to set stack_arg_under_construction,
	 but this generates suboptimal code if set when not needed.  */

      if (arg->pass_on_stack)
	stack_arg_under_construction++;

      arg->value = expand_expr (pval,
				(partial
				 || TYPE_MODE (TREE_TYPE (pval)) != arg->mode)
				? NULL_RTX : arg->stack,
				VOIDmode, EXPAND_STACK_PARM);

      /* If we are promoting object (or for any other reason) the mode
	 doesn't agree, convert the mode.  */

      if (arg->mode != TYPE_MODE (TREE_TYPE (pval)))
	arg->value = convert_modes (arg->mode, TYPE_MODE (TREE_TYPE (pval)),
				    arg->value, arg->unsignedp);

      if (arg->pass_on_stack)
	stack_arg_under_construction--;
    }

  /* Check for overlap with already clobbered argument area.  */
  if ((flags & ECF_SIBCALL)
      && MEM_P (arg->value)
      && mem_might_overlap_already_clobbered_arg_p (XEXP (arg->value, 0),
						    arg->locate.size.constant))
    sibcall_failure = 1;

  /* Don't allow anything left on stack from computation
     of argument to alloca.  */
  if (flags & ECF_MAY_BE_ALLOCA)
    do_pending_stack_adjust ();

  if (arg->value == arg->stack)
    /* If the value is already in the stack slot, we are done.  */
    ;
  else if (arg->mode != BLKmode)
    {
      unsigned int parm_align;

      /* Argument is a scalar, not entirely passed in registers.
	 (If part is passed in registers, arg->partial says how much
	 and emit_push_insn will take care of putting it there.)

	 Push it, and if its size is less than the
	 amount of space allocated to it,
	 also bump stack pointer by the additional space.
	 Note that in C the default argument promotions
	 will prevent such mismatches.  */

      /* Empty types occupy no stack space at all.  */
      poly_int64 size = (TYPE_EMPTY_P (TREE_TYPE (pval))
			 ? 0 : GET_MODE_SIZE (arg->mode));

      /* Compute how much space the push instruction will push.
	 On many machines, pushing a byte will advance the stack
	 pointer by a halfword.  */
#ifdef PUSH_ROUNDING
      size = PUSH_ROUNDING (size);
#endif
      used = size;

      /* Compute how much space the argument should get:
	 round up to a multiple of the alignment for arguments.  */
      if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
	  != PAD_NONE)
	/* At the moment we don't (need to) support ABIs for which the
	   padding isn't known at compile time.  In principle it should
	   be easy to add though.  */
	used = force_align_up (size, PARM_BOUNDARY / BITS_PER_UNIT);

      /* Compute the alignment of the pushed argument.  */
      parm_align = arg->locate.boundary;
      if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
	  == PAD_DOWNWARD)
	{
	  /* Padding below the value reduces the usable alignment to the
	     largest power of two dividing the pad amount.  */
	  poly_int64 pad = used - size;
	  unsigned int pad_align = known_alignment (pad) * BITS_PER_UNIT;
	  if (pad_align != 0)
	    parm_align = MIN (parm_align, pad_align);
	}

      /* This isn't already where we want it on the stack, so put it there.
	 This can either be done with push or copy insns.  */
      if (maybe_ne (used, 0)
	  && !emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval),
			      NULL_RTX, parm_align, partial, reg, used - size,
			      argblock, ARGS_SIZE_RTX (arg->locate.offset),
			      reg_parm_stack_space,
			      ARGS_SIZE_RTX (arg->locate.alignment_pad), true))
	sibcall_failure = 1;

      /* Unless this is a partially-in-register argument, the argument is now
	 in the stack.  */
      if (partial == 0)
	arg->value = arg->stack;
    }
  else
    {
      /* BLKmode, at least partly to be pushed.  */

      unsigned int parm_align;
      poly_int64 excess;
      rtx size_rtx;

      /* Pushing a nonscalar.
	 If part is passed in registers, PARTIAL says how much
	 and emit_push_insn will take care of putting it there.  */

      /* Round its size up to a multiple
	 of the allocation unit for arguments.  */

      if (arg->locate.size.var != 0)
	{
	  excess = 0;
	  size_rtx = ARGS_SIZE_RTX (arg->locate.size);
	}
      else
	{
	  /* PUSH_ROUNDING has no effect on us, because emit_push_insn
	     for BLKmode is careful to avoid it.  */
	  excess = (arg->locate.size.constant
		    - arg_int_size_in_bytes (TREE_TYPE (pval))
		    + partial);
	  size_rtx = expand_expr (arg_size_in_bytes (TREE_TYPE (pval)),
				  NULL_RTX, TYPE_MODE (sizetype),
				  EXPAND_NORMAL);
	}

      parm_align = arg->locate.boundary;

      /* When an argument is padded down, the block is aligned to
	 PARM_BOUNDARY, but the actual argument isn't.  */
      if (targetm.calls.function_arg_padding (arg->mode, TREE_TYPE (pval))
	  == PAD_DOWNWARD)
	{
	  if (arg->locate.size.var)
	    parm_align = BITS_PER_UNIT;
	  else
	    {
	      unsigned int excess_align
		= known_alignment (excess) * BITS_PER_UNIT;
	      if (excess_align != 0)
		parm_align = MIN (parm_align, excess_align);
	    }
	}

      if ((flags & ECF_SIBCALL) && MEM_P (arg->value))
	{
	  /* emit_push_insn might not work properly if arg->value and
	     argblock + arg->locate.offset areas overlap.  */
	  rtx x = arg->value;
	  poly_int64 i = 0;

	  if (strip_offset (XEXP (x, 0), &i)
	      == crtl->args.internal_arg_pointer)
	    {
	      /* arg.locate doesn't contain the pretend_args_size offset,
		 it's part of argblock.  Ensure we don't count it in I.  */
	      if (STACK_GROWS_DOWNWARD)
		i -= crtl->args.pretend_args_size;
	      else
		i += crtl->args.pretend_args_size;

	      /* expand_call should ensure this.  */
	      gcc_assert (!arg->locate.offset.var
			  && arg->locate.size.var == 0);
	      poly_int64 size_val = rtx_to_poly_int64 (size_rtx);

	      if (known_eq (arg->locate.offset.constant, i))
		{
		  /* Even though they appear to be at the same location,
		     if part of the outgoing argument is in registers,
		     they aren't really at the same location.  Check for
		     this by making sure that the incoming size is the
		     same as the outgoing size.  */
		  if (maybe_ne (arg->locate.size.constant, size_val))
		    sibcall_failure = 1;
		}
	      else if (maybe_in_range_p (arg->locate.offset.constant,
					 i, size_val))
		sibcall_failure = 1;
	      /* Use arg->locate.size.constant instead of size_rtx
		 because we only care about the part of the argument
		 on the stack.  */
	      else if (maybe_in_range_p (i, arg->locate.offset.constant,
					 arg->locate.size.constant))
		sibcall_failure = 1;
	    }
	}

      /* Skip the push entirely for a known-zero size.  */
      if (!CONST_INT_P (size_rtx) || INTVAL (size_rtx) != 0)
	emit_push_insn (arg->value, arg->mode, TREE_TYPE (pval), size_rtx,
			parm_align, partial, reg, excess, argblock,
			ARGS_SIZE_RTX (arg->locate.offset),
			reg_parm_stack_space,
			ARGS_SIZE_RTX (arg->locate.alignment_pad), false);

      /* Unless this is a partially-in-register argument, the argument is now
	 in the stack.

	 ??? Unlike the case above, in which we want the actual
	 address of the data, so that we can load it directly into a
	 register, here we want the address of the stack slot, so that
	 it's properly aligned for word-by-word copying or something
	 like that.  It's not clear that this is always correct.  */
      if (partial == 0)
	arg->value = arg->stack_slot;
    }

  /* For a register PARALLEL, pre-load the pieces into pseudos now.  */
  if (arg->reg && GET_CODE (arg->reg) == PARALLEL)
    {
      tree type = TREE_TYPE (arg->tree_value);
      arg->parallel_value
	= emit_group_load_into_temps (arg->reg, arg->value, type,
				      int_size_in_bytes (type));
    }

  /* Mark all slots this store used.  */
  if (ACCUMULATE_OUTGOING_ARGS && !(flags & ECF_SIBCALL)
      && argblock && ! variable_size && arg->stack)
    mark_stack_region_used (lower_bound, upper_bound);

  /* Once we have pushed something, pops can't safely
     be deferred during the rest of the arguments.  */
  NO_DEFER_POP;

  /* Free any temporary slots made in processing this argument.  */
  pop_temp_slots ();

  return sibcall_failure;
}
618710d565efSmrg 
6188*ec02198aSmrg /* Nonzero if we do not know how to pass ARG solely in registers.  */
618910d565efSmrg 
619010d565efSmrg bool
must_pass_in_stack_var_size(const function_arg_info & arg)6191*ec02198aSmrg must_pass_in_stack_var_size (const function_arg_info &arg)
619210d565efSmrg {
6193*ec02198aSmrg   if (!arg.type)
619410d565efSmrg     return false;
619510d565efSmrg 
619610d565efSmrg   /* If the type has variable size...  */
6197*ec02198aSmrg   if (!poly_int_tree_p (TYPE_SIZE (arg.type)))
619810d565efSmrg     return true;
619910d565efSmrg 
620010d565efSmrg   /* If the type is marked as addressable (it is required
620110d565efSmrg      to be constructed into the stack)...  */
6202*ec02198aSmrg   if (TREE_ADDRESSABLE (arg.type))
620310d565efSmrg     return true;
620410d565efSmrg 
620510d565efSmrg   return false;
620610d565efSmrg }
620710d565efSmrg 
620810d565efSmrg /* Another version of the TARGET_MUST_PASS_IN_STACK hook.  This one
620910d565efSmrg    takes trailing padding of a structure into account.  */
621010d565efSmrg /* ??? Should be able to merge these two by examining BLOCK_REG_PADDING.  */
621110d565efSmrg 
621210d565efSmrg bool
must_pass_in_stack_var_size_or_pad(const function_arg_info & arg)6213*ec02198aSmrg must_pass_in_stack_var_size_or_pad (const function_arg_info &arg)
621410d565efSmrg {
6215*ec02198aSmrg   if (!arg.type)
621610d565efSmrg     return false;
621710d565efSmrg 
621810d565efSmrg   /* If the type has variable size...  */
6219*ec02198aSmrg   if (TREE_CODE (TYPE_SIZE (arg.type)) != INTEGER_CST)
622010d565efSmrg     return true;
622110d565efSmrg 
622210d565efSmrg   /* If the type is marked as addressable (it is required
622310d565efSmrg      to be constructed into the stack)...  */
6224*ec02198aSmrg   if (TREE_ADDRESSABLE (arg.type))
622510d565efSmrg     return true;
622610d565efSmrg 
6227*ec02198aSmrg   if (TYPE_EMPTY_P (arg.type))
6228c7a68eb7Smrg     return false;
6229c7a68eb7Smrg 
623010d565efSmrg   /* If the padding and mode of the type is such that a copy into
623110d565efSmrg      a register would put it into the wrong part of the register.  */
6232*ec02198aSmrg   if (arg.mode == BLKmode
6233*ec02198aSmrg       && int_size_in_bytes (arg.type) % (PARM_BOUNDARY / BITS_PER_UNIT)
6234*ec02198aSmrg       && (targetm.calls.function_arg_padding (arg.mode, arg.type)
6235c7a68eb7Smrg 	  == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
623610d565efSmrg     return true;
623710d565efSmrg 
623810d565efSmrg   return false;
623910d565efSmrg }
624010d565efSmrg 
6241*ec02198aSmrg /* Return true if TYPE must be passed on the stack when passed to
6242*ec02198aSmrg    the "..." arguments of a function.  */
6243*ec02198aSmrg 
6244*ec02198aSmrg bool
must_pass_va_arg_in_stack(tree type)6245*ec02198aSmrg must_pass_va_arg_in_stack (tree type)
6246*ec02198aSmrg {
6247*ec02198aSmrg   function_arg_info arg (type, /*named=*/false);
6248*ec02198aSmrg   return targetm.calls.must_pass_in_stack (arg);
6249*ec02198aSmrg }
6250*ec02198aSmrg 
6251*ec02198aSmrg /* Return true if FIELD is the C++17 empty base field that should
6252*ec02198aSmrg    be ignored for ABI calling convention decisions in order to
6253*ec02198aSmrg    maintain ABI compatibility between C++14 and earlier, which doesn't
6254*ec02198aSmrg    add this FIELD to classes with empty bases, and C++17 and later
6255*ec02198aSmrg    which does.  */
6256*ec02198aSmrg 
6257*ec02198aSmrg bool
cxx17_empty_base_field_p(const_tree field)6258*ec02198aSmrg cxx17_empty_base_field_p (const_tree field)
6259*ec02198aSmrg {
6260*ec02198aSmrg   return (DECL_FIELD_ABI_IGNORED (field)
6261*ec02198aSmrg 	  && DECL_ARTIFICIAL (field)
6262*ec02198aSmrg 	  && RECORD_OR_UNION_TYPE_P (TREE_TYPE (field))
6263*ec02198aSmrg 	  && !lookup_attribute ("no_unique_address", DECL_ATTRIBUTES (field)));
6264*ec02198aSmrg }
6265*ec02198aSmrg 
626610d565efSmrg /* Tell the garbage collector about GTY markers in this source file.  */
626710d565efSmrg #include "gt-calls.h"
6268