xref: /dragonfly/contrib/gcc-8.0/gcc/function.c (revision 58e805e6)
138fd1498Szrj /* Expands front end tree to back end RTL for GCC.
238fd1498Szrj    Copyright (C) 1987-2018 Free Software Foundation, Inc.
338fd1498Szrj 
438fd1498Szrj This file is part of GCC.
538fd1498Szrj 
638fd1498Szrj GCC is free software; you can redistribute it and/or modify it under
738fd1498Szrj the terms of the GNU General Public License as published by the Free
838fd1498Szrj Software Foundation; either version 3, or (at your option) any later
938fd1498Szrj version.
1038fd1498Szrj 
1138fd1498Szrj GCC is distributed in the hope that it will be useful, but WITHOUT ANY
1238fd1498Szrj WARRANTY; without even the implied warranty of MERCHANTABILITY or
1338fd1498Szrj FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
1438fd1498Szrj for more details.
1538fd1498Szrj 
1638fd1498Szrj You should have received a copy of the GNU General Public License
1738fd1498Szrj along with GCC; see the file COPYING3.  If not see
1838fd1498Szrj <http://www.gnu.org/licenses/>.  */
1938fd1498Szrj 
2038fd1498Szrj /* This file handles the generation of rtl code from tree structure
2138fd1498Szrj    at the level of the function as a whole.
2238fd1498Szrj    It creates the rtl expressions for parameters and auto variables
2338fd1498Szrj    and has full responsibility for allocating stack slots.
2438fd1498Szrj 
2538fd1498Szrj    `expand_function_start' is called at the beginning of a function,
2638fd1498Szrj    before the function body is parsed, and `expand_function_end' is
2738fd1498Szrj    called after parsing the body.
2838fd1498Szrj 
2938fd1498Szrj    Call `assign_stack_local' to allocate a stack slot for a local variable.
3038fd1498Szrj    This is usually done during the RTL generation for the function body,
3138fd1498Szrj    but it can also be done in the reload pass when a pseudo-register does
3238fd1498Szrj    not get a hard register.  */
3338fd1498Szrj 
3438fd1498Szrj #include "config.h"
3538fd1498Szrj #include "system.h"
3638fd1498Szrj #include "coretypes.h"
3738fd1498Szrj #include "backend.h"
3838fd1498Szrj #include "target.h"
3938fd1498Szrj #include "rtl.h"
4038fd1498Szrj #include "tree.h"
4138fd1498Szrj #include "gimple-expr.h"
4238fd1498Szrj #include "cfghooks.h"
4338fd1498Szrj #include "df.h"
4438fd1498Szrj #include "memmodel.h"
4538fd1498Szrj #include "tm_p.h"
4638fd1498Szrj #include "stringpool.h"
4738fd1498Szrj #include "expmed.h"
4838fd1498Szrj #include "optabs.h"
4938fd1498Szrj #include "regs.h"
5038fd1498Szrj #include "emit-rtl.h"
5138fd1498Szrj #include "recog.h"
5238fd1498Szrj #include "rtl-error.h"
5338fd1498Szrj #include "alias.h"
5438fd1498Szrj #include "fold-const.h"
5538fd1498Szrj #include "stor-layout.h"
5638fd1498Szrj #include "varasm.h"
5738fd1498Szrj #include "except.h"
5838fd1498Szrj #include "dojump.h"
5938fd1498Szrj #include "explow.h"
6038fd1498Szrj #include "calls.h"
6138fd1498Szrj #include "expr.h"
6238fd1498Szrj #include "optabs-tree.h"
6338fd1498Szrj #include "output.h"
6438fd1498Szrj #include "langhooks.h"
6538fd1498Szrj #include "common/common-target.h"
6638fd1498Szrj #include "gimplify.h"
6738fd1498Szrj #include "tree-pass.h"
6838fd1498Szrj #include "cfgrtl.h"
6938fd1498Szrj #include "cfganal.h"
7038fd1498Szrj #include "cfgbuild.h"
7138fd1498Szrj #include "cfgcleanup.h"
7238fd1498Szrj #include "cfgexpand.h"
7338fd1498Szrj #include "shrink-wrap.h"
7438fd1498Szrj #include "toplev.h"
7538fd1498Szrj #include "rtl-iter.h"
7638fd1498Szrj #include "tree-chkp.h"
7738fd1498Szrj #include "rtl-chkp.h"
7838fd1498Szrj #include "tree-dfa.h"
7938fd1498Szrj #include "tree-ssa.h"
8038fd1498Szrj #include "stringpool.h"
8138fd1498Szrj #include "attribs.h"
8238fd1498Szrj #include "gimple.h"
8338fd1498Szrj #include "options.h"
8438fd1498Szrj 
/* So we can assign to cfun in this file.  */
#undef cfun

/* Whether stack slots must be kept aligned even when nothing visible
   requires it; targets may predefine this to 0 to pack the frame.  */
#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

/* The stack boundary expressed in bytes rather than bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* Round VALUE down to the largest multiple of ALIGN that is less than
   or equal to it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
#define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))

/* Similar, but round up to the next multiple of ALIGN that meets the
   alignment.  */
#define CEIL_ROUND(VALUE,ALIGN)	(((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
10238fd1498Szrj 
/* Nonzero once virtual register instantiation has been done.
   assign_stack_local uses frame_pointer_rtx when this is nonzero.
   calls.c:emit_library_call_value_1 uses it to set up
   post-instantiation libcalls.  */
int virtuals_instantiated;

/* Assign unique numbers to labels generated for profiling, debugging, etc.  */
static GTY(()) int funcdef_no;

/* These variables hold pointers to functions to create and destroy
   target specific, per-function data structures.  */
struct machine_function * (*init_machine_status) (void);

/* The currently compiled function.  */
struct function *cfun = 0;

/* These hashes record the prologue and epilogue insns.  */

/* Hasher keyed on insn identity: membership ("is this insn part of the
   prologue/epilogue?") is all the caches below need, so hash on the
   pointer and compare with pointer equality.  */
struct insn_cache_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x) { return htab_hash_pointer (x); }
  static bool equal (rtx a, rtx b) { return a == b; }
};

static GTY((cache))
  hash_table<insn_cache_hasher> *prologue_insn_hash;
static GTY((cache))
  hash_table<insn_cache_hasher> *epilogue_insn_hash;


/* Map from types to the variables declared with those types.
   NOTE(review): consumers live outside this file -- confirm against
   users of types_used_by_vars_hash before relying on details.  */
hash_table<used_type_hasher> *types_used_by_vars_hash = NULL;
vec<tree, va_gc> *types_used_by_cur_var_decl;
13538fd1498Szrj 
/* Forward declarations.  */

static struct temp_slot *find_temp_slot_from_address (rtx);
static void pad_to_arg_alignment (struct args_size *, int, struct args_size *);
static void pad_below (struct args_size *, machine_mode, tree);
static void reorder_blocks_1 (rtx_insn *, tree, vec<tree> *);
static int all_blocks (tree, tree *);
static tree *get_block_vector (tree, int *);
extern tree debug_find_var_in_block_tree (tree, tree);
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
static void record_insns (rtx_insn *, rtx, hash_table<insn_cache_hasher> **)
     ATTRIBUTE_UNUSED;
static bool contains (const rtx_insn *, hash_table<insn_cache_hasher> *);
static void prepare_function_start (void);
static void do_clobber_return_reg (rtx, void *);
static void do_use_return_reg (rtx, void *);


/* Stack of nested functions: the saved cfun values pushed by
   push_function_context and restored by pop_function_context.  */

static vec<function *> function_context_stack;
15938fd1498Szrj 
16038fd1498Szrj /* Save the current context for compilation of a nested function.
16138fd1498Szrj    This is called from language-specific code.  */
16238fd1498Szrj 
16338fd1498Szrj void
push_function_context(void)16438fd1498Szrj push_function_context (void)
16538fd1498Szrj {
16638fd1498Szrj   if (cfun == 0)
16738fd1498Szrj     allocate_struct_function (NULL, false);
16838fd1498Szrj 
16938fd1498Szrj   function_context_stack.safe_push (cfun);
17038fd1498Szrj   set_cfun (NULL);
17138fd1498Szrj }
17238fd1498Szrj 
17338fd1498Szrj /* Restore the last saved context, at the end of a nested function.
17438fd1498Szrj    This function is called from language-specific code.  */
17538fd1498Szrj 
17638fd1498Szrj void
pop_function_context(void)17738fd1498Szrj pop_function_context (void)
17838fd1498Szrj {
17938fd1498Szrj   struct function *p = function_context_stack.pop ();
18038fd1498Szrj   set_cfun (p);
18138fd1498Szrj   current_function_decl = p->decl;
18238fd1498Szrj 
18338fd1498Szrj   /* Reset variables that have known state during rtx generation.  */
18438fd1498Szrj   virtuals_instantiated = 0;
18538fd1498Szrj   generating_concat_p = 1;
18638fd1498Szrj }
18738fd1498Szrj 
18838fd1498Szrj /* Clear out all parts of the state in F that can safely be discarded
18938fd1498Szrj    after the function has been parsed, but not compiled, to let
19038fd1498Szrj    garbage collection reclaim the memory.  */
19138fd1498Szrj 
19238fd1498Szrj void
free_after_parsing(struct function * f)19338fd1498Szrj free_after_parsing (struct function *f)
19438fd1498Szrj {
19538fd1498Szrj   f->language = 0;
19638fd1498Szrj }
19738fd1498Szrj 
/* Clear out all parts of the state in F that can safely be discarded
   after the function has been compiled, to let garbage collection
   reclaim the memory.  */

void
free_after_compilation (struct function *f)
{
  /* The prologue/epilogue caches refer to insns of F; drop them.  */
  prologue_insn_hash = NULL;
  epilogue_insn_hash = NULL;

  /* regno_pointer_align is heap-allocated, so free it before wiping
     the rtl_data that points at it.  */
  free (crtl->emit.regno_pointer_align);

  memset (crtl, 0, sizeof (struct rtl_data));
  f->eh = NULL;
  f->machine = NULL;
  f->cfg = NULL;
  /* The CFG is gone, so the property flag must not claim otherwise.  */
  f->curr_properties &= ~PROP_cfg;

  regno_reg_rtx = NULL;
}
21838fd1498Szrj 
21938fd1498Szrj /* Return size needed for stack frame based on slots so far allocated.
22038fd1498Szrj    This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
22138fd1498Szrj    the caller may have to do that.  */
22238fd1498Szrj 
22338fd1498Szrj poly_int64
get_frame_size(void)22438fd1498Szrj get_frame_size (void)
22538fd1498Szrj {
22638fd1498Szrj   if (FRAME_GROWS_DOWNWARD)
22738fd1498Szrj     return -frame_offset;
22838fd1498Szrj   else
22938fd1498Szrj     return frame_offset;
23038fd1498Szrj }
23138fd1498Szrj 
/* Issue an error message and return TRUE if frame OFFSET overflows in
   the signed target pointer arithmetics for function FUNC.  Otherwise
   return FALSE.  */

bool
frame_offset_overflow (poly_int64 offset, tree func)
{
  /* A downward-growing frame uses negative offsets; flip the sign so
     SIZE is the number of bytes used.  */
  poly_uint64 size = FRAME_GROWS_DOWNWARD ? -offset : offset;
  /* Largest frame representable with signed Pmode arithmetic ...  */
  unsigned HOST_WIDE_INT limit
    = ((HOST_WIDE_INT_1U << (GET_MODE_BITSIZE (Pmode) - 1))
       /* Leave room for the fixed part of the frame.  */
       - 64 * UNITS_PER_WORD);

  /* Every coefficient of the poly size must lie in [0, limit].  */
  if (!coeffs_in_range_p (size, 0U, limit))
    {
      error_at (DECL_SOURCE_LOCATION (func),
		"total size of local objects too large");
      return true;
    }

  return false;
}
25438fd1498Szrj 
/* Return the minimum spill slot alignment for a register of mode MODE.  */

unsigned int
spill_slot_alignment (machine_mode mode ATTRIBUTE_UNUSED)
{
  /* Defer to the target macro; a spill slot has no associated tree
     type, hence NULL_TREE.  */
  return STACK_SLOT_ALIGNMENT (NULL_TREE, mode, GET_MODE_ALIGNMENT (mode));
}
26238fd1498Szrj 
26338fd1498Szrj /* Return stack slot alignment in bits for TYPE and MODE.  */
26438fd1498Szrj 
26538fd1498Szrj static unsigned int
get_stack_local_alignment(tree type,machine_mode mode)26638fd1498Szrj get_stack_local_alignment (tree type, machine_mode mode)
26738fd1498Szrj {
26838fd1498Szrj   unsigned int alignment;
26938fd1498Szrj 
27038fd1498Szrj   if (mode == BLKmode)
27138fd1498Szrj     alignment = BIGGEST_ALIGNMENT;
27238fd1498Szrj   else
27338fd1498Szrj     alignment = GET_MODE_ALIGNMENT (mode);
27438fd1498Szrj 
27538fd1498Szrj   /* Allow the frond-end to (possibly) increase the alignment of this
27638fd1498Szrj      stack slot.  */
27738fd1498Szrj   if (! type)
27838fd1498Szrj     type = lang_hooks.types.type_for_mode (mode, 0);
27938fd1498Szrj 
28038fd1498Szrj   return STACK_SLOT_ALIGNMENT (type, mode, alignment);
28138fd1498Szrj }
28238fd1498Szrj 
/* Determine whether it is possible to fit a stack slot of size SIZE and
   alignment ALIGNMENT into an area in the stack frame that starts at
   frame offset START and has a length of LENGTH.  If so, store the frame
   offset to be used for the stack slot in *POFFSET and return true;
   return false otherwise.  This function will extend the frame size when
   given a start/length pair that lies at the end of the frame.  */

static bool
try_fit_stack_local (poly_int64 start, poly_int64 length,
		     poly_int64 size, unsigned int alignment,
		     poly_int64_pod *poffset)
{
  poly_int64 this_frame_offset;
  int frame_off, frame_alignment, frame_phase;

  /* Calculate how many bytes the start of local variables is off from
     stack alignment.  */
  frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  frame_off = targetm.starting_frame_offset () % frame_alignment;
  frame_phase = frame_off ? frame_alignment - frame_off : 0;

  /* Round the frame offset to the specified alignment.  The phase
     correction keeps the slot aligned relative to the true stack
     address, not just relative to offset zero.  */

  if (FRAME_GROWS_DOWNWARD)
    this_frame_offset
      = (aligned_lower_bound (start + length - size - frame_phase, alignment)
	 + frame_phase);
  else
    this_frame_offset
      = aligned_upper_bound (start - frame_phase, alignment) + frame_phase;

  /* See if it fits.  If this space is at the edge of the frame,
     consider extending the frame to make it fit.  Our caller relies on
     this when allocating a new slot.  */
  if (maybe_lt (this_frame_offset, start))
    {
      /* Slot would begin before the area: only allowed when the area
	 starts exactly at the current frame boundary, which we may
	 then move.  */
      if (known_eq (frame_offset, start))
	frame_offset = this_frame_offset;
      else
	return false;
    }
  else if (maybe_gt (this_frame_offset + size, start + length))
    {
      /* Slot would run past the area: likewise only allowed when the
	 area ends exactly at the current frame boundary.  */
      if (known_eq (frame_offset, start + length))
	frame_offset = this_frame_offset + size;
      else
	return false;
    }

  *poffset = this_frame_offset;
  return true;
}
33538fd1498Szrj 
33638fd1498Szrj /* Create a new frame_space structure describing free space in the stack
33738fd1498Szrj    frame beginning at START and ending at END, and chain it into the
33838fd1498Szrj    function's frame_space_list.  */
33938fd1498Szrj 
34038fd1498Szrj static void
add_frame_space(poly_int64 start,poly_int64 end)34138fd1498Szrj add_frame_space (poly_int64 start, poly_int64 end)
34238fd1498Szrj {
34338fd1498Szrj   struct frame_space *space = ggc_alloc<frame_space> ();
34438fd1498Szrj   space->next = crtl->frame_space_list;
34538fd1498Szrj   crtl->frame_space_list = space;
34638fd1498Szrj   space->start = start;
34738fd1498Szrj   space->length = end - start;
34838fd1498Szrj }
34938fd1498Szrj 
35038fd1498Szrj /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
35138fd1498Szrj    with machine mode MODE.
35238fd1498Szrj 
35338fd1498Szrj    ALIGN controls the amount of alignment for the address of the slot:
35438fd1498Szrj    0 means according to MODE,
35538fd1498Szrj    -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
35638fd1498Szrj    -2 means use BITS_PER_UNIT,
35738fd1498Szrj    positive specifies alignment boundary in bits.
35838fd1498Szrj 
35938fd1498Szrj    KIND has ASLK_REDUCE_ALIGN bit set if it is OK to reduce
36038fd1498Szrj    alignment and ASLK_RECORD_PAD bit set if we should remember
36138fd1498Szrj    extra space we allocated for alignment purposes.  When we are
36238fd1498Szrj    called from assign_stack_temp_for_type, it is not set so we don't
36338fd1498Szrj    track the same stack slot in two independent lists.
36438fd1498Szrj 
36538fd1498Szrj    We do not round to stack_boundary here.  */
36638fd1498Szrj 
rtx
assign_stack_local_1 (machine_mode mode, poly_int64 size,
		      int align, int kind)
{
  rtx x, addr;
  poly_int64 bigend_correction = 0;
  poly_int64 slot_offset = 0, old_frame_offset;
  unsigned int alignment, alignment_in_bits;

  /* Translate the ALIGN convention (0 / -1 / -2 / positive bits, see
     the comment above) into a byte alignment.  */
  if (align == 0)
    {
      alignment = get_stack_local_alignment (NULL, mode);
      alignment /= BITS_PER_UNIT;
    }
  else if (align == -1)
    {
      alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
      size = aligned_upper_bound (size, alignment);
    }
  else if (align == -2)
    alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
  else
    alignment = align / BITS_PER_UNIT;

  alignment_in_bits = alignment * BITS_PER_UNIT;

  /* Ignore alignment if it exceeds MAX_SUPPORTED_STACK_ALIGNMENT.  */
  if (alignment_in_bits > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      alignment_in_bits = MAX_SUPPORTED_STACK_ALIGNMENT;
      alignment = alignment_in_bits / BITS_PER_UNIT;
    }

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (crtl->stack_alignment_estimated < alignment_in_bits)
	{
          if (!crtl->stack_realign_processed)
	    crtl->stack_alignment_estimated = alignment_in_bits;
          else
	    {
	      /* If stack is realigned and stack alignment value
		 hasn't been finalized, it is OK not to increase
		 stack_alignment_estimated.  The bigger alignment
		 requirement is recorded in stack_alignment_needed
		 below.  */
	      gcc_assert (!crtl->stack_realign_finalized);
	      if (!crtl->stack_realign_needed)
		{
		  /* It is OK to reduce the alignment as long as the
		     requested size is 0 or the estimated stack
		     alignment >= mode alignment.  */
		  gcc_assert ((kind & ASLK_REDUCE_ALIGN)
			      || known_eq (size, 0)
			      || (crtl->stack_alignment_estimated
				  >= GET_MODE_ALIGNMENT (mode)));
		  alignment_in_bits = crtl->stack_alignment_estimated;
		  alignment = alignment_in_bits / BITS_PER_UNIT;
		}
	    }
	}
    }

  /* Record the strictest alignment any slot has required so far.  */
  if (crtl->stack_alignment_needed < alignment_in_bits)
    crtl->stack_alignment_needed = alignment_in_bits;
  if (crtl->max_used_stack_slot_alignment < alignment_in_bits)
    crtl->max_used_stack_slot_alignment = alignment_in_bits;

  if (mode != BLKmode || maybe_ne (size, 0))
    {
      if (kind & ASLK_RECORD_PAD)
	{
	  struct frame_space **psp;

	  /* Try to reuse a previously recorded hole in the frame
	     before growing it.  */
	  for (psp = &crtl->frame_space_list; *psp; psp = &(*psp)->next)
	    {
	      struct frame_space *space = *psp;
	      if (!try_fit_stack_local (space->start, space->length, size,
					alignment, &slot_offset))
		continue;
	      /* Unlink the hole and give back any leading/trailing
		 leftover pieces as new, smaller holes.  */
	      *psp = space->next;
	      if (known_gt (slot_offset, space->start))
		add_frame_space (space->start, slot_offset);
	      if (known_lt (slot_offset + size, space->start + space->length))
		add_frame_space (slot_offset + size,
				 space->start + space->length);
	      goto found_space;
	    }
	}
    }
  else if (!STACK_ALIGNMENT_NEEDED)
    {
      slot_offset = frame_offset;
      goto found_space;
    }

  /* No reusable hole: extend the frame in the direction it grows.  */
  old_frame_offset = frame_offset;

  if (FRAME_GROWS_DOWNWARD)
    {
      frame_offset -= size;
      try_fit_stack_local (frame_offset, size, size, alignment, &slot_offset);

      if (kind & ASLK_RECORD_PAD)
	{
	  if (known_gt (slot_offset, frame_offset))
	    add_frame_space (frame_offset, slot_offset);
	  if (known_lt (slot_offset + size, old_frame_offset))
	    add_frame_space (slot_offset + size, old_frame_offset);
	}
    }
  else
    {
      frame_offset += size;
      try_fit_stack_local (old_frame_offset, size, size, alignment, &slot_offset);

      if (kind & ASLK_RECORD_PAD)
	{
	  if (known_gt (slot_offset, old_frame_offset))
	    add_frame_space (old_frame_offset, slot_offset);
	  if (known_lt (slot_offset + size, frame_offset))
	    add_frame_space (slot_offset + size, frame_offset);
	}
    }

 found_space:
  /* On a big-endian machine, if we are allocating more space than we will use,
     use the least significant bytes of those that are allocated.  */
  if (mode != BLKmode)
    {
      /* The slot size can sometimes be smaller than the mode size;
	 e.g. the rs6000 port allocates slots with a vector mode
	 that have the size of only one element.  However, the slot
	 size must always be ordered wrt to the mode size, in the
	 same way as for a subreg.  */
      gcc_checking_assert (ordered_p (GET_MODE_SIZE (mode), size));
      if (BYTES_BIG_ENDIAN && maybe_lt (GET_MODE_SIZE (mode), size))
	bigend_correction = size - GET_MODE_SIZE (mode);
    }

  /* If we have already instantiated virtual registers, return the actual
     address relative to the frame pointer.  */
  if (virtuals_instantiated)
    addr = plus_constant (Pmode, frame_pointer_rtx,
			  trunc_int_for_mode
			  (slot_offset + bigend_correction
			   + targetm.starting_frame_offset (), Pmode));
  else
    addr = plus_constant (Pmode, virtual_stack_vars_rtx,
			  trunc_int_for_mode
			  (slot_offset + bigend_correction,
			   Pmode));

  x = gen_rtx_MEM (mode, addr);
  set_mem_align (x, alignment_in_bits);
  MEM_NOTRAP_P (x) = 1;

  vec_safe_push (stack_slot_list, x);

  /* If the frame grew past the representable range, report it once and
     reset the offset so cascading errors are avoided.  */
  if (frame_offset_overflow (frame_offset, current_function_decl))
    frame_offset = 0;

  return x;
}
53138fd1498Szrj 
/* Wrapper around assign_stack_local_1, passing ASLK_RECORD_PAD as the
   KIND so that padding introduced for alignment is remembered on the
   frame_space free list for later reuse.  */

rtx
assign_stack_local (machine_mode mode, poly_int64 size, int align)
{
  return assign_stack_local_1 (mode, size, align, ASLK_RECORD_PAD);
}
53938fd1498Szrj 
54038fd1498Szrj /* In order to evaluate some expressions, such as function calls returning
54138fd1498Szrj    structures in memory, we need to temporarily allocate stack locations.
54238fd1498Szrj    We record each allocated temporary in the following structure.
54338fd1498Szrj 
54438fd1498Szrj    Associated with each temporary slot is a nesting level.  When we pop up
54538fd1498Szrj    one level, all temporaries associated with the previous level are freed.
54638fd1498Szrj    Normally, all temporaries are freed after the execution of the statement
54738fd1498Szrj    in which they were created.  However, if we are inside a ({...}) grouping,
54838fd1498Szrj    the result may be in a temporary and hence must be preserved.  If the
54938fd1498Szrj    result could be in a temporary, we preserve it if we can determine which
55038fd1498Szrj    one it is in.  If we cannot determine which temporary may contain the
55138fd1498Szrj    result, all temporaries are preserved.  A temporary is preserved by
55238fd1498Szrj    pretending it was allocated at the previous nesting level.  */
55338fd1498Szrj 
/* One stack temporary; see the nesting-level discipline described in
   the comment above.  */
struct GTY(()) temp_slot {
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* Points to previous temporary slot.  */
  struct temp_slot *prev;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The size, in units, of the slot.  */
  poly_int64 size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  poly_int64 base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  poly_int64 full_size;
};

/* Entry for the below hash table: maps one address rtx (a slot can be
   known under several addresses) to its temp slot.  */
struct GTY((for_user)) temp_slot_address_entry {
  /* Cached hash of ADDRESS; see temp_slot_address_compute_hash.  */
  hashval_t hash;
  rtx address;
  struct temp_slot *temp_slot;
};

/* Hash traits for the address -> temp slot table.  */
struct temp_address_hasher : ggc_ptr_hash<temp_slot_address_entry>
{
  static hashval_t hash (temp_slot_address_entry *);
  static bool equal (temp_slot_address_entry *, temp_slot_address_entry *);
};
59438fd1498Szrj 
/* A table of addresses that represent a stack slot.  The table is a mapping
   from address RTXen to a temp slot.  */
static GTY(()) hash_table<temp_address_hasher> *temp_slot_address_table;
/* Count of slots currently marked in_use across all levels.  */
static size_t n_temp_slots_in_use;
59938fd1498Szrj 
60038fd1498Szrj /* Removes temporary slot TEMP from LIST.  */
60138fd1498Szrj 
60238fd1498Szrj static void
cut_slot_from_list(struct temp_slot * temp,struct temp_slot ** list)60338fd1498Szrj cut_slot_from_list (struct temp_slot *temp, struct temp_slot **list)
60438fd1498Szrj {
60538fd1498Szrj   if (temp->next)
60638fd1498Szrj     temp->next->prev = temp->prev;
60738fd1498Szrj   if (temp->prev)
60838fd1498Szrj     temp->prev->next = temp->next;
60938fd1498Szrj   else
61038fd1498Szrj     *list = temp->next;
61138fd1498Szrj 
61238fd1498Szrj   temp->prev = temp->next = NULL;
61338fd1498Szrj }
61438fd1498Szrj 
61538fd1498Szrj /* Inserts temporary slot TEMP to LIST.  */
61638fd1498Szrj 
61738fd1498Szrj static void
insert_slot_to_list(struct temp_slot * temp,struct temp_slot ** list)61838fd1498Szrj insert_slot_to_list (struct temp_slot *temp, struct temp_slot **list)
61938fd1498Szrj {
62038fd1498Szrj   temp->next = *list;
62138fd1498Szrj   if (*list)
62238fd1498Szrj     (*list)->prev = temp;
62338fd1498Szrj   temp->prev = NULL;
62438fd1498Szrj   *list = temp;
62538fd1498Szrj }
62638fd1498Szrj 
/* Returns the list of used temp slots at LEVEL.  */

static struct temp_slot **
temp_slots_at_level (int level)
{
  /* Grow the per-level vector on demand; new entries are cleared, so
     a previously untouched level reads as an empty list.  */
  if (level >= (int) vec_safe_length (used_temp_slots))
    vec_safe_grow_cleared (used_temp_slots, level + 1);

  return &(*used_temp_slots)[level];
}
63738fd1498Szrj 
63838fd1498Szrj /* Returns the maximal temporary slot level.  */
63938fd1498Szrj 
64038fd1498Szrj static int
max_slot_level(void)64138fd1498Szrj max_slot_level (void)
64238fd1498Szrj {
64338fd1498Szrj   if (!used_temp_slots)
64438fd1498Szrj     return -1;
64538fd1498Szrj 
64638fd1498Szrj   return used_temp_slots->length () - 1;
64738fd1498Szrj }
64838fd1498Szrj 
/* Moves temporary slot TEMP to LEVEL.  */

static void
move_slot_to_level (struct temp_slot *temp, int level)
{
  /* Unlink from the old level's list while temp->level still names it,
     relink at the new level, then update the recorded level.  */
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, temp_slots_at_level (level));
  temp->level = level;
}
65838fd1498Szrj 
/* Make temporary slot TEMP available.  */

static void
make_slot_available (struct temp_slot *temp)
{
  /* Move the slot from its level's in-use list onto the free list.  */
  cut_slot_from_list (temp, temp_slots_at_level (temp->level));
  insert_slot_to_list (temp, &avail_temp_slots);
  temp->in_use = 0;
  /* Level -1 marks a slot that belongs to no nesting level.  */
  temp->level = -1;
  n_temp_slots_in_use--;
}
67038fd1498Szrj 
67138fd1498Szrj /* Compute the hash value for an address -> temp slot mapping.
67238fd1498Szrj    The value is cached on the mapping entry.  */
static hashval_t
temp_slot_address_compute_hash (struct temp_slot_address_entry *t)
{
  /* hash_rtx requires a DO_NOT_RECORD out-parameter; its value is
     deliberately ignored here.  */
  int do_not_record = 0;
  return hash_rtx (t->address, GET_MODE (t->address),
		   &do_not_record, NULL, false);
}
68038fd1498Szrj 
68138fd1498Szrj /* Return the hash value for an address -> temp slot mapping.  */
hashval_t
temp_address_hasher::hash (temp_slot_address_entry *t)
{
  /* The hash was precomputed by temp_slot_address_compute_hash and
     cached on the entry; just return it.  */
  return t->hash;
}
68738fd1498Szrj 
68838fd1498Szrj /* Compare two address -> temp slot mapping entries.  */
bool
temp_address_hasher::equal (temp_slot_address_entry *t1,
			    temp_slot_address_entry *t2)
{
  /* Entries match when their address RTXs are equivalent expressions.  */
  return exp_equiv_p (t1->address, t2->address, 0, true);
}
69538fd1498Szrj 
/* Add ADDRESS as an alias of TEMP_SLOT to the address -> temp slot mapping.  */
static void
insert_temp_slot_address (rtx address, struct temp_slot *temp_slot)
{
  struct temp_slot_address_entry *t = ggc_alloc<temp_slot_address_entry> ();
  t->address = address;
  t->temp_slot = temp_slot;
  /* Cache the hash on the entry so lookups need not recompute it.  */
  t->hash = temp_slot_address_compute_hash (t);
  *temp_slot_address_table->find_slot_with_hash (t, t->hash, INSERT) = t;
}
70638fd1498Szrj 
70738fd1498Szrj /* Remove an address -> temp slot mapping entry if the temp slot is
70838fd1498Szrj    not in use anymore.  Callback for remove_unused_temp_slot_addresses.  */
70938fd1498Szrj int
remove_unused_temp_slot_addresses_1(temp_slot_address_entry ** slot,void *)71038fd1498Szrj remove_unused_temp_slot_addresses_1 (temp_slot_address_entry **slot, void *)
71138fd1498Szrj {
71238fd1498Szrj   const struct temp_slot_address_entry *t = *slot;
71338fd1498Szrj   if (! t->temp_slot->in_use)
71438fd1498Szrj     temp_slot_address_table->clear_slot (slot);
71538fd1498Szrj   return 1;
71638fd1498Szrj }
71738fd1498Szrj 
71838fd1498Szrj /* Remove all mappings of addresses to unused temp slots.  */
static void
remove_unused_temp_slot_addresses (void)
{
  /* Use quicker clearing if there aren't any active temp slots;
     otherwise walk every entry and drop those whose slot is free.  */
  if (n_temp_slots_in_use)
    temp_slot_address_table->traverse
      <void *, remove_unused_temp_slot_addresses_1> (NULL);
  else
    temp_slot_address_table->empty ();
}
72938fd1498Szrj 
73038fd1498Szrj /* Find the temp slot corresponding to the object at address X.  */
73138fd1498Szrj 
static struct temp_slot *
find_temp_slot_from_address (rtx x)
{
  struct temp_slot *p;
  struct temp_slot_address_entry tmp, *t;

  /* First try the easy way:
     See if X exists in the address -> temp slot mapping.  A stack
     entry TMP is built only as a lookup key; it is never inserted.  */
  tmp.address = x;
  tmp.temp_slot = NULL;
  tmp.hash = temp_slot_address_compute_hash (&tmp);
  t = temp_slot_address_table->find_with_hash (&tmp, tmp.hash);
  if (t)
    return t->temp_slot;

  /* If we have a sum involving a register, see if it points to a temp
     slot.  Either PLUS operand may be the base register.  */
  if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 0))
      && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
    return p;
  else if (GET_CODE (x) == PLUS && REG_P (XEXP (x, 1))
	   && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
    return p;

  /* Last resort: Address is a virtual stack var address.  Scan every
     level for a slot whose extent covers the constant offset.  */
  poly_int64 offset;
  if (strip_offset (x, &offset) == virtual_stack_vars_rtx)
    {
      int i;
      for (i = max_slot_level (); i >= 0; i--)
	for (p = *temp_slots_at_level (i); p; p = p->next)
	  if (known_in_range_p (offset, p->base_offset, p->full_size))
	    return p;
    }

  return NULL;
}
76938fd1498Szrj 
77038fd1498Szrj /* Allocate a temporary stack slot and record it for possible later
77138fd1498Szrj    reuse.
77238fd1498Szrj 
77338fd1498Szrj    MODE is the machine mode to be given to the returned rtx.
77438fd1498Szrj 
77538fd1498Szrj    SIZE is the size in units of the space required.  We do no rounding here
77638fd1498Szrj    since assign_stack_local will do any required rounding.
77738fd1498Szrj 
77838fd1498Szrj    TYPE is the type that will be used for the stack slot.  */
77938fd1498Szrj 
rtx
assign_stack_temp_for_type (machine_mode mode, poly_int64 size, tree type)
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0, *selected = NULL, **pp;
  rtx slot;

  /* Variable-sized requests are not supported here.  */
  gcc_assert (known_size_p (size));

  align = get_stack_local_alignment (type, mode);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.

     If assign_stack_temp is called outside of the tree->rtl expansion,
     we cannot reuse the stack slots (that may still refer to
     VIRTUAL_STACK_VARS_REGNUM).  */
  if (!virtuals_instantiated)
    {
      for (p = avail_temp_slots; p; p = p->next)
	{
	  if (p->align >= align
	      && known_ge (p->size, size)
	      && GET_MODE (p->slot) == mode
	      && objects_must_conflict_p (p->type, type)
	      && (best_p == 0
		  || (known_eq (best_p->size, p->size)
		      ? best_p->align > p->align
		      : known_ge (best_p->size, p->size))))
	    {
	      /* An exact fit cannot be improved upon; stop searching.  */
	      if (p->align == align && known_eq (p->size, size))
		{
		  selected = p;
		  cut_slot_from_list (selected, &avail_temp_slots);
		  best_p = 0;
		  break;
		}
	      best_p = p;
	    }
	}
    }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      selected = best_p;
      cut_slot_from_list (selected, &avail_temp_slots);

      /* If there are enough aligned bytes left over, make them into a new
	 temp_slot so that the extra bytes don't get wasted.  Do this only
	 for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
	{
	  int alignment = best_p->align / BITS_PER_UNIT;
	  poly_int64 rounded_size = aligned_upper_bound (size, alignment);

	  if (known_ge (best_p->size - rounded_size, alignment))
	    {
	      /* Carve the tail off into a fresh available slot.  */
	      p = ggc_alloc<temp_slot> ();
	      p->in_use = 0;
	      p->size = best_p->size - rounded_size;
	      p->base_offset = best_p->base_offset + rounded_size;
	      p->full_size = best_p->full_size - rounded_size;
	      p->slot = adjust_address_nv (best_p->slot, BLKmode, rounded_size);
	      p->align = best_p->align;
	      p->type = best_p->type;
	      insert_slot_to_list (p, &avail_temp_slots);

	      vec_safe_push (stack_slot_list, p->slot);

	      best_p->size = rounded_size;
	      best_p->full_size = rounded_size;
	    }
	}
    }

  /* If we still didn't find one, make a new temporary.  */
  if (selected == 0)
    {
      poly_int64 frame_offset_old = frame_offset;

      p = ggc_alloc<temp_slot> ();

      /* We are passing an explicit alignment request to assign_stack_local.
	 One side effect of that is assign_stack_local will not round SIZE
	 to ensure the frame offset remains suitably aligned.

	 So for requests which depended on the rounding of SIZE, we go ahead
	 and round it now.  We also make sure ALIGNMENT is at least
	 BIGGEST_ALIGNMENT.  */
      gcc_assert (mode != BLKmode || align == BIGGEST_ALIGNMENT);
      p->slot = assign_stack_local_1 (mode,
				      (mode == BLKmode
				       ? aligned_upper_bound (size,
							      (int) align
							      / BITS_PER_UNIT)
				       : size),
				      align, 0);

      p->align = align;

      /* The following slot size computation is necessary because we don't
	 know the actual size of the temporary slot until assign_stack_local
	 has performed all the frame alignment and size rounding for the
	 requested temporary.  Note that extra space added for alignment
	 can be either above or below this stack slot depending on which
	 way the frame grows.  We include the extra space if and only if it
	 is above this slot.  */
      if (FRAME_GROWS_DOWNWARD)
	p->size = frame_offset_old - frame_offset;
      else
	p->size = size;

      /* Now define the fields used by combine_temp_slots.  */
      if (FRAME_GROWS_DOWNWARD)
	{
	  p->base_offset = frame_offset;
	  p->full_size = frame_offset_old - frame_offset;
	}
      else
	{
	  p->base_offset = frame_offset_old;
	  p->full_size = frame_offset - frame_offset_old;
	}

      selected = p;
    }

  /* Record the chosen slot as in use at the current temp slot level and
     register its address for later lookups.  */
  p = selected;
  p->in_use = 1;
  p->type = type;
  p->level = temp_slot_level;
  n_temp_slots_in_use++;

  pp = temp_slots_at_level (p->level);
  insert_slot_to_list (p, pp);
  insert_temp_slot_address (XEXP (p->slot, 0), p);

  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  vec_safe_push (stack_slot_list, slot);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
  MEM_NOTRAP_P (slot) = 1;

  return slot;
}
93638fd1498Szrj 
93738fd1498Szrj /* Allocate a temporary stack slot and record it for possible later
93838fd1498Szrj    reuse.  First two arguments are same as in preceding function.  */
93938fd1498Szrj 
rtx
assign_stack_temp (machine_mode mode, poly_int64 size)
{
  /* Convenience wrapper for the common case of no type information.  */
  return assign_stack_temp_for_type (mode, size, NULL_TREE);
}
94538fd1498Szrj 
94638fd1498Szrj /* Assign a temporary.
94738fd1498Szrj    If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
94838fd1498Szrj    and so that should be used in error messages.  In either case, we
94938fd1498Szrj    allocate of the given type.
95038fd1498Szrj    MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
95138fd1498Szrj    it is 0 if a register is OK.
95238fd1498Szrj    DONT_PROMOTE is 1 if we should not promote values in register
95338fd1498Szrj    to wider modes.  */
95438fd1498Szrj 
rtx
assign_temp (tree type_or_decl, int memory_required,
	     int dont_promote ATTRIBUTE_UNUSED)
{
  tree type, decl;
  machine_mode mode;
#ifdef PROMOTE_MODE
  int unsignedp;
#endif

  /* Accept either a decl (for error reporting) or a bare type.  */
  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifdef PROMOTE_MODE
  unsignedp = TYPE_UNSIGNED (type);
#endif

  /* Allocating temporaries of TREE_ADDRESSABLE type must be done in the front
     end.  See also create_tmp_var for the gimplification-time check.  */
  gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));

  /* Aggregates and explicitly-requested memory go in a stack slot;
     everything else becomes a (possibly promoted) pseudo register.  */
  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Zero sized arrays are GNU C extension.  Set size to 1 to avoid
	 problems with allocating the stack space.  */
      if (size == 0)
	size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
	 temporaries.  However, sometimes we can find a fixed upper limit on
	 the size, so try that instead.  */
      else if (size == -1)
	size = max_int_size_in_bytes (type);

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
	 this to things that aren't compiler-generated temporaries.  The
	 rest of the time we'll die in assign_stack_temp_for_type.  */
      if (decl && size == -1
	  && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
	{
	  error ("size of variable %q+D is too large", decl);
	  size = 1;
	}

      tmp = assign_stack_temp_for_type (mode, size, type);
      return tmp;
    }

#ifdef PROMOTE_MODE
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp);
#endif

  return gen_reg_rtx (mode);
}
101738fd1498Szrj 
101838fd1498Szrj /* Combine temporary stack slots which are adjacent on the stack.
101938fd1498Szrj 
102038fd1498Szrj    This allows for better use of already allocated stack space.  This is only
102138fd1498Szrj    done for BLKmode slots because we can be sure that we won't have alignment
102238fd1498Szrj    problems in this case.  */
102338fd1498Szrj 
static void
combine_temp_slots (void)
{
  struct temp_slot *p, *q, *next, *next_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization.  */
  if (! flag_expensive_optimizations)
    for (p = avail_temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
	return;

  /* Pairwise scan of the available list; NEXT/NEXT_Q are saved up front
     because cut_slot_from_list unlinks the node being visited.  */
  for (p = avail_temp_slots; p; p = next)
    {
      int delete_p = 0;

      next = p->next;

      if (GET_MODE (p->slot) != BLKmode)
	continue;

      for (q = p->next; q; q = next_q)
	{
	  int delete_q = 0;

	  next_q = q->next;

	  if (GET_MODE (q->slot) != BLKmode)
	    continue;

	  if (known_eq (p->base_offset + p->full_size, q->base_offset))
	    {
	      /* Q comes after P; combine Q into P.  */
	      p->size += q->size;
	      p->full_size += q->full_size;
	      delete_q = 1;
	    }
	  else if (known_eq (q->base_offset + q->full_size, p->base_offset))
	    {
	      /* P comes after Q; combine P into Q.  */
	      q->size += p->size;
	      q->full_size += p->full_size;
	      delete_p = 1;
	      break;
	    }
	  if (delete_q)
	    cut_slot_from_list (q, &avail_temp_slots);
	}

      /* Either delete P or advance past it.  */
      if (delete_p)
	cut_slot_from_list (p, &avail_temp_slots);
    }
}
108438fd1498Szrj 
108538fd1498Szrj /* Indicate that NEW_RTX is an alternate way of referring to the temp
108638fd1498Szrj    slot that previously was known by OLD_RTX.  */
108738fd1498Szrj 
void
update_temp_slot_address (rtx old_rtx, rtx new_rtx)
{
  struct temp_slot *p;

  /* Nothing to record when both expressions are already identical.  */
  if (rtx_equal_p (old_rtx, new_rtx))
    return;

  p = find_temp_slot_from_address (old_rtx);

  /* If we didn't find one, see if OLD_RTX is a PLUS.  If so, and
     NEW_RTX is a register, see if one operand of the PLUS is a
     temporary location.  If so, NEW_RTX points into it.  Otherwise,
     if both OLD_RTX and NEW_RTX are a PLUS and if there is a register
     in common between them.  If so, try a recursive call on those
     values.  */
  if (p == 0)
    {
      if (GET_CODE (old_rtx) != PLUS)
	return;

      if (REG_P (new_rtx))
	{
	  /* Try both PLUS operands; only the one naming a temp slot
	     will record an alias.  */
	  update_temp_slot_address (XEXP (old_rtx, 0), new_rtx);
	  update_temp_slot_address (XEXP (old_rtx, 1), new_rtx);
	  return;
	}
      else if (GET_CODE (new_rtx) != PLUS)
	return;

      /* Both are PLUS: pair off the operand common to both with the
	 remaining operand of the other expression.  */
      if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 0)))
	update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 0)))
	update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 1));
      else if (rtx_equal_p (XEXP (old_rtx, 0), XEXP (new_rtx, 1)))
	update_temp_slot_address (XEXP (old_rtx, 1), XEXP (new_rtx, 0));
      else if (rtx_equal_p (XEXP (old_rtx, 1), XEXP (new_rtx, 1)))
	update_temp_slot_address (XEXP (old_rtx, 0), XEXP (new_rtx, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  insert_temp_slot_address (new_rtx, p);
}
113338fd1498Szrj 
/* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
   matched one of our slots, just mark that one.  Otherwise, we can't
   easily predict which it is, so upgrade all of them.

   This is called when an ({...}) construct occurs and a statement
   returns a value in memory.  */
114138fd1498Szrj 
void
preserve_temp_slots (rtx x)
{
  struct temp_slot *p = 0, *next;

  if (x == 0)
    return;

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  */
  if (REG_P (x) && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot.  */
  if (p == 0 && (!MEM_P (x) || CONSTANT_P (XEXP (x, 0))))
    return;

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Promote only the matched slot, and only when it belongs to the
	 current level (deeper/shallower slots are left alone).  */
      if (p->level == temp_slot_level)
	move_slot_to_level (p, temp_slot_level - 1);
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      /* NEXT is saved first because moving P relinks it.  */
      next = p->next;
      move_slot_to_level (p, temp_slot_level - 1);
    }
}
117838fd1498Szrj 
117938fd1498Szrj /* Free all temporaries used so far.  This is normally called at the
118038fd1498Szrj    end of generating code for a statement.  */
118138fd1498Szrj 
void
free_temp_slots (void)
{
  struct temp_slot *p, *next;
  bool some_available = false;

  /* Move every slot at the current level onto the available list.
     NEXT is saved first because make_slot_available relinks P.  */
  for (p = *temp_slots_at_level (temp_slot_level); p; p = next)
    {
      next = p->next;
      make_slot_available (p);
      some_available = true;
    }

  /* Only pay for the address-table scan and slot merging when
     something was actually freed.  */
  if (some_available)
    {
      remove_unused_temp_slot_addresses ();
      combine_temp_slots ();
    }
}
120138fd1498Szrj 
120238fd1498Szrj /* Push deeper into the nesting level for stack temporaries.  */
120338fd1498Szrj 
void
push_temp_slots (void)
{
  /* New temporaries allocated from here on belong to the new level.  */
  temp_slot_level++;
}
120938fd1498Szrj 
121038fd1498Szrj /* Pop a temporary nesting level.  All slots in use in the current level
121138fd1498Szrj    are freed.  */
121238fd1498Szrj 
void
pop_temp_slots (void)
{
  /* Release everything allocated at the current level, then return to
     the enclosing level.  */
  free_temp_slots ();
  temp_slot_level--;
}
121938fd1498Szrj 
122038fd1498Szrj /* Initialize temporary slots.  */
122138fd1498Szrj 
void
init_temp_slots (void)
{
  /* We have not allocated any temporaries yet.  */
  avail_temp_slots = 0;
  vec_alloc (used_temp_slots, 0);
  temp_slot_level = 0;
  n_temp_slots_in_use = 0;

  /* Set up the table to map addresses to temp slots.  The table is
     created once and merely emptied on subsequent functions.  */
  if (! temp_slot_address_table)
    temp_slot_address_table = hash_table<temp_address_hasher>::create_ggc (32);
  else
    temp_slot_address_table->empty ();
}
123738fd1498Szrj 
123838fd1498Szrj /* Functions and data structures to keep track of the values hard regs
123938fd1498Szrj    had at the start of the function.  */
124038fd1498Szrj 
/* Private type used by get_hard_reg_initial_reg, get_hard_reg_initial_val,
   and has_hard_reg_initial_val.  */
struct GTY(()) initial_value_pair {
  rtx hard_reg;	/* The hard register whose entry value is tracked.  */
  rtx pseudo;	/* Pseudo that holds that register's initial value.  */
};
124738fd1498Szrj /* ???  This could be a VEC but there is currently no way to define an
124838fd1498Szrj    opaque VEC type.  This could be worked around by defining struct
124938fd1498Szrj    initial_value_pair in function.h.  */
struct GTY(()) initial_value_struct {
  int num_entries;	/* Number of entries currently in use.  */
  int max_entries;	/* Allocated capacity of ENTRIES.  */
  initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
};
125538fd1498Szrj 
125638fd1498Szrj /* If a pseudo represents an initial hard reg (or expression), return
125738fd1498Szrj    it, else return NULL_RTX.  */
125838fd1498Szrj 
125938fd1498Szrj rtx
get_hard_reg_initial_reg(rtx reg)126038fd1498Szrj get_hard_reg_initial_reg (rtx reg)
126138fd1498Szrj {
126238fd1498Szrj   struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
126338fd1498Szrj   int i;
126438fd1498Szrj 
126538fd1498Szrj   if (ivs == 0)
126638fd1498Szrj     return NULL_RTX;
126738fd1498Szrj 
126838fd1498Szrj   for (i = 0; i < ivs->num_entries; i++)
126938fd1498Szrj     if (rtx_equal_p (ivs->entries[i].pseudo, reg))
127038fd1498Szrj       return ivs->entries[i].hard_reg;
127138fd1498Szrj 
127238fd1498Szrj   return NULL_RTX;
127338fd1498Szrj }
127438fd1498Szrj 
127538fd1498Szrj /* Make sure that there's a pseudo register of mode MODE that stores the
127638fd1498Szrj    initial value of hard register REGNO.  Return an rtx for such a pseudo.  */
127738fd1498Szrj 
rtx
get_hard_reg_initial_val (machine_mode mode, unsigned int regno)
{
  struct initial_value_struct *ivs;
  rtx rv;

  /* Reuse an existing pseudo if one was already created for this
     (mode, regno) pair.  */
  rv = has_hard_reg_initial_val (mode, regno);
  if (rv)
    return rv;

  /* Lazily create the table with room for five entries.  */
  ivs = crtl->hard_reg_initial_vals;
  if (ivs == 0)
    {
      ivs = ggc_alloc<initial_value_struct> ();
      ivs->num_entries = 0;
      ivs->max_entries = 5;
      ivs->entries = ggc_vec_alloc<initial_value_pair> (5);
      crtl->hard_reg_initial_vals = ivs;
    }

  /* Grow by five entries at a time as needed.  */
  if (ivs->num_entries >= ivs->max_entries)
    {
      ivs->max_entries += 5;
      ivs->entries = GGC_RESIZEVEC (initial_value_pair, ivs->entries,
				    ivs->max_entries);
    }

  ivs->entries[ivs->num_entries].hard_reg = gen_rtx_REG (mode, regno);
  ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (mode);

  return ivs->entries[ivs->num_entries++].pseudo;
}
131038fd1498Szrj 
131138fd1498Szrj /* See if get_hard_reg_initial_val has been used to create a pseudo
131238fd1498Szrj    for the initial value of hard register REGNO in mode MODE.  Return
131338fd1498Szrj    the associated pseudo if so, otherwise return NULL.  */
131438fd1498Szrj 
131538fd1498Szrj rtx
has_hard_reg_initial_val(machine_mode mode,unsigned int regno)131638fd1498Szrj has_hard_reg_initial_val (machine_mode mode, unsigned int regno)
131738fd1498Szrj {
131838fd1498Szrj   struct initial_value_struct *ivs;
131938fd1498Szrj   int i;
132038fd1498Szrj 
132138fd1498Szrj   ivs = crtl->hard_reg_initial_vals;
132238fd1498Szrj   if (ivs != 0)
132338fd1498Szrj     for (i = 0; i < ivs->num_entries; i++)
132438fd1498Szrj       if (GET_MODE (ivs->entries[i].hard_reg) == mode
132538fd1498Szrj 	  && REGNO (ivs->entries[i].hard_reg) == regno)
132638fd1498Szrj 	return ivs->entries[i].pseudo;
132738fd1498Szrj 
132838fd1498Szrj   return NULL_RTX;
132938fd1498Szrj }
133038fd1498Szrj 
unsigned int
emit_initial_value_sets (void)
{
  struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
  int i;
  rtx_insn *seq;

  if (ivs == 0)
    return 0;

  /* Build one move per table entry, copying each hard register into
     its pseudo, and emit the whole sequence at the function entry.  */
  start_sequence ();
  for (i = 0; i < ivs->num_entries; i++)
    emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
  seq = get_insns ();
  end_sequence ();

  emit_insn_at_entry (seq);
  return 0;
}
135038fd1498Szrj 
135138fd1498Szrj /* Return the hardreg-pseudoreg initial values pair entry I and
135238fd1498Szrj    TRUE if I is a valid entry, or FALSE if I is not a valid entry.  */
135338fd1498Szrj bool
initial_value_entry(int i,rtx * hreg,rtx * preg)135438fd1498Szrj initial_value_entry (int i, rtx *hreg, rtx *preg)
135538fd1498Szrj {
135638fd1498Szrj   struct initial_value_struct *ivs = crtl->hard_reg_initial_vals;
135738fd1498Szrj   if (!ivs || i >= ivs->num_entries)
135838fd1498Szrj     return false;
135938fd1498Szrj 
136038fd1498Szrj   *hreg = ivs->entries[i].hard_reg;
136138fd1498Szrj   *preg = ivs->entries[i].pseudo;
136238fd1498Szrj   return true;
136338fd1498Szrj }
136438fd1498Szrj 
/* These routines are responsible for converting virtual register references
   to the actual hard register references once RTL generation is complete.

   The following four variables are used for communication between the
   routines.  They contain the offsets of the virtual registers from their
   respective hard registers.  */

/* Offset of virtual_incoming_args_rtx from the arg pointer.  */
static poly_int64 in_arg_offset;
/* Offset of virtual_stack_vars_rtx from the frame pointer.  */
static poly_int64 var_offset;
/* Offset of virtual_stack_dynamic_rtx from the stack pointer.  */
static poly_int64 dynamic_offset;
/* Offset of virtual_outgoing_args_rtx from the stack pointer.  */
static poly_int64 out_arg_offset;
/* Offset of virtual_cfa_rtx from the CFA base register (frame or arg
   pointer, depending on FRAME_POINTER_CFA_OFFSET).  */
static poly_int64 cfa_offset;

/* In most machines, the stack pointer register is equivalent to the bottom
   of the stack.  */

#ifndef STACK_POINTER_OFFSET
#define STACK_POINTER_OFFSET	0
#endif

/* If the target does not distinguish incoming from outgoing register
   parameter stack space, treat them as the same quantity.  */
#if defined (REG_PARM_STACK_SPACE) && !defined (INCOMING_REG_PARM_STACK_SPACE)
#define INCOMING_REG_PARM_STACK_SPACE REG_PARM_STACK_SPACE
#endif

/* If not defined, pick an appropriate default for the offset of dynamically
   allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
   INCOMING_REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */

#ifndef STACK_DYNAMIC_OFFSET

/* The bottom of the stack points to the actual arguments.  If
   REG_PARM_STACK_SPACE is defined, this includes the space for the register
   parameters.  However, if OUTGOING_REG_PARM_STACK space is not defined,
   stack space for register parameters is not pushed by the caller, but
   rather part of the fixed stack areas and hence not included in
   `crtl->outgoing_args_size'.  Nevertheless, we must allow
   for it when allocating stack dynamic objects.  */

#ifdef INCOMING_REG_PARM_STACK_SPACE
#define STACK_DYNAMIC_OFFSET(FNDECL)	\
((ACCUMULATE_OUTGOING_ARGS						      \
  ? (crtl->outgoing_args_size				      \
     + (OUTGOING_REG_PARM_STACK_SPACE ((!(FNDECL) ? NULL_TREE : TREE_TYPE (FNDECL))) ? 0 \
					       : INCOMING_REG_PARM_STACK_SPACE (FNDECL))) \
  : 0) + (STACK_POINTER_OFFSET))
#else
#define STACK_DYNAMIC_OFFSET(FNDECL)	\
  ((ACCUMULATE_OUTGOING_ARGS ? crtl->outgoing_args_size : poly_int64 (0)) \
 + (STACK_POINTER_OFFSET))
#endif
#endif
141638fd1498Szrj 
141738fd1498Szrj 
141838fd1498Szrj /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
141938fd1498Szrj    is a virtual register, return the equivalent hard register and set the
142038fd1498Szrj    offset indirectly through the pointer.  Otherwise, return 0.  */
142138fd1498Szrj 
142238fd1498Szrj static rtx
instantiate_new_reg(rtx x,poly_int64_pod * poffset)142338fd1498Szrj instantiate_new_reg (rtx x, poly_int64_pod *poffset)
142438fd1498Szrj {
142538fd1498Szrj   rtx new_rtx;
142638fd1498Szrj   poly_int64 offset;
142738fd1498Szrj 
142838fd1498Szrj   if (x == virtual_incoming_args_rtx)
142938fd1498Szrj     {
143038fd1498Szrj       if (stack_realign_drap)
143138fd1498Szrj         {
143238fd1498Szrj 	  /* Replace virtual_incoming_args_rtx with internal arg
143338fd1498Szrj 	     pointer if DRAP is used to realign stack.  */
143438fd1498Szrj           new_rtx = crtl->args.internal_arg_pointer;
143538fd1498Szrj           offset = 0;
143638fd1498Szrj         }
143738fd1498Szrj       else
143838fd1498Szrj         new_rtx = arg_pointer_rtx, offset = in_arg_offset;
143938fd1498Szrj     }
144038fd1498Szrj   else if (x == virtual_stack_vars_rtx)
144138fd1498Szrj     new_rtx = frame_pointer_rtx, offset = var_offset;
144238fd1498Szrj   else if (x == virtual_stack_dynamic_rtx)
144338fd1498Szrj     new_rtx = stack_pointer_rtx, offset = dynamic_offset;
144438fd1498Szrj   else if (x == virtual_outgoing_args_rtx)
144538fd1498Szrj     new_rtx = stack_pointer_rtx, offset = out_arg_offset;
144638fd1498Szrj   else if (x == virtual_cfa_rtx)
144738fd1498Szrj     {
144838fd1498Szrj #ifdef FRAME_POINTER_CFA_OFFSET
144938fd1498Szrj       new_rtx = frame_pointer_rtx;
145038fd1498Szrj #else
145138fd1498Szrj       new_rtx = arg_pointer_rtx;
145238fd1498Szrj #endif
145338fd1498Szrj       offset = cfa_offset;
145438fd1498Szrj     }
145538fd1498Szrj   else if (x == virtual_preferred_stack_boundary_rtx)
145638fd1498Szrj     {
145738fd1498Szrj       new_rtx = GEN_INT (crtl->preferred_stack_boundary / BITS_PER_UNIT);
145838fd1498Szrj       offset = 0;
145938fd1498Szrj     }
146038fd1498Szrj   else
146138fd1498Szrj     return NULL_RTX;
146238fd1498Szrj 
146338fd1498Szrj   *poffset = offset;
146438fd1498Szrj   return new_rtx;
146538fd1498Szrj }
146638fd1498Szrj 
/* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
   registers present inside of *LOC.  The expression is simplified,
   as much as possible, but is not to be considered "valid" in any sense
   implied by the target.  Return true if any change is made.  */

static bool
instantiate_virtual_regs_in_rtx (rtx *loc)
{
  if (!*loc)
    return false;
  bool changed = false;
  subrtx_ptr_iterator::array_type array;
  FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
    {
      rtx *loc = *iter;
      if (rtx x = *loc)
	{
	  rtx new_rtx;
	  poly_int64 offset;
	  switch (GET_CODE (x))
	    {
	    case REG:
	      /* A bare virtual register is replaced in place by
		 (plus hard_reg offset).  */
	      new_rtx = instantiate_new_reg (x, &offset);
	      if (new_rtx)
		{
		  *loc = plus_constant (GET_MODE (x), new_rtx, offset);
		  changed = true;
		}
	      /* A REG has no interesting sub-rtxes; don't descend.  */
	      iter.skip_subrtxes ();
	      break;

	    case PLUS:
	      /* For (plus virtual_reg X), fold the virtual register's
		 offset into the existing PLUS instead of nesting a
		 second addition.  */
	      new_rtx = instantiate_new_reg (XEXP (x, 0), &offset);
	      if (new_rtx)
		{
		  XEXP (x, 0) = new_rtx;
		  *loc = plus_constant (GET_MODE (x), x, offset, true);
		  changed = true;
		  iter.skip_subrtxes ();
		  break;
		}

	      /* FIXME -- from old code */
	      /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
		 we can commute the PLUS and SUBREG because pointers into the
		 frame are well-behaved.  */
	      break;

	    default:
	      break;
	    }
	}
    }
  return changed;
}
152238fd1498Szrj 
152338fd1498Szrj /* A subroutine of instantiate_virtual_regs_in_insn.  Return true if X
152438fd1498Szrj    matches the predicate for insn CODE operand OPERAND.  */
152538fd1498Szrj 
152638fd1498Szrj static int
safe_insn_predicate(int code,int operand,rtx x)152738fd1498Szrj safe_insn_predicate (int code, int operand, rtx x)
152838fd1498Szrj {
152938fd1498Szrj   return code < 0 || insn_operand_matches ((enum insn_code) code, operand, x);
153038fd1498Szrj }
153138fd1498Szrj 
/* A subroutine of instantiate_virtual_regs.  Instantiate any virtual
   registers present inside of insn.  The result will be a valid insn.  */

static void
instantiate_virtual_regs_in_insn (rtx_insn *insn)
{
  poly_int64 offset;
  int insn_code, i;
  bool any_change = false;
  rtx set, new_rtx, x;
  rtx_insn *seq;

  /* There are some special cases to be handled first.  */
  set = single_set (insn);
  if (set)
    {
      /* We're allowed to assign to a virtual register.  This is interpreted
	 to mean that the underlying register gets assigned the inverse
	 transformation.  This is used, for example, in the handling of
	 non-local gotos.  */
      new_rtx = instantiate_new_reg (SET_DEST (set), &offset);
      if (new_rtx)
	{
	  start_sequence ();

	  /* Store src - offset into the hard register, so that the
	     virtual register's value (hard_reg + offset) equals src.  */
	  instantiate_virtual_regs_in_rtx (&SET_SRC (set));
	  x = simplify_gen_binary (PLUS, GET_MODE (new_rtx), SET_SRC (set),
				   gen_int_mode (-offset, GET_MODE (new_rtx)));
	  x = force_operand (x, new_rtx);
	  if (x != new_rtx)
	    emit_move_insn (new_rtx, x);

	  seq = get_insns ();
	  end_sequence ();

	  /* The new sequence fully replaces the original insn.  */
	  emit_insn_before (seq, insn);
	  delete_insn (insn);
	  return;
	}

      /* Handle a straight copy from a virtual register by generating a
	 new add insn.  The difference between this and falling through
	 to the generic case is avoiding a new pseudo and eliminating a
	 move insn in the initial rtl stream.  */
      new_rtx = instantiate_new_reg (SET_SRC (set), &offset);
      if (new_rtx
	  && maybe_ne (offset, 0)
	  && REG_P (SET_DEST (set))
	  && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
	{
	  start_sequence ();

	  x = expand_simple_binop (GET_MODE (SET_DEST (set)), PLUS, new_rtx,
				   gen_int_mode (offset,
						 GET_MODE (SET_DEST (set))),
				   SET_DEST (set), 1, OPTAB_LIB_WIDEN);
	  if (x != SET_DEST (set))
	    emit_move_insn (SET_DEST (set), x);

	  seq = get_insns ();
	  end_sequence ();

	  emit_insn_before (seq, insn);
	  delete_insn (insn);
	  return;
	}

      extract_insn (insn);
      insn_code = INSN_CODE (insn);

      /* Handle a plus involving a virtual register by determining if the
	 operands remain valid if they're modified in place.  */
      poly_int64 delta;
      if (GET_CODE (SET_SRC (set)) == PLUS
	  && recog_data.n_operands >= 3
	  && recog_data.operand_loc[1] == &XEXP (SET_SRC (set), 0)
	  && recog_data.operand_loc[2] == &XEXP (SET_SRC (set), 1)
	  && poly_int_rtx_p (recog_data.operand[2], &delta)
	  && (new_rtx = instantiate_new_reg (recog_data.operand[1], &offset)))
	{
	  /* Fold the virtual register's displacement into the existing
	     constant addend.  */
	  offset += delta;

	  /* If the sum is zero, then replace with a plain move.  */
	  if (known_eq (offset, 0)
	      && REG_P (SET_DEST (set))
	      && REGNO (SET_DEST (set)) > LAST_VIRTUAL_REGISTER)
	    {
	      start_sequence ();
	      emit_move_insn (SET_DEST (set), new_rtx);
	      seq = get_insns ();
	      end_sequence ();

	      emit_insn_before (seq, insn);
	      delete_insn (insn);
	      return;
	    }

	  x = gen_int_mode (offset, recog_data.operand_mode[2]);

	  /* Using validate_change and apply_change_group here leaves
	     recog_data in an invalid state.  Since we know exactly what
	     we want to check, do those two by hand.  */
	  if (safe_insn_predicate (insn_code, 1, new_rtx)
	      && safe_insn_predicate (insn_code, 2, x))
	    {
	      *recog_data.operand_loc[1] = recog_data.operand[1] = new_rtx;
	      *recog_data.operand_loc[2] = recog_data.operand[2] = x;
	      any_change = true;

	      /* Fall through into the regular operand fixup loop in
		 order to take care of operands other than 1 and 2.  */
	    }
	}
    }
  else
    {
      extract_insn (insn);
      insn_code = INSN_CODE (insn);
    }

  /* In the general case, we expect virtual registers to appear only in
     operands, and then only as either bare registers or inside memories.  */
  for (i = 0; i < recog_data.n_operands; ++i)
    {
      x = recog_data.operand[i];
      switch (GET_CODE (x))
	{
	case MEM:
	  {
	    rtx addr = XEXP (x, 0);

	    if (!instantiate_virtual_regs_in_rtx (&addr))
	      continue;

	    start_sequence ();
	    x = replace_equiv_address (x, addr, true);
	    /* It may happen that the address with the virtual reg
	       was valid (e.g. based on the virtual stack reg, which might
	       be acceptable to the predicates with all offsets), whereas
	       the address now isn't anymore, for instance when the address
	       is still offsetted, but the base reg isn't virtual-stack-reg
	       anymore.  Below we would do a force_reg on the whole operand,
	       but this insn might actually only accept memory.  Hence,
	       before doing that last resort, try to reload the address into
	       a register, so this operand stays a MEM.  */
	    if (!safe_insn_predicate (insn_code, i, x))
	      {
		addr = force_reg (GET_MODE (addr), addr);
		x = replace_equiv_address (x, addr, true);
	      }
	    seq = get_insns ();
	    end_sequence ();
	    if (seq)
	      emit_insn_before (seq, insn);
	  }
	  break;

	case REG:
	  new_rtx = instantiate_new_reg (x, &offset);
	  if (new_rtx == NULL)
	    continue;
	  if (known_eq (offset, 0))
	    x = new_rtx;
	  else
	    {
	      start_sequence ();

	      /* Careful, special mode predicates may have stuff in
		 insn_data[insn_code].operand[i].mode that isn't useful
		 to us for computing a new value.  */
	      /* ??? Recognize address_operand and/or "p" constraints
		 to see if (plus new offset) is a valid before we put
		 this through expand_simple_binop.  */
	      x = expand_simple_binop (GET_MODE (x), PLUS, new_rtx,
				       gen_int_mode (offset, GET_MODE (x)),
				       NULL_RTX, 1, OPTAB_LIB_WIDEN);
	      seq = get_insns ();
	      end_sequence ();
	      emit_insn_before (seq, insn);
	    }
	  break;

	case SUBREG:
	  new_rtx = instantiate_new_reg (SUBREG_REG (x), &offset);
	  if (new_rtx == NULL)
	    continue;
	  if (maybe_ne (offset, 0))
	    {
	      /* Materialize hard_reg + offset before taking the subreg.  */
	      start_sequence ();
	      new_rtx = expand_simple_binop
		(GET_MODE (new_rtx), PLUS, new_rtx,
		 gen_int_mode (offset, GET_MODE (new_rtx)),
		 NULL_RTX, 1, OPTAB_LIB_WIDEN);
	      seq = get_insns ();
	      end_sequence ();
	      emit_insn_before (seq, insn);
	    }
	  x = simplify_gen_subreg (recog_data.operand_mode[i], new_rtx,
				   GET_MODE (new_rtx), SUBREG_BYTE (x));
	  gcc_assert (x);
	  break;

	default:
	  continue;
	}

      /* At this point, X contains the new value for the operand.
	 Validate the new value vs the insn predicate.  Note that
	 asm insns will have insn_code -1 here.  */
      if (!safe_insn_predicate (insn_code, i, x))
	{
	  start_sequence ();
	  if (REG_P (x))
	    {
	      gcc_assert (REGNO (x) <= LAST_VIRTUAL_REGISTER);
	      x = copy_to_reg (x);
	    }
	  else
	    x = force_reg (insn_data[insn_code].operand[i].mode, x);
	  seq = get_insns ();
	  end_sequence ();
	  if (seq)
	    emit_insn_before (seq, insn);
	}

      *recog_data.operand_loc[i] = recog_data.operand[i] = x;
      any_change = true;
    }

  if (any_change)
    {
      /* Propagate operand changes into the duplicates.  */
      for (i = 0; i < recog_data.n_dups; ++i)
	*recog_data.dup_loc[i]
	  = copy_rtx (recog_data.operand[(unsigned)recog_data.dup_num[i]]);

      /* Force re-recognition of the instruction for validation.  */
      INSN_CODE (insn) = -1;
    }

  if (asm_noperands (PATTERN (insn)) >= 0)
    {
      if (!check_asm_operands (PATTERN (insn)))
	{
	  error_for_asm (insn, "impossible constraint in %<asm%>");
	  /* For asm goto, instead of fixing up all the edges
	     just clear the template and clear input operands
	     (asm goto doesn't have any output operands).  */
	  if (JUMP_P (insn))
	    {
	      rtx asm_op = extract_asm_operands (PATTERN (insn));
	      ASM_OPERANDS_TEMPLATE (asm_op) = ggc_strdup ("");
	      ASM_OPERANDS_INPUT_VEC (asm_op) = rtvec_alloc (0);
	      ASM_OPERANDS_INPUT_CONSTRAINT_VEC (asm_op) = rtvec_alloc (0);
	    }
	  else
	    delete_insn (insn);
	}
    }
  else
    {
      if (recog_memoized (insn) < 0)
	fatal_insn_not_found (insn);
    }
}
179738fd1498Szrj 
179838fd1498Szrj /* Subroutine of instantiate_decls.  Given RTL representing a decl,
179938fd1498Szrj    do any instantiation required.  */
180038fd1498Szrj 
180138fd1498Szrj void
instantiate_decl_rtl(rtx x)180238fd1498Szrj instantiate_decl_rtl (rtx x)
180338fd1498Szrj {
180438fd1498Szrj   rtx addr;
180538fd1498Szrj 
180638fd1498Szrj   if (x == 0)
180738fd1498Szrj     return;
180838fd1498Szrj 
180938fd1498Szrj   /* If this is a CONCAT, recurse for the pieces.  */
181038fd1498Szrj   if (GET_CODE (x) == CONCAT)
181138fd1498Szrj     {
181238fd1498Szrj       instantiate_decl_rtl (XEXP (x, 0));
181338fd1498Szrj       instantiate_decl_rtl (XEXP (x, 1));
181438fd1498Szrj       return;
181538fd1498Szrj     }
181638fd1498Szrj 
181738fd1498Szrj   /* If this is not a MEM, no need to do anything.  Similarly if the
181838fd1498Szrj      address is a constant or a register that is not a virtual register.  */
181938fd1498Szrj   if (!MEM_P (x))
182038fd1498Szrj     return;
182138fd1498Szrj 
182238fd1498Szrj   addr = XEXP (x, 0);
182338fd1498Szrj   if (CONSTANT_P (addr)
182438fd1498Szrj       || (REG_P (addr)
182538fd1498Szrj 	  && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
182638fd1498Szrj 	      || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
182738fd1498Szrj     return;
182838fd1498Szrj 
182938fd1498Szrj   instantiate_virtual_regs_in_rtx (&XEXP (x, 0));
183038fd1498Szrj }
183138fd1498Szrj 
183238fd1498Szrj /* Helper for instantiate_decls called via walk_tree: Process all decls
183338fd1498Szrj    in the given DECL_VALUE_EXPR.  */
183438fd1498Szrj 
183538fd1498Szrj static tree
instantiate_expr(tree * tp,int * walk_subtrees,void * data ATTRIBUTE_UNUSED)183638fd1498Szrj instantiate_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
183738fd1498Szrj {
183838fd1498Szrj   tree t = *tp;
183938fd1498Szrj   if (! EXPR_P (t))
184038fd1498Szrj     {
184138fd1498Szrj       *walk_subtrees = 0;
184238fd1498Szrj       if (DECL_P (t))
184338fd1498Szrj 	{
184438fd1498Szrj 	  if (DECL_RTL_SET_P (t))
184538fd1498Szrj 	    instantiate_decl_rtl (DECL_RTL (t));
184638fd1498Szrj 	  if (TREE_CODE (t) == PARM_DECL && DECL_NAMELESS (t)
184738fd1498Szrj 	      && DECL_INCOMING_RTL (t))
184838fd1498Szrj 	    instantiate_decl_rtl (DECL_INCOMING_RTL (t));
184938fd1498Szrj 	  if ((VAR_P (t) || TREE_CODE (t) == RESULT_DECL)
185038fd1498Szrj 	      && DECL_HAS_VALUE_EXPR_P (t))
185138fd1498Szrj 	    {
185238fd1498Szrj 	      tree v = DECL_VALUE_EXPR (t);
185338fd1498Szrj 	      walk_tree (&v, instantiate_expr, NULL, NULL);
185438fd1498Szrj 	    }
185538fd1498Szrj 	}
185638fd1498Szrj     }
185738fd1498Szrj   return NULL;
185838fd1498Szrj }
185938fd1498Szrj 
186038fd1498Szrj /* Subroutine of instantiate_decls: Process all decls in the given
186138fd1498Szrj    BLOCK node and all its subblocks.  */
186238fd1498Szrj 
186338fd1498Szrj static void
instantiate_decls_1(tree let)186438fd1498Szrj instantiate_decls_1 (tree let)
186538fd1498Szrj {
186638fd1498Szrj   tree t;
186738fd1498Szrj 
186838fd1498Szrj   for (t = BLOCK_VARS (let); t; t = DECL_CHAIN (t))
186938fd1498Szrj     {
187038fd1498Szrj       if (DECL_RTL_SET_P (t))
187138fd1498Szrj 	instantiate_decl_rtl (DECL_RTL (t));
187238fd1498Szrj       if (VAR_P (t) && DECL_HAS_VALUE_EXPR_P (t))
187338fd1498Szrj 	{
187438fd1498Szrj 	  tree v = DECL_VALUE_EXPR (t);
187538fd1498Szrj 	  walk_tree (&v, instantiate_expr, NULL, NULL);
187638fd1498Szrj 	}
187738fd1498Szrj     }
187838fd1498Szrj 
187938fd1498Szrj   /* Process all subblocks.  */
188038fd1498Szrj   for (t = BLOCK_SUBBLOCKS (let); t; t = BLOCK_CHAIN (t))
188138fd1498Szrj     instantiate_decls_1 (t);
188238fd1498Szrj }
188338fd1498Szrj 
188438fd1498Szrj /* Scan all decls in FNDECL (both variables and parameters) and instantiate
188538fd1498Szrj    all virtual registers in their DECL_RTL's.  */
188638fd1498Szrj 
188738fd1498Szrj static void
instantiate_decls(tree fndecl)188838fd1498Szrj instantiate_decls (tree fndecl)
188938fd1498Szrj {
189038fd1498Szrj   tree decl;
189138fd1498Szrj   unsigned ix;
189238fd1498Szrj 
189338fd1498Szrj   /* Process all parameters of the function.  */
189438fd1498Szrj   for (decl = DECL_ARGUMENTS (fndecl); decl; decl = DECL_CHAIN (decl))
189538fd1498Szrj     {
189638fd1498Szrj       instantiate_decl_rtl (DECL_RTL (decl));
189738fd1498Szrj       instantiate_decl_rtl (DECL_INCOMING_RTL (decl));
189838fd1498Szrj       if (DECL_HAS_VALUE_EXPR_P (decl))
189938fd1498Szrj 	{
190038fd1498Szrj 	  tree v = DECL_VALUE_EXPR (decl);
190138fd1498Szrj 	  walk_tree (&v, instantiate_expr, NULL, NULL);
190238fd1498Szrj 	}
190338fd1498Szrj     }
190438fd1498Szrj 
190538fd1498Szrj   if ((decl = DECL_RESULT (fndecl))
190638fd1498Szrj       && TREE_CODE (decl) == RESULT_DECL)
190738fd1498Szrj     {
190838fd1498Szrj       if (DECL_RTL_SET_P (decl))
190938fd1498Szrj 	instantiate_decl_rtl (DECL_RTL (decl));
191038fd1498Szrj       if (DECL_HAS_VALUE_EXPR_P (decl))
191138fd1498Szrj 	{
191238fd1498Szrj 	  tree v = DECL_VALUE_EXPR (decl);
191338fd1498Szrj 	  walk_tree (&v, instantiate_expr, NULL, NULL);
191438fd1498Szrj 	}
191538fd1498Szrj     }
191638fd1498Szrj 
191738fd1498Szrj   /* Process the saved static chain if it exists.  */
191838fd1498Szrj   decl = DECL_STRUCT_FUNCTION (fndecl)->static_chain_decl;
191938fd1498Szrj   if (decl && DECL_HAS_VALUE_EXPR_P (decl))
192038fd1498Szrj     instantiate_decl_rtl (DECL_RTL (DECL_VALUE_EXPR (decl)));
192138fd1498Szrj 
192238fd1498Szrj   /* Now process all variables defined in the function or its subblocks.  */
192338fd1498Szrj   if (DECL_INITIAL (fndecl))
192438fd1498Szrj     instantiate_decls_1 (DECL_INITIAL (fndecl));
192538fd1498Szrj 
192638fd1498Szrj   FOR_EACH_LOCAL_DECL (cfun, ix, decl)
192738fd1498Szrj     if (DECL_RTL_SET_P (decl))
192838fd1498Szrj       instantiate_decl_rtl (DECL_RTL (decl));
192938fd1498Szrj   vec_free (cfun->local_decls);
193038fd1498Szrj }
193138fd1498Szrj 
/* Pass through the INSNS of function FNDECL and convert virtual register
   references to hard register references.  */

static unsigned int
instantiate_virtual_regs (void)
{
  rtx_insn *insn;

  /* Compute the offsets to use for this function.  */
  in_arg_offset = FIRST_PARM_OFFSET (current_function_decl);
  var_offset = targetm.starting_frame_offset ();
  dynamic_offset = STACK_DYNAMIC_OFFSET (current_function_decl);
  out_arg_offset = STACK_POINTER_OFFSET;
#ifdef FRAME_POINTER_CFA_OFFSET
  cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
#else
  cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
#endif

  /* Initialize recognition, indicating that volatile is OK.  */
  init_recog ();

  /* Scan through all the insns, instantiating every virtual register still
     present.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
	/* These patterns in the instruction stream can never be recognized.
	   Fortunately, they shouldn't contain virtual registers either.  */
        if (GET_CODE (PATTERN (insn)) == USE
	    || GET_CODE (PATTERN (insn)) == CLOBBER
	    || GET_CODE (PATTERN (insn)) == ASM_INPUT
	    || DEBUG_MARKER_INSN_P (insn))
	  continue;
	else if (DEBUG_BIND_INSN_P (insn))
	  instantiate_virtual_regs_in_rtx (INSN_VAR_LOCATION_PTR (insn));
	else
	  instantiate_virtual_regs_in_insn (insn);

	/* instantiate_virtual_regs_in_insn may have replaced the insn
	   with a sequence and deleted it; nothing more to do then.  */
	if (insn->deleted ())
	  continue;

	instantiate_virtual_regs_in_rtx (&REG_NOTES (insn));

	/* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE.  */
	if (CALL_P (insn))
	  instantiate_virtual_regs_in_rtx (&CALL_INSN_FUNCTION_USAGE (insn));
      }

  /* Instantiate the virtual registers in the DECLs for debugging purposes.  */
  instantiate_decls (current_function_decl);

  /* Give the target a chance to instantiate its own decls.  */
  targetm.instantiate_decls ();

  /* Indicate that, from now on, assign_stack_local should use
     frame_pointer_rtx.  */
  virtuals_instantiated = 1;

  return 0;
}
199238fd1498Szrj 
namespace {

/* Static metadata describing the "vregs" RTL pass: no timing variable,
   no required/provided properties, no TODO flags.  */
const pass_data pass_data_instantiate_virtual_regs =
{
  RTL_PASS, /* type */
  "vregs", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass-manager wrapper that runs instantiate_virtual_regs on each
   function.  */
class pass_instantiate_virtual_regs : public rtl_opt_pass
{
public:
  pass_instantiate_virtual_regs (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_instantiate_virtual_regs, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      return instantiate_virtual_regs ();
    }

}; // class pass_instantiate_virtual_regs

} // anon namespace

/* Factory function called by the pass manager to create the pass.  */
rtl_opt_pass *
make_pass_instantiate_virtual_regs (gcc::context *ctxt)
{
  return new pass_instantiate_virtual_regs (ctxt);
}
203038fd1498Szrj 
203138fd1498Szrj 
203238fd1498Szrj /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
203338fd1498Szrj    This means a type for which function calls must pass an address to the
203438fd1498Szrj    function or get an address back from the function.
203538fd1498Szrj    EXP may be a type node or an expression (whose type is tested).  */
203638fd1498Szrj 
int
aggregate_value_p (const_tree exp, const_tree fntype)
{
  const_tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
  int i, regno, nregs;
  rtx reg;

  /* Reduce FNTYPE to a FUNCTION_TYPE/METHOD_TYPE (or NULL_TREE):
     callers may hand us a call expression, a function decl, a function
     type, or an identifier.  */
  if (fntype)
    switch (TREE_CODE (fntype))
      {
      case CALL_EXPR:
	{
	  tree fndecl = get_callee_fndecl (fntype);
	  if (fndecl)
	    fntype = TREE_TYPE (fndecl);
	  else if (CALL_EXPR_FN (fntype))
	    fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (fntype)));
	  else
	    /* For internal functions, assume nothing needs to be
	       returned in memory.  */
	    return 0;
	}
	break;
      case FUNCTION_DECL:
	fntype = TREE_TYPE (fntype);
	break;
      case FUNCTION_TYPE:
      case METHOD_TYPE:
        break;
      case IDENTIFIER_NODE:
	fntype = NULL_TREE;
	break;
      default:
	/* We don't expect other tree types here.  */
	gcc_unreachable ();
      }

  /* A void value never needs a return slot.  */
  if (VOID_TYPE_P (type))
    return 0;

  /* If a record should be passed the same as its first (and only) member
     don't pass it as an aggregate.  */
  if (TREE_CODE (type) == RECORD_TYPE && TYPE_TRANSPARENT_AGGR (type))
    return aggregate_value_p (first_field (type), fntype);

  /* If the front end has decided that this needs to be passed by
     reference, do so.  */
  if ((TREE_CODE (exp) == PARM_DECL || TREE_CODE (exp) == RESULT_DECL)
      && DECL_BY_REFERENCE (exp))
    return 1;

  /* Function types that are TREE_ADDRESSABLE force return in memory.  */
  if (fntype && TREE_ADDRESSABLE (fntype))
    return 1;

  /* Types that are TREE_ADDRESSABLE must be constructed in memory,
     and thus can't be returned in registers.  */
  if (TREE_ADDRESSABLE (type))
    return 1;

  /* An empty type occupies no storage at all.  */
  if (TYPE_EMPTY_P (type))
    return 0;

  /* -fpcc-struct-return forces every aggregate into memory.  */
  if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
    return 1;

  /* Ask the target's ABI whether this type must be returned in
     memory.  */
  if (targetm.calls.return_in_memory (type, fntype))
    return 1;

  /* Make sure we have suitable call-clobbered regs to return
     the value in; if not, we must return it in memory.  */
  reg = hard_function_value (type, 0, fntype, 0);

  /* If we have something other than a REG (e.g. a PARALLEL), then assume
     it is OK.  */
  if (!REG_P (reg))
    return 0;

  /* Scan every hard register the value would occupy; any call-saved
     register forces a memory return.  */
  regno = REGNO (reg);
  nregs = hard_regno_nregs (regno, TYPE_MODE (type));
  for (i = 0; i < nregs; i++)
    if (! call_used_regs[regno + i])
      return 1;

  return 0;
}
212338fd1498Szrj 
212438fd1498Szrj /* Return true if we should assign DECL a pseudo register; false if it
212538fd1498Szrj    should live on the local stack.  */
212638fd1498Szrj 
bool
use_register_for_decl (const_tree decl)
{
  if (TREE_CODE (decl) == SSA_NAME)
    {
      /* We often try to use the SSA_NAME, instead of its underlying
	 decl, to get type information and guide decisions, to avoid
	 differences of behavior between anonymous and named
	 variables, but in this one case we have to go for the actual
	 variable if there is one.  The main reason is that, at least
	 at -O0, we want to place user variables on the stack, but we
	 don't mind using pseudos for anonymous or ignored temps.
	 Should we take the SSA_NAME, we'd conclude all SSA_NAMEs
	 should go in pseudos, whereas their corresponding variables
	 might have to go on the stack.  So, disregarding the decl
	 here would negatively impact debug info at -O0, enable
	 coalescing between SSA_NAMEs that ought to get different
	 stack/pseudo assignments, and get the incoming argument
	 processing thoroughly confused by PARM_DECLs expected to live
	 in stack slots but assigned to pseudos.  */
      if (!SSA_NAME_VAR (decl))
	return TYPE_MODE (TREE_TYPE (decl)) != BLKmode
	  && !(flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)));

      decl = SSA_NAME_VAR (decl);
    }

  /* Honor volatile.  */
  if (TREE_SIDE_EFFECTS (decl))
    return false;

  /* Honor addressability.  */
  if (TREE_ADDRESSABLE (decl))
    return false;

  /* RESULT_DECLs are a bit special in that they're assigned without
     regard to use_register_for_decl, but we generally only store in
     them.  If we coalesce their SSA NAMEs, we'd better return a
     result that matches the assignment in expand_function_start.  */
  if (TREE_CODE (decl) == RESULT_DECL)
    {
      /* If it's not an aggregate, we're going to use a REG or a
	 PARALLEL containing a REG.  */
      if (!aggregate_value_p (decl, current_function_decl))
	return true;

      /* If expand_function_start determines the return value, we'll
	 use MEM if it's not by reference.  */
      if (cfun->returns_pcc_struct
	  || (targetm.calls.struct_value_rtx
	      (TREE_TYPE (current_function_decl), 1)))
	return DECL_BY_REFERENCE (decl);

      /* Otherwise, we're taking an extra all.function_result_decl
	 argument.  It's set up in assign_parms_augmented_arg_list,
	 under the (negated) conditions above, and then it's used to
	 set up the RESULT_DECL rtl in assign_params, after looping
	 over all parameters.  Now, if the RESULT_DECL is not by
	 reference, we'll use a MEM either way.  */
      if (!DECL_BY_REFERENCE (decl))
	return false;

      /* Otherwise, if RESULT_DECL is DECL_BY_REFERENCE, it will take
	 the function_result_decl's assignment.  Since it's a pointer,
	 we can short-circuit a number of the tests below, and we must
	 duplicate them because we don't have the
	 function_result_decl to test.  */
      if (!targetm.calls.allocate_stack_slots_for_args ())
	return true;
      /* We don't set DECL_IGNORED_P for the function_result_decl.  */
      if (optimize)
	return true;
      /* We don't set DECL_REGISTER for the function_result_decl.  */
      return false;
    }

  /* Decl is implicitly addressable by bound stores and loads
     if it is an aggregate holding bounds.  */
  if (chkp_function_instrumented_p (current_function_decl)
      && TREE_TYPE (decl)
      && !BOUNDED_P (decl)
      && chkp_type_has_pointer (TREE_TYPE (decl)))
    return false;

  /* Only register-like things go in registers.  */
  if (DECL_MODE (decl) == BLKmode)
    return false;

  /* If -ffloat-store specified, don't put explicit float variables
     into registers.  */
  /* ??? This should be checked after DECL_ARTIFICIAL, but tree-ssa
     propagates values across these stores, and it probably shouldn't.  */
  if (flag_float_store && FLOAT_TYPE_P (TREE_TYPE (decl)))
    return false;

  /* If the target doesn't want stack slots allocated for arguments,
     put the decl in a pseudo.  */
  if (!targetm.calls.allocate_stack_slots_for_args ())
    return true;

  /* If we're not interested in tracking debugging information for
     this decl, then we can certainly put it in a register.  */
  if (DECL_IGNORED_P (decl))
    return true;

  if (optimize)
    return true;

  /* At this point we are at -O0; only honor an explicit "register"
     request.  */
  if (!DECL_REGISTER (decl))
    return false;

  /* When not optimizing, disregard register keyword for types that
     could have methods, otherwise the methods won't be callable from
     the debugger.  */
  if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (decl)))
    return false;

  return true;
}
224438fd1498Szrj 
224538fd1498Szrj /* Structures to communicate between the subroutines of assign_parms.
224638fd1498Szrj    The first holds data persistent across all parameters, the second
224738fd1498Szrj    is cleared out for each parameter.  */
224838fd1498Szrj 
struct assign_parm_data_all
{
  /* When INIT_CUMULATIVE_ARGS gets revamped, allocating CUMULATIVE_ARGS
     should become a job of the target or otherwise encapsulated.  */
  CUMULATIVE_ARGS args_so_far_v;
  /* Opaque handle onto ARGS_SO_FAR_V, in the form the target hooks
     expect (see pack_cumulative_args).  */
  cumulative_args_t args_so_far;
  /* Running total of stack space consumed by incoming arguments.  */
  struct args_size stack_args_size;
  /* Synthetic ".result_ptr" PARM_DECL for the hidden aggregate-return
     pointer, or NULL (set in assign_parms_augmented_arg_list).  */
  tree function_result_decl;
  /* The function's DECL_ARGUMENTS chain, possibly with synthetic
     result-pointer/bounds decls prepended.  */
  tree orig_fnargs;
  /* Presumably bracket the insns emitted to convert arguments into
     their final modes; not set within this excerpt — verify against
     assign_parms.  */
  rtx_insn *first_conversion_insn;
  rtx_insn *last_conversion_insn;
  /* Extra "pretend" stack bytes the prologue must allocate (rounded
     to STACK_BYTES; see assign_parm_find_entry_rtl).  */
  HOST_WIDE_INT pretend_args_size;
  /* Pretend bytes not yet counted in STACK_ARGS_SIZE, so alignment is
     relative to the actual stack pointer.  */
  HOST_WIDE_INT extra_pretend_bytes;
  /* Stack space the caller allocated for register parameters, when
     INCOMING_REG_PARM_STACK_SPACE is defined; otherwise zero.  */
  int reg_parm_stack_space;
};
226438fd1498Szrj 
struct assign_parm_data_one
{
  /* Type of the parameter as seen in the function body.  */
  tree nominal_type;
  /* Type of the parameter as passed (DECL_ARG_TYPE), possibly rewritten
     to a pointer type for pass-by-invisible-reference.  */
  tree passed_type;
  /* Incoming location from the target hook (register or PARALLEL), or
     NULL when the argument arrives only on the stack.  */
  rtx entry_parm;
  /* The argument's home on the stack, if any.  */
  rtx stack_parm;
  machine_mode nominal_mode;
  machine_mode passed_mode;
  /* PASSED_MODE after ABI promotion (promote_function_mode).  */
  machine_mode promoted_mode;
  /* Stack offset/size/alignment data from locate_and_pad_parm.  */
  struct locate_and_pad_arg_data locate;
  /* Bytes passed in registers when the argument is split between
     registers and the stack (targetm.calls.arg_partial_bytes).  */
  int partial;
  /* Nonzero for a non-variadic parameter ("named" is a misnomer; see
     assign_parm_find_data_types).  */
  BOOL_BITFIELD named_arg : 1;
  /* Nonzero if passed by invisible reference.  */
  BOOL_BITFIELD passed_pointer : 1;
  /* Flags used later in assign_parms; not set in this excerpt.  */
  BOOL_BITFIELD on_stack : 1;
  BOOL_BITFIELD loaded_in_reg : 1;
};
228138fd1498Szrj 
/* Bookkeeping for a Pointer Bounds Checker (chkp) bounds parameter and
   the pointer parameter it describes.  */
struct bounds_parm_data
{
  /* Per-parameter data for the bounds parm itself.  */
  assign_parm_data_one parm_data;
  /* The PARM_DECL carrying the bounds.  */
  tree bounds_parm;
  /* The pointer PARM_DECL these bounds belong to.  */
  tree ptr_parm;
  /* Incoming rtl of PTR_PARM.  */
  rtx ptr_entry;
  /* Index of the bound among the pointer's bounds.  */
  int bound_no;
};
229038fd1498Szrj 
229138fd1498Szrj /* A subroutine of assign_parms.  Initialize ALL.  */
229238fd1498Szrj 
static void
assign_parms_initialize_all (struct assign_parm_data_all *all)
{
  tree fntype ATTRIBUTE_UNUSED;

  /* Start from a clean slate; fields not set below stay zero.  */
  memset (all, 0, sizeof (*all));

  fntype = TREE_TYPE (current_function_decl);

  /* Initialize the cumulative-args state for scanning the incoming
     arguments.  Targets that distinguish incoming from outgoing
     argument scanning provide INIT_CUMULATIVE_INCOMING_ARGS.  */
#ifdef INIT_CUMULATIVE_INCOMING_ARGS
  INIT_CUMULATIVE_INCOMING_ARGS (all->args_so_far_v, fntype, NULL_RTX);
#else
  INIT_CUMULATIVE_ARGS (all->args_so_far_v, fntype, NULL_RTX,
			current_function_decl, -1);
#endif
  all->args_so_far = pack_cumulative_args (&all->args_so_far_v);

  /* Record any stack space the caller allocates for register
     parameters; remains zero (from the memset) when the target does
     not define INCOMING_REG_PARM_STACK_SPACE.  */
#ifdef INCOMING_REG_PARM_STACK_SPACE
  all->reg_parm_stack_space
    = INCOMING_REG_PARM_STACK_SPACE (current_function_decl);
#endif
}
231538fd1498Szrj 
/* If ARGS contains entries with complex types, split each such entry
   into two entries of the component type.  ARGS is modified in place;
   nothing is returned.  */
231938fd1498Szrj 
232038fd1498Szrj static void
split_complex_args(vec<tree> * args)232138fd1498Szrj split_complex_args (vec<tree> *args)
232238fd1498Szrj {
232338fd1498Szrj   unsigned i;
232438fd1498Szrj   tree p;
232538fd1498Szrj 
232638fd1498Szrj   FOR_EACH_VEC_ELT (*args, i, p)
232738fd1498Szrj     {
232838fd1498Szrj       tree type = TREE_TYPE (p);
232938fd1498Szrj       if (TREE_CODE (type) == COMPLEX_TYPE
233038fd1498Szrj 	  && targetm.calls.split_complex_arg (type))
233138fd1498Szrj 	{
233238fd1498Szrj 	  tree decl;
233338fd1498Szrj 	  tree subtype = TREE_TYPE (type);
233438fd1498Szrj 	  bool addressable = TREE_ADDRESSABLE (p);
233538fd1498Szrj 
233638fd1498Szrj 	  /* Rewrite the PARM_DECL's type with its component.  */
233738fd1498Szrj 	  p = copy_node (p);
233838fd1498Szrj 	  TREE_TYPE (p) = subtype;
233938fd1498Szrj 	  DECL_ARG_TYPE (p) = TREE_TYPE (DECL_ARG_TYPE (p));
234038fd1498Szrj 	  SET_DECL_MODE (p, VOIDmode);
234138fd1498Szrj 	  DECL_SIZE (p) = NULL;
234238fd1498Szrj 	  DECL_SIZE_UNIT (p) = NULL;
234338fd1498Szrj 	  /* If this arg must go in memory, put it in a pseudo here.
234438fd1498Szrj 	     We can't allow it to go in memory as per normal parms,
234538fd1498Szrj 	     because the usual place might not have the imag part
234638fd1498Szrj 	     adjacent to the real part.  */
234738fd1498Szrj 	  DECL_ARTIFICIAL (p) = addressable;
234838fd1498Szrj 	  DECL_IGNORED_P (p) = addressable;
234938fd1498Szrj 	  TREE_ADDRESSABLE (p) = 0;
235038fd1498Szrj 	  layout_decl (p, 0);
235138fd1498Szrj 	  (*args)[i] = p;
235238fd1498Szrj 
235338fd1498Szrj 	  /* Build a second synthetic decl.  */
235438fd1498Szrj 	  decl = build_decl (EXPR_LOCATION (p),
235538fd1498Szrj 			     PARM_DECL, NULL_TREE, subtype);
235638fd1498Szrj 	  DECL_ARG_TYPE (decl) = DECL_ARG_TYPE (p);
235738fd1498Szrj 	  DECL_ARTIFICIAL (decl) = addressable;
235838fd1498Szrj 	  DECL_IGNORED_P (decl) = addressable;
235938fd1498Szrj 	  layout_decl (decl, 0);
236038fd1498Szrj 	  args->safe_insert (++i, decl);
236138fd1498Szrj 	}
236238fd1498Szrj     }
236338fd1498Szrj }
236438fd1498Szrj 
236538fd1498Szrj /* A subroutine of assign_parms.  Adjust the parameter list to incorporate
236638fd1498Szrj    the hidden struct return argument, and (abi willing) complex args.
236738fd1498Szrj    Return the new parameter list.  */
236838fd1498Szrj 
static vec<tree>
assign_parms_augmented_arg_list (struct assign_parm_data_all *all)
{
  tree fndecl = current_function_decl;
  tree fntype = TREE_TYPE (fndecl);
  vec<tree> fnargs = vNULL;
  tree arg;

  /* Start from a flat copy of the declared argument chain.  */
  for (arg = DECL_ARGUMENTS (fndecl); arg; arg = DECL_CHAIN (arg))
    fnargs.safe_push (arg);

  all->orig_fnargs = DECL_ARGUMENTS (fndecl);

  /* If struct value address is treated as the first argument, make it so.  */
  if (aggregate_value_p (DECL_RESULT (fndecl), fndecl)
      && ! cfun->returns_pcc_struct
      && targetm.calls.struct_value_rtx (TREE_TYPE (fndecl), 1) == 0)
    {
      tree type = build_pointer_type (TREE_TYPE (fntype));
      tree decl;

      /* Synthesize an artificial ".result_ptr" PARM_DECL holding the
	 address of the returned aggregate and prepend it both to the
	 orig_fnargs chain and to the vector.  */
      decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
			 PARM_DECL, get_identifier (".result_ptr"), type);
      DECL_ARG_TYPE (decl) = type;
      DECL_ARTIFICIAL (decl) = 1;
      DECL_NAMELESS (decl) = 1;
      TREE_CONSTANT (decl) = 1;
      /* We don't set DECL_IGNORED_P or DECL_REGISTER here.  If this
	 changes, the end of the RESULT_DECL handling block in
	 use_register_for_decl must be adjusted to match.  */

      DECL_CHAIN (decl) = all->orig_fnargs;
      all->orig_fnargs = decl;
      fnargs.safe_insert (0, decl);

      all->function_result_decl = decl;

      /* If function is instrumented then bounds of the
	 passed structure address is the second argument.  */
      if (chkp_function_instrumented_p (fndecl))
	{
	  decl = build_decl (DECL_SOURCE_LOCATION (fndecl),
			     PARM_DECL, get_identifier (".result_bnd"),
			     pointer_bounds_type_node);
	  DECL_ARG_TYPE (decl) = pointer_bounds_type_node;
	  DECL_ARTIFICIAL (decl) = 1;
	  DECL_NAMELESS (decl) = 1;
	  TREE_CONSTANT (decl) = 1;

	  /* Link the bounds decl in right after the result pointer.  */
	  DECL_CHAIN (decl) = DECL_CHAIN (all->orig_fnargs);
	  DECL_CHAIN (all->orig_fnargs) = decl;
	  fnargs.safe_insert (1, decl);
	}
    }

  /* If the target wants to split complex arguments into scalars, do so.  */
  if (targetm.calls.split_complex_arg)
    split_complex_args (&fnargs);

  return fnargs;
}
243038fd1498Szrj 
243138fd1498Szrj /* A subroutine of assign_parms.  Examine PARM and pull out type and mode
243238fd1498Szrj    data for the parameter.  Incorporate ABI specifics such as pass-by-
243338fd1498Szrj    reference and type promotion.  */
243438fd1498Szrj 
static void
assign_parm_find_data_types (struct assign_parm_data_all *all, tree parm,
			     struct assign_parm_data_one *data)
{
  tree nominal_type, passed_type;
  machine_mode nominal_mode, passed_mode, promoted_mode;
  int unsignedp;

  memset (data, 0, sizeof (*data));

  /* NAMED_ARG is a misnomer.  We really mean 'non-variadic'. */
  if (!cfun->stdarg)
    data->named_arg = 1;  /* No variadic parms.  */
  else if (DECL_CHAIN (parm))
    data->named_arg = 1;  /* Not the last non-variadic parm. */
  else if (targetm.calls.strict_argument_naming (all->args_so_far))
    data->named_arg = 1;  /* Only variadic ones are unnamed.  */
  else
    data->named_arg = 0;  /* Treat as variadic.  */

  nominal_type = TREE_TYPE (parm);
  passed_type = DECL_ARG_TYPE (parm);

  /* Look out for errors propagating this far.  Also, if the parameter's
     type is void then its value doesn't matter.  */
  if (TREE_TYPE (parm) == error_mark_node
      /* This can happen after weird syntax errors
	 or if an enum type is defined among the parms.  */
      || TREE_CODE (parm) != PARM_DECL
      || passed_type == NULL
      || VOID_TYPE_P (nominal_type))
    {
      nominal_type = passed_type = void_type_node;
      nominal_mode = passed_mode = promoted_mode = VOIDmode;
      goto egress;
    }

  /* Find mode of arg as it is passed, and mode of arg as it should be
     during execution of this function.  */
  passed_mode = TYPE_MODE (passed_type);
  nominal_mode = TYPE_MODE (nominal_type);

  /* If the parm is to be passed as a transparent union or record, use the
     type of the first field for the tests below.  We have already verified
     that the modes are the same.  */
  if ((TREE_CODE (passed_type) == UNION_TYPE
       || TREE_CODE (passed_type) == RECORD_TYPE)
      && TYPE_TRANSPARENT_AGGR (passed_type))
    passed_type = TREE_TYPE (first_field (passed_type));

  /* See if this arg was passed by invisible reference.  If so, both the
     passed and nominal views of the parm become the pointer type.  */
  if (pass_by_reference (&all->args_so_far_v, passed_mode,
			 passed_type, data->named_arg))
    {
      passed_type = nominal_type = build_pointer_type (passed_type);
      data->passed_pointer = true;
      passed_mode = nominal_mode = TYPE_MODE (nominal_type);
    }

  /* Find mode as it is passed by the ABI.  */
  unsignedp = TYPE_UNSIGNED (passed_type);
  promoted_mode = promote_function_mode (passed_type, passed_mode, &unsignedp,
				         TREE_TYPE (current_function_decl), 0);

 egress:
  /* Publish the computed types and modes into *DATA.  */
  data->nominal_type = nominal_type;
  data->passed_type = passed_type;
  data->nominal_mode = nominal_mode;
  data->passed_mode = passed_mode;
  data->promoted_mode = promoted_mode;
}
250638fd1498Szrj 
250738fd1498Szrj /* A subroutine of assign_parms.  Invoke setup_incoming_varargs.  */
250838fd1498Szrj 
250938fd1498Szrj static void
assign_parms_setup_varargs(struct assign_parm_data_all * all,struct assign_parm_data_one * data,bool no_rtl)251038fd1498Szrj assign_parms_setup_varargs (struct assign_parm_data_all *all,
251138fd1498Szrj 			    struct assign_parm_data_one *data, bool no_rtl)
251238fd1498Szrj {
251338fd1498Szrj   int varargs_pretend_bytes = 0;
251438fd1498Szrj 
251538fd1498Szrj   targetm.calls.setup_incoming_varargs (all->args_so_far,
251638fd1498Szrj 					data->promoted_mode,
251738fd1498Szrj 					data->passed_type,
251838fd1498Szrj 					&varargs_pretend_bytes, no_rtl);
251938fd1498Szrj 
252038fd1498Szrj   /* If the back-end has requested extra stack space, record how much is
252138fd1498Szrj      needed.  Do not change pretend_args_size otherwise since it may be
252238fd1498Szrj      nonzero from an earlier partial argument.  */
252338fd1498Szrj   if (varargs_pretend_bytes > 0)
252438fd1498Szrj     all->pretend_args_size = varargs_pretend_bytes;
252538fd1498Szrj }
252638fd1498Szrj 
252738fd1498Szrj /* A subroutine of assign_parms.  Set DATA->ENTRY_PARM corresponding to
252838fd1498Szrj    the incoming location of the current parameter.  */
252938fd1498Szrj 
static void
assign_parm_find_entry_rtl (struct assign_parm_data_all *all,
			    struct assign_parm_data_one *data)
{
  HOST_WIDE_INT pretend_bytes = 0;
  rtx entry_parm;
  bool in_regs;

  /* Erroneous or void parms (see assign_parm_find_data_types) have no
     real incoming location.  */
  if (data->promoted_mode == VOIDmode)
    {
      data->entry_parm = data->stack_parm = const0_rtx;
      return;
    }

  /* Let the target warn about ABI-affecting passing decisions.  */
  targetm.calls.warn_parameter_passing_abi (all->args_so_far,
					    data->passed_type);

  /* Ask the target where (in which register, if any) this parameter
     arrives.  */
  entry_parm = targetm.calls.function_incoming_arg (all->args_so_far,
						    data->promoted_mode,
						    data->passed_type,
						    data->named_arg);

  /* If the argument does not arrive in a register, drop the ABI mode
     promotion.  */
  if (entry_parm == 0)
    data->promoted_mode = data->passed_mode;

  /* Determine parm's home in the stack, in case it arrives in the stack
     or we should pretend it did.  Compute the stack position and rtx where
     the argument arrives and its size.

     There is one complexity here:  If this was a parameter that would
     have been passed in registers, but wasn't only because it is
     __builtin_va_alist, we want locate_and_pad_parm to treat it as if
     it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
     In this case, we call FUNCTION_ARG with NAMED set to 1 instead of 0
     as it was the previous time.  */
  in_regs = (entry_parm != 0) || POINTER_BOUNDS_TYPE_P (data->passed_type);
#ifdef STACK_PARMS_IN_REG_PARM_AREA
  in_regs = true;
#endif
  if (!in_regs && !data->named_arg)
    {
      if (targetm.calls.pretend_outgoing_varargs_named (all->args_so_far))
	{
	  rtx tem;
	  tem = targetm.calls.function_incoming_arg (all->args_so_far,
						     data->promoted_mode,
						     data->passed_type, true);
	  in_regs = tem != NULL;
	}
    }

  /* If this parameter was passed both in registers and in the stack, use
     the copy on the stack.  */
  if (targetm.calls.must_pass_in_stack (data->promoted_mode,
					data->passed_type))
    entry_parm = 0;

  if (entry_parm)
    {
      int partial;

      partial = targetm.calls.arg_partial_bytes (all->args_so_far,
						 data->promoted_mode,
						 data->passed_type,
						 data->named_arg);
      data->partial = partial;

      /* The caller might already have allocated stack space for the
	 register parameters.  */
      if (partial != 0 && all->reg_parm_stack_space == 0)
	{
	  /* Part of this argument is passed in registers and part
	     is passed on the stack.  Ask the prologue code to extend
	     the stack part so that we can recreate the full value.

	     PRETEND_BYTES is the size of the registers we need to store.
	     CURRENT_FUNCTION_PRETEND_ARGS_SIZE is the amount of extra
	     stack space that the prologue should allocate.

	     Internally, gcc assumes that the argument pointer is aligned
	     to STACK_BOUNDARY bits.  This is used both for alignment
	     optimizations (see init_emit) and to locate arguments that are
	     aligned to more than PARM_BOUNDARY bits.  We must preserve this
	     invariant by rounding CURRENT_FUNCTION_PRETEND_ARGS_SIZE up to
	     a stack boundary.  */

	  /* We assume at most one partial arg, and it must be the first
	     argument on the stack.  */
	  gcc_assert (!all->extra_pretend_bytes && !all->pretend_args_size);

	  pretend_bytes = partial;
	  all->pretend_args_size = CEIL_ROUND (pretend_bytes, STACK_BYTES);

	  /* We want to align relative to the actual stack pointer, so
	     don't include this in the stack size until later.  */
	  all->extra_pretend_bytes = all->pretend_args_size;
	}
    }

  locate_and_pad_parm (data->promoted_mode, data->passed_type, in_regs,
		       all->reg_parm_stack_space,
		       entry_parm ? data->partial : 0, current_function_decl,
		       &all->stack_args_size, &data->locate);

  /* Update parm_stack_boundary if this parameter is passed in the
     stack.  */
  if (!in_regs && crtl->parm_stack_boundary < data->locate.boundary)
    crtl->parm_stack_boundary = data->locate.boundary;

  /* Adjust offsets to include the pretend args.  */
  pretend_bytes = all->extra_pretend_bytes - pretend_bytes;
  data->locate.slot_offset.constant += pretend_bytes;
  data->locate.offset.constant += pretend_bytes;

  data->entry_parm = entry_parm;
}
264638fd1498Szrj 
264738fd1498Szrj /* A subroutine of assign_parms.  If there is actually space on the stack
264838fd1498Szrj    for this parm, count it in stack_args_size and return true.  */
264938fd1498Szrj 
265038fd1498Szrj static bool
assign_parm_is_stack_parm(struct assign_parm_data_all * all,struct assign_parm_data_one * data)265138fd1498Szrj assign_parm_is_stack_parm (struct assign_parm_data_all *all,
265238fd1498Szrj 			   struct assign_parm_data_one *data)
265338fd1498Szrj {
265438fd1498Szrj   /* Bounds are never passed on the stack to keep compatibility
265538fd1498Szrj      with not instrumented code.  */
265638fd1498Szrj   if (POINTER_BOUNDS_TYPE_P (data->passed_type))
265738fd1498Szrj     return false;
265838fd1498Szrj   /* Trivially true if we've no incoming register.  */
265938fd1498Szrj   else if (data->entry_parm == NULL)
266038fd1498Szrj     ;
266138fd1498Szrj   /* Also true if we're partially in registers and partially not,
266238fd1498Szrj      since we've arranged to drop the entire argument on the stack.  */
266338fd1498Szrj   else if (data->partial != 0)
266438fd1498Szrj     ;
266538fd1498Szrj   /* Also true if the target says that it's passed in both registers
266638fd1498Szrj      and on the stack.  */
266738fd1498Szrj   else if (GET_CODE (data->entry_parm) == PARALLEL
266838fd1498Szrj 	   && XEXP (XVECEXP (data->entry_parm, 0, 0), 0) == NULL_RTX)
266938fd1498Szrj     ;
267038fd1498Szrj   /* Also true if the target says that there's stack allocated for
267138fd1498Szrj      all register parameters.  */
267238fd1498Szrj   else if (all->reg_parm_stack_space > 0)
267338fd1498Szrj     ;
267438fd1498Szrj   /* Otherwise, no, this parameter has no ABI defined stack slot.  */
267538fd1498Szrj   else
267638fd1498Szrj     return false;
267738fd1498Szrj 
267838fd1498Szrj   all->stack_args_size.constant += data->locate.size.constant;
267938fd1498Szrj   if (data->locate.size.var)
268038fd1498Szrj     ADD_PARM_SIZE (all->stack_args_size, data->locate.size.var);
268138fd1498Szrj 
268238fd1498Szrj   return true;
268338fd1498Szrj }
268438fd1498Szrj 
268538fd1498Szrj /* A subroutine of assign_parms.  Given that this parameter is allocated
268638fd1498Szrj    stack space by the ABI, find it.  */
268738fd1498Szrj 
static void
assign_parm_find_stack_rtl (tree parm, struct assign_parm_data_one *data)
{
  rtx offset_rtx, stack_parm;
  unsigned int align, boundary;

  /* If we're passing this arg using a reg, make its stack home the
     aligned stack slot (slot_offset); otherwise use the plain offset
     computed by locate_and_pad_parm.  */
  if (data->entry_parm)
    offset_rtx = ARGS_SIZE_RTX (data->locate.slot_offset);
  else
    offset_rtx = ARGS_SIZE_RTX (data->locate.offset);

  /* Build a MEM addressing internal_arg_pointer + offset in the parm's
     promoted mode.  */
  stack_parm = crtl->args.internal_arg_pointer;
  if (offset_rtx != const0_rtx)
    stack_parm = gen_rtx_PLUS (Pmode, stack_parm, offset_rtx);
  stack_parm = gen_rtx_MEM (data->promoted_mode, stack_parm);

  /* Only attach the decl's memory attributes when the slot really holds
     the object itself, not a pointer to it.  */
  if (!data->passed_pointer)
    {
      set_mem_attributes (stack_parm, parm, 1);
      /* set_mem_attributes could set MEM_SIZE to the passed mode's size,
	 while promoted mode's size is needed.  */
      if (data->promoted_mode != BLKmode
	  && data->promoted_mode != DECL_MODE (parm))
	{
	  set_mem_size (stack_parm, GET_MODE_SIZE (data->promoted_mode));
	  if (MEM_EXPR (stack_parm) && MEM_OFFSET_KNOWN_P (stack_parm))
	    {
	      /* Adjust the recorded offset so it refers to the lowpart
		 of the promoted value, which is where DECL_MODE's bytes
		 actually live.  */
	      poly_int64 offset = subreg_lowpart_offset (DECL_MODE (parm),
							 data->promoted_mode);
	      if (maybe_ne (offset, 0))
		set_mem_offset (stack_parm, MEM_OFFSET (stack_parm) - offset);
	    }
	}
    }

  boundary = data->locate.boundary;
  align = BITS_PER_UNIT;

  /* If we're padding upward, we know that the alignment of the slot
     is TARGET_FUNCTION_ARG_BOUNDARY.  If we're using slot_offset, we're
     intentionally forcing upward padding.  Otherwise we have to come
     up with a guess at the alignment based on OFFSET_RTX.  */
  poly_int64 offset;
  if (data->locate.where_pad != PAD_DOWNWARD || data->entry_parm)
    align = boundary;
  else if (poly_int_rtx_p (offset_rtx, &offset))
    {
      /* Start from the least-significant set bit of the boundary and
	 reduce by the alignment implied by the known offset.  */
      align = least_bit_hwi (boundary);
      unsigned int offset_align = known_alignment (offset) * BITS_PER_UNIT;
      if (offset_align != 0)
	align = MIN (align, offset_align);
    }
  set_mem_align (stack_parm, align);

  /* Record the register/slot pairing for debug output purposes when the
     parm also arrives in a register.  */
  if (data->entry_parm)
    set_reg_attrs_for_parm (data->entry_parm, stack_parm);

  data->stack_parm = stack_parm;
}
274938fd1498Szrj 
275038fd1498Szrj /* A subroutine of assign_parms.  Adjust DATA->ENTRY_RTL such that it's
275138fd1498Szrj    always valid and contiguous.  */
275238fd1498Szrj 
static void
assign_parm_adjust_entry_rtl (struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;
  rtx stack_parm = data->stack_parm;

  /* If this parm was passed part in regs and part in memory, pretend it
     arrived entirely in memory by pushing the register-part onto the stack.
     In the special case of a DImode or DFmode that is split, we could put
     it together in a pseudoreg directly, but for now that's not worth
     bothering with.  */
  if (data->partial != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous
	 locations.  The Irix 6 ABI has examples of this.  */
      if (GET_CODE (entry_parm) == PARALLEL)
	emit_group_store (validize_mem (copy_rtx (stack_parm)), entry_parm,
			  data->passed_type,
			  int_size_in_bytes (data->passed_type));
      else
	{
	  /* The register part is a contiguous run of words starting at
	     ENTRY_PARM's register number.  */
	  gcc_assert (data->partial % UNITS_PER_WORD == 0);
	  move_block_from_reg (REGNO (entry_parm),
			       validize_mem (copy_rtx (stack_parm)),
			       data->partial / UNITS_PER_WORD);
	}

      entry_parm = stack_parm;
    }

  /* If we didn't decide this parm came in a register, by default it came
     on the stack.  */
  else if (entry_parm == NULL)
    entry_parm = stack_parm;

  /* When an argument is passed in multiple locations, we can't make use
     of this information, but we can save some copying if the whole argument
     is passed in a single register.  */
  else if (GET_CODE (entry_parm) == PARALLEL
	   && data->nominal_mode != BLKmode
	   && data->passed_mode != BLKmode)
    {
      size_t i, len = XVECLEN (entry_parm, 0);

      /* Look for a PARALLEL element that covers the whole argument: a
	 register of the passed mode at offset zero.  */
      for (i = 0; i < len; i++)
	if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
	    && REG_P (XEXP (XVECEXP (entry_parm, 0, i), 0))
	    && (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
		== data->passed_mode)
	    && INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
	  {
	    entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
	    break;
	  }
    }

  data->entry_parm = entry_parm;
}
281138fd1498Szrj 
281238fd1498Szrj /* A subroutine of assign_parms.  Reconstitute any values which were
281338fd1498Szrj    passed in multiple registers and would fit in a single register.  */
281438fd1498Szrj 
281538fd1498Szrj static void
assign_parm_remove_parallels(struct assign_parm_data_one * data)281638fd1498Szrj assign_parm_remove_parallels (struct assign_parm_data_one *data)
281738fd1498Szrj {
281838fd1498Szrj   rtx entry_parm = data->entry_parm;
281938fd1498Szrj 
282038fd1498Szrj   /* Convert the PARALLEL to a REG of the same mode as the parallel.
282138fd1498Szrj      This can be done with register operations rather than on the
282238fd1498Szrj      stack, even if we will store the reconstituted parameter on the
282338fd1498Szrj      stack later.  */
282438fd1498Szrj   if (GET_CODE (entry_parm) == PARALLEL && GET_MODE (entry_parm) != BLKmode)
282538fd1498Szrj     {
282638fd1498Szrj       rtx parmreg = gen_reg_rtx (GET_MODE (entry_parm));
282738fd1498Szrj       emit_group_store (parmreg, entry_parm, data->passed_type,
282838fd1498Szrj 			GET_MODE_SIZE (GET_MODE (entry_parm)));
282938fd1498Szrj       entry_parm = parmreg;
283038fd1498Szrj     }
283138fd1498Szrj 
283238fd1498Szrj   data->entry_parm = entry_parm;
283338fd1498Szrj }
283438fd1498Szrj 
283538fd1498Szrj /* A subroutine of assign_parms.  Adjust DATA->STACK_RTL such that it's
283638fd1498Szrj    always valid and properly aligned.  */
283738fd1498Szrj 
283838fd1498Szrj static void
assign_parm_adjust_stack_rtl(struct assign_parm_data_one * data)283938fd1498Szrj assign_parm_adjust_stack_rtl (struct assign_parm_data_one *data)
284038fd1498Szrj {
284138fd1498Szrj   rtx stack_parm = data->stack_parm;
284238fd1498Szrj 
284338fd1498Szrj   /* If we can't trust the parm stack slot to be aligned enough for its
284438fd1498Szrj      ultimate type, don't use that slot after entry.  We'll make another
284538fd1498Szrj      stack slot, if we need one.  */
284638fd1498Szrj   if (stack_parm
284738fd1498Szrj       && ((STRICT_ALIGNMENT
284838fd1498Szrj 	   && GET_MODE_ALIGNMENT (data->nominal_mode) > MEM_ALIGN (stack_parm))
284938fd1498Szrj 	  || (data->nominal_type
285038fd1498Szrj 	      && TYPE_ALIGN (data->nominal_type) > MEM_ALIGN (stack_parm)
285138fd1498Szrj 	      && MEM_ALIGN (stack_parm) < PREFERRED_STACK_BOUNDARY)))
285238fd1498Szrj     stack_parm = NULL;
285338fd1498Szrj 
285438fd1498Szrj   /* If parm was passed in memory, and we need to convert it on entry,
285538fd1498Szrj      don't store it back in that same slot.  */
285638fd1498Szrj   else if (data->entry_parm == stack_parm
285738fd1498Szrj 	   && data->nominal_mode != BLKmode
285838fd1498Szrj 	   && data->nominal_mode != data->passed_mode)
285938fd1498Szrj     stack_parm = NULL;
286038fd1498Szrj 
286138fd1498Szrj   /* If stack protection is in effect for this function, don't leave any
286238fd1498Szrj      pointers in their passed stack slots.  */
286338fd1498Szrj   else if (crtl->stack_protect_guard
286438fd1498Szrj 	   && (flag_stack_protect == 2
286538fd1498Szrj 	       || data->passed_pointer
286638fd1498Szrj 	       || POINTER_TYPE_P (data->nominal_type)))
286738fd1498Szrj     stack_parm = NULL;
286838fd1498Szrj 
286938fd1498Szrj   data->stack_parm = stack_parm;
287038fd1498Szrj }
287138fd1498Szrj 
287238fd1498Szrj /* A subroutine of assign_parms.  Return true if the current parameter
287338fd1498Szrj    should be stored as a BLKmode in the current frame.  */
287438fd1498Szrj 
287538fd1498Szrj static bool
assign_parm_setup_block_p(struct assign_parm_data_one * data)287638fd1498Szrj assign_parm_setup_block_p (struct assign_parm_data_one *data)
287738fd1498Szrj {
287838fd1498Szrj   if (data->nominal_mode == BLKmode)
287938fd1498Szrj     return true;
288038fd1498Szrj   if (GET_MODE (data->entry_parm) == BLKmode)
288138fd1498Szrj     return true;
288238fd1498Szrj 
288338fd1498Szrj #ifdef BLOCK_REG_PADDING
288438fd1498Szrj   /* Only assign_parm_setup_block knows how to deal with register arguments
288538fd1498Szrj      that are padded at the least significant end.  */
288638fd1498Szrj   if (REG_P (data->entry_parm)
288738fd1498Szrj       && known_lt (GET_MODE_SIZE (data->promoted_mode), UNITS_PER_WORD)
288838fd1498Szrj       && (BLOCK_REG_PADDING (data->passed_mode, data->passed_type, 1)
288938fd1498Szrj 	  == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
289038fd1498Szrj     return true;
289138fd1498Szrj #endif
289238fd1498Szrj 
289338fd1498Szrj   return false;
289438fd1498Szrj }
289538fd1498Szrj 
289638fd1498Szrj /* A subroutine of assign_parms.  Arrange for the parameter to be
289738fd1498Szrj    present and valid in DATA->STACK_RTL.  */
289838fd1498Szrj 
static void
assign_parm_setup_block (struct assign_parm_data_all *all,
			 tree parm, struct assign_parm_data_one *data)
{
  rtx entry_parm = data->entry_parm;
  rtx stack_parm = data->stack_parm;
  /* Non-null when the parm lives in a CONCAT pseudo (e.g. a complex
     value split into two registers); the final value is copied there at
     the end.  */
  rtx target_reg = NULL_RTX;
  /* True when the stores above were emitted into the deferred
     conversion sequence rather than the main insn stream.  */
  bool in_conversion_seq = false;
  HOST_WIDE_INT size;
  HOST_WIDE_INT size_stored;

  /* Move PARALLEL pieces into temporaries first so later group stores
     see stable values.  */
  if (GET_CODE (entry_parm) == PARALLEL)
    entry_parm = emit_group_move_into_temps (entry_parm);

  /* If we want the parameter in a pseudo, don't use a stack slot.  */
  if (is_gimple_reg (parm) && use_register_for_decl (parm))
    {
      tree def = ssa_default_def (cfun, parm);
      gcc_assert (def);
      machine_mode mode = promote_ssa_mode (def, NULL);
      rtx reg = gen_reg_rtx (mode);
      if (GET_CODE (reg) != CONCAT)
	stack_parm = reg;
      else
	{
	  target_reg = reg;
	  /* Avoid allocating a stack slot, if there isn't one
	     preallocated by the ABI.  It might seem like we should
	     always prefer a pseudo, but converting between
	     floating-point and integer modes goes through the stack
	     on various machines, so it's better to use the reserved
	     stack slot than to risk wasting it and allocating more
	     for the conversion.  */
	  if (stack_parm == NULL_RTX)
	    {
	      /* Temporarily disable CONCAT generation so a single flat
		 pseudo of MODE is produced as the staging location.  */
	      int save = generating_concat_p;
	      generating_concat_p = 0;
	      stack_parm = gen_reg_rtx (mode);
	      generating_concat_p = save;
	    }
	}
      data->stack_parm = NULL;
    }

  /* SIZE is the exact byte size of the type; SIZE_STORED rounds it up
     to a whole number of words, which is what the block moves below
     actually transfer.  */
  size = int_size_in_bytes (data->passed_type);
  size_stored = CEIL_ROUND (size, UNITS_PER_WORD);
  if (stack_parm == 0)
    {
      SET_DECL_ALIGN (parm, MAX (DECL_ALIGN (parm), BITS_PER_WORD));
      stack_parm = assign_stack_local (BLKmode, size_stored,
				       DECL_ALIGN (parm));
      if (known_eq (GET_MODE_SIZE (GET_MODE (entry_parm)), size))
	PUT_MODE (stack_parm, GET_MODE (entry_parm));
      set_mem_attributes (stack_parm, parm, 1);
    }

  /* If a BLKmode arrives in registers, copy it to a stack slot.  Handle
     calls that pass values in multiple non-contiguous locations.  */
  if (REG_P (entry_parm) || GET_CODE (entry_parm) == PARALLEL)
    {
      rtx mem;

      /* Note that we will be storing an integral number of words.
	 So we have to be careful to ensure that we allocate an
	 integral number of words.  We do this above when we call
	 assign_stack_local if space was not allocated in the argument
	 list.  If it was, this will not work if PARM_BOUNDARY is not
	 a multiple of BITS_PER_WORD.  It isn't clear how to fix this
	 if it becomes a problem.  Exception is when BLKmode arrives
	 with arguments not conforming to word_mode.  */

      if (data->stack_parm == 0)
	;
      else if (GET_CODE (entry_parm) == PARALLEL)
	;
      else
	gcc_assert (!size || !(PARM_BOUNDARY % BITS_PER_WORD));

      mem = validize_mem (copy_rtx (stack_parm));

      /* Handle values in multiple non-contiguous locations.  */
      if (GET_CODE (entry_parm) == PARALLEL && !MEM_P (mem))
	emit_group_store (mem, entry_parm, data->passed_type, size);
      else if (GET_CODE (entry_parm) == PARALLEL)
	{
	  /* Store into memory inside the deferred conversion sequence,
	     which runs after all parameters have been moved.  */
	  push_to_sequence2 (all->first_conversion_insn,
			     all->last_conversion_insn);
	  emit_group_store (mem, entry_parm, data->passed_type, size);
	  all->first_conversion_insn = get_insns ();
	  all->last_conversion_insn = get_last_insn ();
	  end_sequence ();
	  in_conversion_seq = true;
	}

      else if (size == 0)
	;

      /* If SIZE is that of a mode no bigger than a word, just use
	 that mode's store operation.  */
      else if (size <= UNITS_PER_WORD)
	{
	  unsigned int bits = size * BITS_PER_UNIT;
	  machine_mode mode = int_mode_for_size (bits, 0).else_blk ();

	  if (mode != BLKmode
#ifdef BLOCK_REG_PADDING
	      && (size == UNITS_PER_WORD
		  || (BLOCK_REG_PADDING (mode, data->passed_type, 1)
		      != (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)))
#endif
	      )
	    {
	      rtx reg;

	      /* We are really truncating a word_mode value containing
		 SIZE bytes into a value of mode MODE.  If such an
		 operation requires no actual instructions, we can refer
		 to the value directly in mode MODE, otherwise we must
		 start with the register in word_mode and explicitly
		 convert it.  */
	      if (targetm.truly_noop_truncation (size * BITS_PER_UNIT,
						 BITS_PER_WORD))
		reg = gen_rtx_REG (mode, REGNO (entry_parm));
	      else
		{
		  reg = gen_rtx_REG (word_mode, REGNO (entry_parm));
		  reg = convert_to_mode (mode, copy_to_reg (reg), 1);
		}
	      emit_move_insn (change_address (mem, mode, 0), reg);
	    }

#ifdef BLOCK_REG_PADDING
	  /* Storing the register in memory as a full word, as
	     move_block_from_reg below would do, and then using the
	     MEM in a smaller mode, has the effect of shifting right
	     if BYTES_BIG_ENDIAN.  If we're bypassing memory, the
	     shifting must be explicit.  */
	  else if (!MEM_P (mem))
	    {
	      rtx x;

	      /* If the assert below fails, we should have taken the
		 mode != BLKmode path above, unless we have downward
		 padding of smaller-than-word arguments on a machine
		 with little-endian bytes, which would likely require
		 additional changes to work correctly.  */
	      gcc_checking_assert (BYTES_BIG_ENDIAN
				   && (BLOCK_REG_PADDING (mode,
							  data->passed_type, 1)
				       == PAD_UPWARD));

	      int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;

	      x = gen_rtx_REG (word_mode, REGNO (entry_parm));
	      x = expand_shift (RSHIFT_EXPR, word_mode, x, by,
				NULL_RTX, 1);
	      x = force_reg (word_mode, x);
	      x = gen_lowpart_SUBREG (GET_MODE (mem), x);

	      emit_move_insn (mem, x);
	    }
#endif

	  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN
	     machine must be aligned to the left before storing
	     to memory.  Note that the previous test doesn't
	     handle all cases (e.g. SIZE == 3).  */
	  else if (size != UNITS_PER_WORD
#ifdef BLOCK_REG_PADDING
		   && (BLOCK_REG_PADDING (mode, data->passed_type, 1)
		       == PAD_DOWNWARD)
#else
		   && BYTES_BIG_ENDIAN
#endif
		   )
	    {
	      rtx tem, x;
	      int by = (UNITS_PER_WORD - size) * BITS_PER_UNIT;
	      rtx reg = gen_rtx_REG (word_mode, REGNO (entry_parm));

	      x = expand_shift (LSHIFT_EXPR, word_mode, reg, by, NULL_RTX, 1);
	      tem = change_address (mem, word_mode, 0);
	      emit_move_insn (tem, x);
	    }
	  else
	    move_block_from_reg (REGNO (entry_parm), mem,
				 size_stored / UNITS_PER_WORD);
	}
      else if (!MEM_P (mem))
	{
	  /* MEM is really a pseudo here; a multi-word value arriving in
	     registers is moved with a single insn.  */
	  gcc_checking_assert (size > UNITS_PER_WORD);
#ifdef BLOCK_REG_PADDING
	  gcc_checking_assert (BLOCK_REG_PADDING (GET_MODE (mem),
						  data->passed_type, 0)
			       == PAD_UPWARD);
#endif
	  emit_move_insn (mem, entry_parm);
	}
      else
	move_block_from_reg (REGNO (entry_parm), mem,
			     size_stored / UNITS_PER_WORD);
    }
  else if (data->stack_parm == 0)
    {
      /* The parm arrived in memory but we chose a fresh slot above;
	 copy it there inside the deferred conversion sequence.  */
      push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
      emit_block_move (stack_parm, data->entry_parm, GEN_INT (size),
		       BLOCK_OP_NORMAL);
      all->first_conversion_insn = get_insns ();
      all->last_conversion_insn = get_last_insn ();
      end_sequence ();
      in_conversion_seq = true;
    }

  /* Finally copy the staged value into the CONCAT pseudo, in the same
     insn stream (main or conversion sequence) used for the stores.  */
  if (target_reg)
    {
      if (!in_conversion_seq)
	emit_move_insn (target_reg, stack_parm);
      else
	{
	  push_to_sequence2 (all->first_conversion_insn,
			     all->last_conversion_insn);
	  emit_move_insn (target_reg, stack_parm);
	  all->first_conversion_insn = get_insns ();
	  all->last_conversion_insn = get_last_insn ();
	  end_sequence ();
	}
      stack_parm = target_reg;
    }

  data->stack_parm = stack_parm;
  set_parm_rtl (parm, stack_parm);
}
313138fd1498Szrj 
313238fd1498Szrj /* A subroutine of assign_parms.  Allocate a pseudo to hold the current
313338fd1498Szrj    parameter.  Get it there.  Perform all ABI specified conversions.  */
313438fd1498Szrj 
313538fd1498Szrj static void
assign_parm_setup_reg(struct assign_parm_data_all * all,tree parm,struct assign_parm_data_one * data)313638fd1498Szrj assign_parm_setup_reg (struct assign_parm_data_all *all, tree parm,
313738fd1498Szrj 		       struct assign_parm_data_one *data)
313838fd1498Szrj {
313938fd1498Szrj   rtx parmreg, validated_mem;
314038fd1498Szrj   rtx equiv_stack_parm;
314138fd1498Szrj   machine_mode promoted_nominal_mode;
314238fd1498Szrj   int unsignedp = TYPE_UNSIGNED (TREE_TYPE (parm));
314338fd1498Szrj   bool did_conversion = false;
314438fd1498Szrj   bool need_conversion, moved;
314538fd1498Szrj   rtx rtl;
314638fd1498Szrj 
314738fd1498Szrj   /* Store the parm in a pseudoregister during the function, but we may
314838fd1498Szrj      need to do it in a wider mode.  Using 2 here makes the result
314938fd1498Szrj      consistent with promote_decl_mode and thus expand_expr_real_1.  */
315038fd1498Szrj   promoted_nominal_mode
315138fd1498Szrj     = promote_function_mode (data->nominal_type, data->nominal_mode, &unsignedp,
315238fd1498Szrj 			     TREE_TYPE (current_function_decl), 2);
315338fd1498Szrj 
315438fd1498Szrj   parmreg = gen_reg_rtx (promoted_nominal_mode);
315538fd1498Szrj   if (!DECL_ARTIFICIAL (parm))
315638fd1498Szrj     mark_user_reg (parmreg);
315738fd1498Szrj 
315838fd1498Szrj   /* If this was an item that we received a pointer to,
315938fd1498Szrj      set rtl appropriately.  */
316038fd1498Szrj   if (data->passed_pointer)
316138fd1498Szrj     {
316238fd1498Szrj       rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data->passed_type)), parmreg);
316338fd1498Szrj       set_mem_attributes (rtl, parm, 1);
316438fd1498Szrj     }
316538fd1498Szrj   else
316638fd1498Szrj     rtl = parmreg;
316738fd1498Szrj 
316838fd1498Szrj   assign_parm_remove_parallels (data);
316938fd1498Szrj 
317038fd1498Szrj   /* Copy the value into the register, thus bridging between
317138fd1498Szrj      assign_parm_find_data_types and expand_expr_real_1.  */
317238fd1498Szrj 
317338fd1498Szrj   equiv_stack_parm = data->stack_parm;
317438fd1498Szrj   validated_mem = validize_mem (copy_rtx (data->entry_parm));
317538fd1498Szrj 
317638fd1498Szrj   need_conversion = (data->nominal_mode != data->passed_mode
317738fd1498Szrj 		     || promoted_nominal_mode != data->promoted_mode);
317838fd1498Szrj   moved = false;
317938fd1498Szrj 
318038fd1498Szrj   if (need_conversion
318138fd1498Szrj       && GET_MODE_CLASS (data->nominal_mode) == MODE_INT
318238fd1498Szrj       && data->nominal_mode == data->passed_mode
318338fd1498Szrj       && data->nominal_mode == GET_MODE (data->entry_parm))
318438fd1498Szrj     {
318538fd1498Szrj       /* ENTRY_PARM has been converted to PROMOTED_MODE, its
318638fd1498Szrj 	 mode, by the caller.  We now have to convert it to
318738fd1498Szrj 	 NOMINAL_MODE, if different.  However, PARMREG may be in
318838fd1498Szrj 	 a different mode than NOMINAL_MODE if it is being stored
318938fd1498Szrj 	 promoted.
319038fd1498Szrj 
319138fd1498Szrj 	 If ENTRY_PARM is a hard register, it might be in a register
319238fd1498Szrj 	 not valid for operating in its mode (e.g., an odd-numbered
319338fd1498Szrj 	 register for a DFmode).  In that case, moves are the only
319438fd1498Szrj 	 thing valid, so we can't do a convert from there.  This
319538fd1498Szrj 	 occurs when the calling sequence allow such misaligned
319638fd1498Szrj 	 usages.
319738fd1498Szrj 
319838fd1498Szrj 	 In addition, the conversion may involve a call, which could
319938fd1498Szrj 	 clobber parameters which haven't been copied to pseudo
320038fd1498Szrj 	 registers yet.
320138fd1498Szrj 
320238fd1498Szrj 	 First, we try to emit an insn which performs the necessary
320338fd1498Szrj 	 conversion.  We verify that this insn does not clobber any
320438fd1498Szrj 	 hard registers.  */
320538fd1498Szrj 
320638fd1498Szrj       enum insn_code icode;
320738fd1498Szrj       rtx op0, op1;
320838fd1498Szrj 
320938fd1498Szrj       icode = can_extend_p (promoted_nominal_mode, data->passed_mode,
321038fd1498Szrj 			    unsignedp);
321138fd1498Szrj 
321238fd1498Szrj       op0 = parmreg;
321338fd1498Szrj       op1 = validated_mem;
321438fd1498Szrj       if (icode != CODE_FOR_nothing
321538fd1498Szrj 	  && insn_operand_matches (icode, 0, op0)
321638fd1498Szrj 	  && insn_operand_matches (icode, 1, op1))
321738fd1498Szrj 	{
321838fd1498Szrj 	  enum rtx_code code = unsignedp ? ZERO_EXTEND : SIGN_EXTEND;
321938fd1498Szrj 	  rtx_insn *insn, *insns;
322038fd1498Szrj 	  rtx t = op1;
322138fd1498Szrj 	  HARD_REG_SET hardregs;
322238fd1498Szrj 
322338fd1498Szrj 	  start_sequence ();
322438fd1498Szrj 	  /* If op1 is a hard register that is likely spilled, first
322538fd1498Szrj 	     force it into a pseudo, otherwise combiner might extend
322638fd1498Szrj 	     its lifetime too much.  */
322738fd1498Szrj 	  if (GET_CODE (t) == SUBREG)
322838fd1498Szrj 	    t = SUBREG_REG (t);
322938fd1498Szrj 	  if (REG_P (t)
323038fd1498Szrj 	      && HARD_REGISTER_P (t)
323138fd1498Szrj 	      && ! TEST_HARD_REG_BIT (fixed_reg_set, REGNO (t))
323238fd1498Szrj 	      && targetm.class_likely_spilled_p (REGNO_REG_CLASS (REGNO (t))))
323338fd1498Szrj 	    {
323438fd1498Szrj 	      t = gen_reg_rtx (GET_MODE (op1));
323538fd1498Szrj 	      emit_move_insn (t, op1);
323638fd1498Szrj 	    }
323738fd1498Szrj 	  else
323838fd1498Szrj 	    t = op1;
323938fd1498Szrj 	  rtx_insn *pat = gen_extend_insn (op0, t, promoted_nominal_mode,
324038fd1498Szrj 					   data->passed_mode, unsignedp);
324138fd1498Szrj 	  emit_insn (pat);
324238fd1498Szrj 	  insns = get_insns ();
324338fd1498Szrj 
324438fd1498Szrj 	  moved = true;
324538fd1498Szrj 	  CLEAR_HARD_REG_SET (hardregs);
324638fd1498Szrj 	  for (insn = insns; insn && moved; insn = NEXT_INSN (insn))
324738fd1498Szrj 	    {
324838fd1498Szrj 	      if (INSN_P (insn))
324938fd1498Szrj 		note_stores (PATTERN (insn), record_hard_reg_sets,
325038fd1498Szrj 			     &hardregs);
325138fd1498Szrj 	      if (!hard_reg_set_empty_p (hardregs))
325238fd1498Szrj 		moved = false;
325338fd1498Szrj 	    }
325438fd1498Szrj 
325538fd1498Szrj 	  end_sequence ();
325638fd1498Szrj 
325738fd1498Szrj 	  if (moved)
325838fd1498Szrj 	    {
325938fd1498Szrj 	      emit_insn (insns);
326038fd1498Szrj 	      if (equiv_stack_parm != NULL_RTX)
326138fd1498Szrj 		equiv_stack_parm = gen_rtx_fmt_e (code, GET_MODE (parmreg),
326238fd1498Szrj 						  equiv_stack_parm);
326338fd1498Szrj 	    }
326438fd1498Szrj 	}
326538fd1498Szrj     }
326638fd1498Szrj 
326738fd1498Szrj   if (moved)
326838fd1498Szrj     /* Nothing to do.  */
326938fd1498Szrj     ;
327038fd1498Szrj   else if (need_conversion)
327138fd1498Szrj     {
327238fd1498Szrj       /* We did not have an insn to convert directly, or the sequence
327338fd1498Szrj 	 generated appeared unsafe.  We must first copy the parm to a
327438fd1498Szrj 	 pseudo reg, and save the conversion until after all
327538fd1498Szrj 	 parameters have been moved.  */
327638fd1498Szrj 
327738fd1498Szrj       int save_tree_used;
327838fd1498Szrj       rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));
327938fd1498Szrj 
328038fd1498Szrj       emit_move_insn (tempreg, validated_mem);
328138fd1498Szrj 
328238fd1498Szrj       push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
328338fd1498Szrj       tempreg = convert_to_mode (data->nominal_mode, tempreg, unsignedp);
328438fd1498Szrj 
328538fd1498Szrj       if (partial_subreg_p (tempreg)
328638fd1498Szrj 	  && GET_MODE (tempreg) == data->nominal_mode
328738fd1498Szrj 	  && REG_P (SUBREG_REG (tempreg))
328838fd1498Szrj 	  && data->nominal_mode == data->passed_mode
328938fd1498Szrj 	  && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (data->entry_parm))
329038fd1498Szrj 	{
329138fd1498Szrj 	  /* The argument is already sign/zero extended, so note it
329238fd1498Szrj 	     into the subreg.  */
329338fd1498Szrj 	  SUBREG_PROMOTED_VAR_P (tempreg) = 1;
329438fd1498Szrj 	  SUBREG_PROMOTED_SET (tempreg, unsignedp);
329538fd1498Szrj 	}
329638fd1498Szrj 
329738fd1498Szrj       /* TREE_USED gets set erroneously during expand_assignment.  */
329838fd1498Szrj       save_tree_used = TREE_USED (parm);
329938fd1498Szrj       SET_DECL_RTL (parm, rtl);
330038fd1498Szrj       expand_assignment (parm, make_tree (data->nominal_type, tempreg), false);
330138fd1498Szrj       SET_DECL_RTL (parm, NULL_RTX);
330238fd1498Szrj       TREE_USED (parm) = save_tree_used;
330338fd1498Szrj       all->first_conversion_insn = get_insns ();
330438fd1498Szrj       all->last_conversion_insn = get_last_insn ();
330538fd1498Szrj       end_sequence ();
330638fd1498Szrj 
330738fd1498Szrj       did_conversion = true;
330838fd1498Szrj     }
330938fd1498Szrj   else
331038fd1498Szrj     emit_move_insn (parmreg, validated_mem);
331138fd1498Szrj 
331238fd1498Szrj   /* If we were passed a pointer but the actual value can safely live
331338fd1498Szrj      in a register, retrieve it and use it directly.  */
331438fd1498Szrj   if (data->passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode)
331538fd1498Szrj     {
331638fd1498Szrj       /* We can't use nominal_mode, because it will have been set to
331738fd1498Szrj 	 Pmode above.  We must use the actual mode of the parm.  */
331838fd1498Szrj       if (use_register_for_decl (parm))
331938fd1498Szrj 	{
332038fd1498Szrj 	  parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
332138fd1498Szrj 	  mark_user_reg (parmreg);
332238fd1498Szrj 	}
332338fd1498Szrj       else
332438fd1498Szrj 	{
332538fd1498Szrj 	  int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
332638fd1498Szrj 					    TYPE_MODE (TREE_TYPE (parm)),
332738fd1498Szrj 					    TYPE_ALIGN (TREE_TYPE (parm)));
332838fd1498Szrj 	  parmreg
332938fd1498Szrj 	    = assign_stack_local (TYPE_MODE (TREE_TYPE (parm)),
333038fd1498Szrj 				  GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (parm))),
333138fd1498Szrj 				  align);
333238fd1498Szrj 	  set_mem_attributes (parmreg, parm, 1);
333338fd1498Szrj 	}
333438fd1498Szrj 
333538fd1498Szrj       /* We need to preserve an address based on VIRTUAL_STACK_VARS_REGNUM for
333638fd1498Szrj 	 the debug info in case it is not legitimate.  */
333738fd1498Szrj       if (GET_MODE (parmreg) != GET_MODE (rtl))
333838fd1498Szrj 	{
333938fd1498Szrj 	  rtx tempreg = gen_reg_rtx (GET_MODE (rtl));
334038fd1498Szrj 	  int unsigned_p = TYPE_UNSIGNED (TREE_TYPE (parm));
334138fd1498Szrj 
334238fd1498Szrj 	  push_to_sequence2 (all->first_conversion_insn,
334338fd1498Szrj 			     all->last_conversion_insn);
334438fd1498Szrj 	  emit_move_insn (tempreg, rtl);
334538fd1498Szrj 	  tempreg = convert_to_mode (GET_MODE (parmreg), tempreg, unsigned_p);
334638fd1498Szrj 	  emit_move_insn (MEM_P (parmreg) ? copy_rtx (parmreg) : parmreg,
334738fd1498Szrj 			  tempreg);
334838fd1498Szrj 	  all->first_conversion_insn = get_insns ();
334938fd1498Szrj 	  all->last_conversion_insn = get_last_insn ();
335038fd1498Szrj 	  end_sequence ();
335138fd1498Szrj 
335238fd1498Szrj 	  did_conversion = true;
335338fd1498Szrj 	}
335438fd1498Szrj       else
335538fd1498Szrj 	emit_move_insn (MEM_P (parmreg) ? copy_rtx (parmreg) : parmreg, rtl);
335638fd1498Szrj 
335738fd1498Szrj       rtl = parmreg;
335838fd1498Szrj 
335938fd1498Szrj       /* STACK_PARM is the pointer, not the parm, and PARMREG is
336038fd1498Szrj 	 now the parm.  */
336138fd1498Szrj       data->stack_parm = NULL;
336238fd1498Szrj     }
336338fd1498Szrj 
336438fd1498Szrj   set_parm_rtl (parm, rtl);
336538fd1498Szrj 
336638fd1498Szrj   /* Mark the register as eliminable if we did no conversion and it was
336738fd1498Szrj      copied from memory at a fixed offset, and the arg pointer was not
336838fd1498Szrj      copied to a pseudo-reg.  If the arg pointer is a pseudo reg or the
336938fd1498Szrj      offset formed an invalid address, such memory-equivalences as we
337038fd1498Szrj      make here would screw up life analysis for it.  */
337138fd1498Szrj   if (data->nominal_mode == data->passed_mode
337238fd1498Szrj       && !did_conversion
337338fd1498Szrj       && data->stack_parm != 0
337438fd1498Szrj       && MEM_P (data->stack_parm)
337538fd1498Szrj       && data->locate.offset.var == 0
337638fd1498Szrj       && reg_mentioned_p (virtual_incoming_args_rtx,
337738fd1498Szrj 			  XEXP (data->stack_parm, 0)))
337838fd1498Szrj     {
337938fd1498Szrj       rtx_insn *linsn = get_last_insn ();
338038fd1498Szrj       rtx_insn *sinsn;
338138fd1498Szrj       rtx set;
338238fd1498Szrj 
338338fd1498Szrj       /* Mark complex types separately.  */
338438fd1498Szrj       if (GET_CODE (parmreg) == CONCAT)
338538fd1498Szrj 	{
338638fd1498Szrj 	  scalar_mode submode = GET_MODE_INNER (GET_MODE (parmreg));
338738fd1498Szrj 	  int regnor = REGNO (XEXP (parmreg, 0));
338838fd1498Szrj 	  int regnoi = REGNO (XEXP (parmreg, 1));
338938fd1498Szrj 	  rtx stackr = adjust_address_nv (data->stack_parm, submode, 0);
339038fd1498Szrj 	  rtx stacki = adjust_address_nv (data->stack_parm, submode,
339138fd1498Szrj 					  GET_MODE_SIZE (submode));
339238fd1498Szrj 
339338fd1498Szrj 	  /* Scan backwards for the set of the real and
339438fd1498Szrj 	     imaginary parts.  */
339538fd1498Szrj 	  for (sinsn = linsn; sinsn != 0;
339638fd1498Szrj 	       sinsn = prev_nonnote_insn (sinsn))
339738fd1498Szrj 	    {
339838fd1498Szrj 	      set = single_set (sinsn);
339938fd1498Szrj 	      if (set == 0)
340038fd1498Szrj 		continue;
340138fd1498Szrj 
340238fd1498Szrj 	      if (SET_DEST (set) == regno_reg_rtx [regnoi])
340338fd1498Szrj 		set_unique_reg_note (sinsn, REG_EQUIV, stacki);
340438fd1498Szrj 	      else if (SET_DEST (set) == regno_reg_rtx [regnor])
340538fd1498Szrj 		set_unique_reg_note (sinsn, REG_EQUIV, stackr);
340638fd1498Szrj 	    }
340738fd1498Szrj 	}
340838fd1498Szrj       else
340938fd1498Szrj 	set_dst_reg_note (linsn, REG_EQUIV, equiv_stack_parm, parmreg);
341038fd1498Szrj     }
341138fd1498Szrj 
341238fd1498Szrj   /* For pointer data type, suggest pointer register.  */
341338fd1498Szrj   if (POINTER_TYPE_P (TREE_TYPE (parm)))
341438fd1498Szrj     mark_reg_pointer (parmreg,
341538fd1498Szrj 		      TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
341638fd1498Szrj }
341738fd1498Szrj 
/* A subroutine of assign_parms.  Allocate stack space to hold the current
   parameter.  Get it there.  Perform all ABI specified conversions.

   ALL carries state shared across all parameters, in particular the
   deferred "conversion" insn sequence (first/last_conversion_insn) that
   assign_parms emits only after every parameter has been copied out of
   its incoming hard register.  PARM is the PARM_DECL being processed and
   DATA describes where it arrives (entry_parm) and where it must live
   (stack_parm).  */

static void
assign_parm_setup_stack (struct assign_parm_data_all *all, tree parm,
		         struct assign_parm_data_one *data)
{
  /* Value must be stored in the stack slot STACK_PARM during function
     execution.  */
  bool to_conversion = false;

  /* Replace a PARALLEL entry_parm with a pseudo or stack temporary so the
     code below only has to deal with REG/MEM forms.  */
  assign_parm_remove_parallels (data);

  if (data->promoted_mode != data->nominal_mode)
    {
      /* Conversion is required.  Copy the incoming value to a fresh
	 pseudo first; the conversion itself is queued on the shared
	 conversion sequence so it runs after all parms are moved.  */
      rtx tempreg = gen_reg_rtx (GET_MODE (data->entry_parm));

      emit_move_insn (tempreg, validize_mem (copy_rtx (data->entry_parm)));

      push_to_sequence2 (all->first_conversion_insn, all->last_conversion_insn);
      to_conversion = true;

      data->entry_parm = convert_to_mode (data->nominal_mode, tempreg,
					  TYPE_UNSIGNED (TREE_TYPE (parm)));

      if (data->stack_parm)
	{
	  /* Re-express the stack slot in the nominal mode, keeping the
	     recorded MEM offset consistent with the lowpart we now
	     address.  */
	  poly_int64 offset
	    = subreg_lowpart_offset (data->nominal_mode,
				     GET_MODE (data->stack_parm));
	  /* ??? This may need a big-endian conversion on sparc64.  */
	  data->stack_parm
	    = adjust_address (data->stack_parm, data->nominal_mode, 0);
	  if (maybe_ne (offset, 0) && MEM_OFFSET_KNOWN_P (data->stack_parm))
	    set_mem_offset (data->stack_parm,
			    MEM_OFFSET (data->stack_parm) + offset);
	}
    }

  if (data->entry_parm != data->stack_parm)
    {
      rtx src, dest;

      if (data->stack_parm == 0)
	{
	  /* No stack slot was assigned yet; carve one out of the frame
	     with the ABI-required alignment.  */
	  int align = STACK_SLOT_ALIGNMENT (data->passed_type,
					    GET_MODE (data->entry_parm),
					    TYPE_ALIGN (data->passed_type));
	  data->stack_parm
	    = assign_stack_local (GET_MODE (data->entry_parm),
				  GET_MODE_SIZE (GET_MODE (data->entry_parm)),
				  align);
	  set_mem_attributes (data->stack_parm, parm, 1);
	}

      dest = validize_mem (copy_rtx (data->stack_parm));
      src = validize_mem (copy_rtx (data->entry_parm));

      if (MEM_P (src))
	{
	  /* Use a block move to handle potentially misaligned entry_parm.  */
	  if (!to_conversion)
	    push_to_sequence2 (all->first_conversion_insn,
			       all->last_conversion_insn);
	  to_conversion = true;

	  emit_block_move (dest, src,
			   GEN_INT (int_size_in_bytes (data->passed_type)),
			   BLOCK_OP_NORMAL);
	}
      else
	{
	  if (!REG_P (src))
	    src = force_reg (GET_MODE (src), src);
	  emit_move_insn (dest, src);
	}
    }

  if (to_conversion)
    {
      /* Hand the accumulated insns back to the shared deferred
	 conversion sequence.  */
      all->first_conversion_insn = get_insns ();
      all->last_conversion_insn = get_last_insn ();
      end_sequence ();
    }

  set_parm_rtl (parm, data->stack_parm);
}
350638fd1498Szrj 
/* A subroutine of assign_parms.  If the ABI splits complex arguments, then
   undo the frobbing that we did in assign_parms_augmented_arg_list.

   ORIG_FNARGS (in ALL) is the original PARM_DECL chain; FNARGS is the
   augmented list in which each split complex argument occupies two
   consecutive slots (real part at [i], imaginary part at [i + 1]).
   For each such argument, recombine the two halves into a single RTL
   value (a CONCAT, or a stack slot if the parm is addressable) and
   install it as the parm's DECL_RTL and DECL_INCOMING_RTL.  */

static void
assign_parms_unsplit_complex (struct assign_parm_data_all *all,
			      vec<tree> fnargs)
{
  tree parm;
  tree orig_fnargs = all->orig_fnargs;
  unsigned i = 0;

  for (parm = orig_fnargs; parm; parm = TREE_CHAIN (parm), ++i)
    {
      if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
	  && targetm.calls.split_complex_arg (TREE_TYPE (parm)))
	{
	  rtx tmp, real, imag;
	  scalar_mode inner = GET_MODE_INNER (DECL_MODE (parm));

	  real = DECL_RTL (fnargs[i]);
	  imag = DECL_RTL (fnargs[i + 1]);
	  /* If the halves were promoted to a wider mode, take just the
	     component-mode lowpart of each.  */
	  if (inner != GET_MODE (real))
	    {
	      real = gen_lowpart_SUBREG (inner, real);
	      imag = gen_lowpart_SUBREG (inner, imag);
	    }

	  if (TREE_ADDRESSABLE (parm))
	    {
	      rtx rmem, imem;
	      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (parm));
	      int align = STACK_SLOT_ALIGNMENT (TREE_TYPE (parm),
						DECL_MODE (parm),
						TYPE_ALIGN (TREE_TYPE (parm)));

	      /* split_complex_arg put the real and imag parts in
		 pseudos.  Move them to memory.  The stores go on the
		 shared deferred conversion sequence so they are emitted
		 after all parms have been copied from hard regs.  */
	      tmp = assign_stack_local (DECL_MODE (parm), size, align);
	      set_mem_attributes (tmp, parm, 1);
	      rmem = adjust_address_nv (tmp, inner, 0);
	      imem = adjust_address_nv (tmp, inner, GET_MODE_SIZE (inner));
	      push_to_sequence2 (all->first_conversion_insn,
				 all->last_conversion_insn);
	      emit_move_insn (rmem, real);
	      emit_move_insn (imem, imag);
	      all->first_conversion_insn = get_insns ();
	      all->last_conversion_insn = get_last_insn ();
	      end_sequence ();
	    }
	  else
	    tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
	  set_parm_rtl (parm, tmp);

	  /* Likewise recombine the recorded incoming RTL.  */
	  real = DECL_INCOMING_RTL (fnargs[i]);
	  imag = DECL_INCOMING_RTL (fnargs[i + 1]);
	  if (inner != GET_MODE (real))
	    {
	      real = gen_lowpart_SUBREG (inner, real);
	      imag = gen_lowpart_SUBREG (inner, imag);
	    }
	  tmp = gen_rtx_CONCAT (DECL_MODE (parm), real, imag);
	  set_decl_incoming_rtl (parm, tmp, false);
	  /* Skip the extra slot the imaginary half occupies in FNARGS.  */
	  i++;
	}
    }
}
357338fd1498Szrj 
/* Load bounds of PARM from bounds table.

   PARM is the pointer parameter the bounds belong to, or NULL when the
   bounds are not associated with any pointer.  ENTRY is the incoming RTL
   of that pointer (register, MEM, or PARALLEL).  BOUND_NO selects which
   of the pointer's bound slots these bounds are for (a struct may carry
   several pointers).  On return DATA->entry_parm holds the loaded bounds
   value.  NOTE(review): this is MPX/chkp instrumentation support; slot
   layout comes from chkp_find_bound_slots — semantics of the target hook
   load_bounds_for_arg are target-defined.  */
static void
assign_parm_load_bounds (struct assign_parm_data_one *data,
			 tree parm,
			 rtx entry,
			 unsigned bound_no)
{
  bitmap_iterator bi;
  unsigned i, offs = 0;
  int bnd_no = -1;
  rtx slot = NULL, ptr = NULL;

  if (parm)
    {
      /* Walk the parm's bound slots and pick the BOUND_NO-th one;
	 BND_NO becomes its slot index within the parm.  */
      bitmap slots;
      bitmap_obstack_initialize (NULL);
      slots = BITMAP_ALLOC (NULL);
      chkp_find_bound_slots (TREE_TYPE (parm), slots);
      EXECUTE_IF_SET_IN_BITMAP (slots, 0, i, bi)
	{
	  if (bound_no)
	    bound_no--;
	  else
	    {
	      bnd_no = i;
	      break;
	    }
	}
      BITMAP_FREE (slots);
      bitmap_obstack_release (NULL);
    }

  /* We may have bounds not associated with any pointer.  */
  if (bnd_no != -1)
    offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;

  /* Find associated pointer.  */
  if (bnd_no == -1)
    {
      /* If bounds are not associated with any pointer,
	 then they are passed in a register or special slot.  */
      gcc_assert (data->entry_parm);
      ptr = const0_rtx;
    }
  else if (MEM_P (entry))
    slot = adjust_address (entry, Pmode, offs);
  else if (REG_P (entry))
    ptr = gen_rtx_REG (Pmode, REGNO (entry) + bnd_no);
  else if (GET_CODE (entry) == PARALLEL)
    ptr = chkp_get_value_with_offs (entry, GEN_INT (offs));
  else
    gcc_unreachable ();
  data->entry_parm = targetm.calls.load_bounds_for_arg (slot, ptr,
							data->entry_parm);
}
362938fd1498Szrj 
363038fd1498Szrj /* Assign RTL expressions to the function's bounds parameters BNDARGS.  */
363138fd1498Szrj 
363238fd1498Szrj static void
assign_bounds(vec<bounds_parm_data> & bndargs,struct assign_parm_data_all & all,bool assign_regs,bool assign_special,bool assign_bt)363338fd1498Szrj assign_bounds (vec<bounds_parm_data> &bndargs,
363438fd1498Szrj 	       struct assign_parm_data_all &all,
363538fd1498Szrj 	       bool assign_regs, bool assign_special,
363638fd1498Szrj 	       bool assign_bt)
363738fd1498Szrj {
363838fd1498Szrj   unsigned i, pass;
363938fd1498Szrj   bounds_parm_data *pbdata;
364038fd1498Szrj 
364138fd1498Szrj   if (!bndargs.exists ())
364238fd1498Szrj     return;
364338fd1498Szrj 
364438fd1498Szrj   /* We make few passes to store input bounds.  Firstly handle bounds
364538fd1498Szrj      passed in registers.  After that we load bounds passed in special
364638fd1498Szrj      slots.  Finally we load bounds from Bounds Table.  */
364738fd1498Szrj   for (pass = 0; pass < 3; pass++)
364838fd1498Szrj     FOR_EACH_VEC_ELT (bndargs, i, pbdata)
364938fd1498Szrj       {
365038fd1498Szrj 	/* Pass 0 => regs only.  */
365138fd1498Szrj 	if (pass == 0
365238fd1498Szrj 	    && (!assign_regs
365338fd1498Szrj 		||(!pbdata->parm_data.entry_parm
365438fd1498Szrj 		   || GET_CODE (pbdata->parm_data.entry_parm) != REG)))
365538fd1498Szrj 	  continue;
365638fd1498Szrj 	/* Pass 1 => slots only.  */
365738fd1498Szrj 	else if (pass == 1
365838fd1498Szrj 		 && (!assign_special
365938fd1498Szrj 		     || (!pbdata->parm_data.entry_parm
366038fd1498Szrj 			 || GET_CODE (pbdata->parm_data.entry_parm) == REG)))
366138fd1498Szrj 	  continue;
366238fd1498Szrj 	/* Pass 2 => BT only.  */
366338fd1498Szrj 	else if (pass == 2
366438fd1498Szrj 		 && (!assign_bt
366538fd1498Szrj 		     || pbdata->parm_data.entry_parm))
366638fd1498Szrj 	  continue;
366738fd1498Szrj 
366838fd1498Szrj 	if (!pbdata->parm_data.entry_parm
366938fd1498Szrj 	    || GET_CODE (pbdata->parm_data.entry_parm) != REG)
367038fd1498Szrj 	  assign_parm_load_bounds (&pbdata->parm_data, pbdata->ptr_parm,
367138fd1498Szrj 				   pbdata->ptr_entry, pbdata->bound_no);
367238fd1498Szrj 
367338fd1498Szrj 	set_decl_incoming_rtl (pbdata->bounds_parm,
367438fd1498Szrj 			       pbdata->parm_data.entry_parm, false);
367538fd1498Szrj 
367638fd1498Szrj 	if (assign_parm_setup_block_p (&pbdata->parm_data))
367738fd1498Szrj 	  assign_parm_setup_block (&all, pbdata->bounds_parm,
367838fd1498Szrj 				   &pbdata->parm_data);
367938fd1498Szrj 	else if (pbdata->parm_data.passed_pointer
368038fd1498Szrj 		 || use_register_for_decl (pbdata->bounds_parm))
368138fd1498Szrj 	  assign_parm_setup_reg (&all, pbdata->bounds_parm,
368238fd1498Szrj 				 &pbdata->parm_data);
368338fd1498Szrj 	else
368438fd1498Szrj 	  assign_parm_setup_stack (&all, pbdata->bounds_parm,
368538fd1498Szrj 				   &pbdata->parm_data);
368638fd1498Szrj       }
368738fd1498Szrj }
368838fd1498Szrj 
/* Assign RTL expressions to the function's parameters.  This may involve
   copying them into registers and using those registers as the DECL_RTL.

   FNDECL is the function whose parameters are being expanded.  Besides
   setting each parm's DECL_RTL/DECL_INCOMING_RTL, this records in CRTL
   the incoming argument size, the arg-offset rtx, the pops-args count,
   and the hard-register return value rtx.  */

static void
assign_parms (tree fndecl)
{
  struct assign_parm_data_all all;
  tree parm;
  vec<tree> fnargs;
  unsigned i, bound_no = 0;
  tree last_arg = NULL;
  rtx last_arg_entry = NULL;
  /* Deferred MPX/chkp bounds parameters; see assign_bounds.  */
  vec<bounds_parm_data> bndargs = vNULL;
  bounds_parm_data bdata;

  crtl->args.internal_arg_pointer
    = targetm.calls.internal_arg_pointer ();

  assign_parms_initialize_all (&all);
  /* FNARGS is the parm list possibly augmented with a struct-return
     pointer and with complex args split, per ABI.  */
  fnargs = assign_parms_augmented_arg_list (&all);

  FOR_EACH_VEC_ELT (fnargs, i, parm)
    {
      struct assign_parm_data_one data;

      /* Extract the type of PARM; adjust it according to ABI.  */
      assign_parm_find_data_types (&all, parm, &data);

      /* Early out for errors and void parameters.  */
      if (data.passed_mode == VOIDmode)
	{
	  SET_DECL_RTL (parm, const0_rtx);
	  DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
	  continue;
	}

      /* Estimate stack alignment from parameter alignment.  */
      if (SUPPORTS_STACK_ALIGNMENT)
        {
          unsigned int align
	    = targetm.calls.function_arg_boundary (data.promoted_mode,
						   data.passed_type);
	  align = MINIMUM_ALIGNMENT (data.passed_type, data.promoted_mode,
				     align);
	  if (TYPE_ALIGN (data.nominal_type) > align)
	    align = MINIMUM_ALIGNMENT (data.nominal_type,
				       TYPE_MODE (data.nominal_type),
				       TYPE_ALIGN (data.nominal_type));
	  if (crtl->stack_alignment_estimated < align)
	    {
	      gcc_assert (!crtl->stack_realign_processed);
	      crtl->stack_alignment_estimated = align;
	    }
	}

      /* Find out where the parameter arrives in this function.  */
      assign_parm_find_entry_rtl (&all, &data);

      /* Find out where stack space for this parameter might be.  */
      if (assign_parm_is_stack_parm (&all, &data))
	{
	  assign_parm_find_stack_rtl (parm, &data);
	  assign_parm_adjust_entry_rtl (&data);
	}
      if (!POINTER_BOUNDS_TYPE_P (data.passed_type))
	{
	  /* Remember where last non bounds arg was passed in case
	     we have to load associated bounds for it from Bounds
	     Table.  */
	  last_arg = parm;
	  last_arg_entry = data.entry_parm;
	  bound_no = 0;
	}
      /* Record permanently how this parm was passed.  */
      if (data.passed_pointer)
	{
	  rtx incoming_rtl
	    = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (data.passed_type)),
			   data.entry_parm);
	  set_decl_incoming_rtl (parm, incoming_rtl, true);
	}
      else
	set_decl_incoming_rtl (parm, data.entry_parm, false);

      assign_parm_adjust_stack_rtl (&data);

      /* Bounds should be loaded in the particular order to
	 have registers allocated correctly.  Collect info about
	 input bounds and load them later.  */
      if (POINTER_BOUNDS_TYPE_P (data.passed_type))
	{
	  /* Expect bounds in instrumented functions only.  */
	  gcc_assert (chkp_function_instrumented_p (fndecl));

	  bdata.parm_data = data;
	  bdata.bounds_parm = parm;
	  bdata.ptr_parm = last_arg;
	  bdata.ptr_entry = last_arg_entry;
	  bdata.bound_no = bound_no;
	  bndargs.safe_push (bdata);
	}
      else
	{
	  /* Dispatch on how the parm must be set up: BLKmode block,
	     register, or stack slot.  */
	  if (assign_parm_setup_block_p (&data))
	    assign_parm_setup_block (&all, parm, &data);
	  else if (data.passed_pointer || use_register_for_decl (parm))
	    assign_parm_setup_reg (&all, parm, &data);
	  else
	    assign_parm_setup_stack (&all, parm, &data);
	}

      /* On the last named parm of a varargs function, let the target
	 stash the anonymous-argument registers.  */
      if (cfun->stdarg && !DECL_CHAIN (parm))
	{
	  int pretend_bytes = 0;

	  assign_parms_setup_varargs (&all, &data, false);

	  if (chkp_function_instrumented_p (fndecl))
	    {
	      /* We expect this is the last parm.  Otherwise it is wrong
		 to assign bounds right now.  */
	      gcc_assert (i == (fnargs.length () - 1));
	      assign_bounds (bndargs, all, true, false, false);
	      targetm.calls.setup_incoming_vararg_bounds (all.args_so_far,
							  data.promoted_mode,
							  data.passed_type,
							  &pretend_bytes,
							  false);
	      assign_bounds (bndargs, all, false, true, true);
	      bndargs.release ();
	    }
	}

      /* Update info on where next arg arrives in registers.  */
      targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
					  data.passed_type, data.named_arg);

      if (POINTER_BOUNDS_TYPE_P (data.passed_type))
	bound_no++;
    }

  /* Handle any bounds parameters not flushed by the varargs path.  */
  assign_bounds (bndargs, all, true, true, true);
  bndargs.release ();

  if (targetm.calls.split_complex_arg)
    assign_parms_unsplit_complex (&all, fnargs);

  fnargs.release ();

  /* Output all parameter conversion instructions (possibly including calls)
     now that all parameters have been copied out of hard registers.  */
  emit_insn (all.first_conversion_insn);

  /* Estimate reload stack alignment from scalar return mode.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (DECL_RESULT (fndecl))
	{
	  tree type = TREE_TYPE (DECL_RESULT (fndecl));
	  machine_mode mode = TYPE_MODE (type);

	  if (mode != BLKmode
	      && mode != VOIDmode
	      && !AGGREGATE_TYPE_P (type))
	    {
	      unsigned int align = GET_MODE_ALIGNMENT (mode);
	      if (crtl->stack_alignment_estimated < align)
		{
		  gcc_assert (!crtl->stack_realign_processed);
		  crtl->stack_alignment_estimated = align;
		}
	    }
	}
    }

  /* If we are receiving a struct value address as the first argument, set up
     the RTL for the function result. As this might require code to convert
     the transmitted address to Pmode, we do this here to ensure that possible
     preliminary conversions of the address have been emitted already.  */
  if (all.function_result_decl)
    {
      tree result = DECL_RESULT (current_function_decl);
      rtx addr = DECL_RTL (all.function_result_decl);
      rtx x;

      if (DECL_BY_REFERENCE (result))
	{
	  SET_DECL_VALUE_EXPR (result, all.function_result_decl);
	  x = addr;
	}
      else
	{
	  /* The result decl stands for *(return-pointer).  */
	  SET_DECL_VALUE_EXPR (result,
			       build1 (INDIRECT_REF, TREE_TYPE (result),
				       all.function_result_decl));
	  addr = convert_memory_address (Pmode, addr);
	  x = gen_rtx_MEM (DECL_MODE (result), addr);
	  set_mem_attributes (x, result, 1);
	}

      DECL_HAS_VALUE_EXPR_P (result) = 1;

      set_parm_rtl (result, x);
    }

  /* We have aligned all the args, so add space for the pretend args.  */
  crtl->args.pretend_args_size = all.pretend_args_size;
  all.stack_args_size.constant += all.extra_pretend_bytes;
  crtl->args.size = all.stack_args_size.constant;

  /* Adjust function incoming argument size for alignment and
     minimum length.  */

  crtl->args.size = upper_bound (crtl->args.size, all.reg_parm_stack_space);
  crtl->args.size = aligned_upper_bound (crtl->args.size,
					 PARM_BOUNDARY / BITS_PER_UNIT);

  if (ARGS_GROW_DOWNWARD)
    {
      crtl->args.arg_offset_rtx
	= (all.stack_args_size.var == 0
	   ? gen_int_mode (-all.stack_args_size.constant, Pmode)
	   : expand_expr (size_diffop (all.stack_args_size.var,
				       size_int (-all.stack_args_size.constant)),
			  NULL_RTX, VOIDmode, EXPAND_NORMAL));
    }
  else
    crtl->args.arg_offset_rtx = ARGS_SIZE_RTX (all.stack_args_size);

  /* See how many bytes, if any, of its args a function should try to pop
     on return.  */

  crtl->args.pops_args = targetm.calls.return_pops_args (fndecl,
							 TREE_TYPE (fndecl),
							 crtl->args.size);

  /* For stdarg.h function, save info about
     regs and stack space used by the named args.  */

  crtl->args.info = all.args_so_far_v;

  /* Set the rtx used for the function return value.  Put this in its
     own variable so any optimizers that need this information don't have
     to include tree.h.  Do this here so it gets done when an inlined
     function gets output.  */

  crtl->return_rtx
    = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
       ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);

  /* If scalar return value was computed in a pseudo-reg, or was a named
     return value that got dumped to the stack, copy that to the hard
     return register.  */
  if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
    {
      tree decl_result = DECL_RESULT (fndecl);
      rtx decl_rtl = DECL_RTL (decl_result);

      if (REG_P (decl_rtl)
	  ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
	  : DECL_REGISTER (decl_result))
	{
	  rtx real_decl_rtl;

	  real_decl_rtl = targetm.calls.function_value (TREE_TYPE (decl_result),
							fndecl, true);
	  if (chkp_function_instrumented_p (fndecl))
	    crtl->return_bnd
	      = targetm.calls.chkp_function_value_bounds (TREE_TYPE (decl_result),
							  fndecl, true);
	  REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
	  /* The delay slot scheduler assumes that crtl->return_rtx
	     holds the hard register containing the return value, not a
	     temporary pseudo.  */
	  crtl->return_rtx = real_decl_rtl;
	}
    }
}
396738fd1498Szrj 
396838fd1498Szrj /* A subroutine of gimplify_parameters, invoked via walk_tree.
396938fd1498Szrj    For all seen types, gimplify their sizes.  */
397038fd1498Szrj 
397138fd1498Szrj static tree
gimplify_parm_type(tree * tp,int * walk_subtrees,void * data)397238fd1498Szrj gimplify_parm_type (tree *tp, int *walk_subtrees, void *data)
397338fd1498Szrj {
397438fd1498Szrj   tree t = *tp;
397538fd1498Szrj 
397638fd1498Szrj   *walk_subtrees = 0;
397738fd1498Szrj   if (TYPE_P (t))
397838fd1498Szrj     {
397938fd1498Szrj       if (POINTER_TYPE_P (t))
398038fd1498Szrj 	*walk_subtrees = 1;
398138fd1498Szrj       else if (TYPE_SIZE (t) && !TREE_CONSTANT (TYPE_SIZE (t))
398238fd1498Szrj 	       && !TYPE_SIZES_GIMPLIFIED (t))
398338fd1498Szrj 	{
398438fd1498Szrj 	  gimplify_type_sizes (t, (gimple_seq *) data);
398538fd1498Szrj 	  *walk_subtrees = 1;
398638fd1498Szrj 	}
398738fd1498Szrj     }
398838fd1498Szrj 
398938fd1498Szrj   return NULL;
399038fd1498Szrj }
399138fd1498Szrj 
399238fd1498Szrj /* Gimplify the parameter list for current_function_decl.  This involves
399338fd1498Szrj    evaluating SAVE_EXPRs of variable sized parameters and generating code
399438fd1498Szrj    to implement callee-copies reference parameters.  Returns a sequence of
399538fd1498Szrj    statements to add to the beginning of the function.  */
399638fd1498Szrj 
gimple_seq
gimplify_parameters (gimple_seq *cleanup)
{
  struct assign_parm_data_all all;
  tree parm;
  gimple_seq stmts = NULL;	/* Statements to prepend to the function.  */
  vec<tree> fnargs;
  unsigned i;

  assign_parms_initialize_all (&all);
  fnargs = assign_parms_augmented_arg_list (&all);

  FOR_EACH_VEC_ELT (fnargs, i, parm)
    {
      struct assign_parm_data_one data;

      /* Extract the type of PARM; adjust it according to ABI.  */
      assign_parm_find_data_types (&all, parm, &data);

      /* Early out for errors and void parameters.  */
      if (data.passed_mode == VOIDmode || DECL_SIZE (parm) == NULL)
	continue;

      /* Update info on where next arg arrives in registers.  */
      targetm.calls.function_arg_advance (all.args_so_far, data.promoted_mode,
					  data.passed_type, data.named_arg);

      /* ??? Once upon a time variable_size stuffed parameter list
	 SAVE_EXPRs (amongst others) onto a pending sizes list.  This
	 turned out to be less than manageable in the gimple world.
	 Now we have to hunt them down ourselves.  */
      walk_tree_without_duplicates (&data.passed_type,
				    gimplify_parm_type, &stmts);

      /* Gimplify the parameter's own (variable) size expressions too.  */
      if (TREE_CODE (DECL_SIZE_UNIT (parm)) != INTEGER_CST)
	{
	  gimplify_one_sizepos (&DECL_SIZE (parm), &stmts);
	  gimplify_one_sizepos (&DECL_SIZE_UNIT (parm), &stmts);
	}

      if (data.passed_pointer)
	{
          tree type = TREE_TYPE (data.passed_type);
	  /* A reference parameter the callee must copy: materialize a
	     local copy and redirect all uses of PARM to it.  */
	  if (reference_callee_copied (&all.args_so_far_v, TYPE_MODE (type),
				       type, data.named_arg))
	    {
	      tree local, t;

	      /* For constant-sized objects, this is trivial; for
		 variable-sized objects, we have to play games.  */
	      if (TREE_CODE (DECL_SIZE_UNIT (parm)) == INTEGER_CST
		  && !(flag_stack_check == GENERIC_STACK_CHECK
		       && compare_tree_int (DECL_SIZE_UNIT (parm),
					    STACK_CHECK_MAX_VAR_SIZE) > 0))
		{
		  local = create_tmp_var (type, get_name (parm));
		  DECL_IGNORED_P (local) = 0;
		  /* If PARM was addressable, move that flag over
		     to the local copy, as its address will be taken,
		     not the PARMs.  Keep the parms address taken
		     as we'll query that flag during gimplification.  */
		  if (TREE_ADDRESSABLE (parm))
		    TREE_ADDRESSABLE (local) = 1;
		  else if (TREE_CODE (type) == COMPLEX_TYPE
			   || TREE_CODE (type) == VECTOR_TYPE)
		    DECL_GIMPLE_REG_P (local) = 1;

		  /* If the copy lives in memory and stack reuse is
		     enabled, append a clobber of it to CLEANUP so its
		     stack slot can be reused after the function body.  */
		  if (!is_gimple_reg (local)
		      && flag_stack_reuse != SR_NONE)
		    {
		      tree clobber = build_constructor (type, NULL);
		      gimple *clobber_stmt;
		      TREE_THIS_VOLATILE (clobber) = 1;
		      clobber_stmt = gimple_build_assign (local, clobber);
		      gimple_seq_add_stmt (cleanup, clobber_stmt);
		    }
		}
	      else
		{
		  /* Variable-sized copy: allocate it with alloca and
		     access it through a pointer temporary.  */
		  tree ptr_type, addr;

		  ptr_type = build_pointer_type (type);
		  addr = create_tmp_reg (ptr_type, get_name (parm));
		  DECL_IGNORED_P (addr) = 0;
		  local = build_fold_indirect_ref (addr);

		  t = build_alloca_call_expr (DECL_SIZE_UNIT (parm),
					      DECL_ALIGN (parm),
					      max_int_size_in_bytes (type));
		  /* The call has been built for a variable-sized object.  */
		  CALL_ALLOCA_FOR_VAR_P (t) = 1;
		  t = fold_convert (ptr_type, t);
		  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
		  gimplify_and_add (t, &stmts);
		}

	      /* Copy the incoming argument value into the local copy.  */
	      gimplify_assign (local, parm, &stmts);

	      /* Redirect all subsequent references to PARM to LOCAL.  */
	      SET_DECL_VALUE_EXPR (parm, local);
	      DECL_HAS_VALUE_EXPR_P (parm) = 1;
	    }
	}
    }

  fnargs.release ();

  return stmts;
}
410538fd1498Szrj 
410638fd1498Szrj /* Compute the size and offset from the start of the stacked arguments for a
410738fd1498Szrj    parm passed in mode PASSED_MODE and with type TYPE.
410838fd1498Szrj 
410938fd1498Szrj    INITIAL_OFFSET_PTR points to the current offset into the stacked
411038fd1498Szrj    arguments.
411138fd1498Szrj 
411238fd1498Szrj    The starting offset and size for this parm are returned in
411338fd1498Szrj    LOCATE->OFFSET and LOCATE->SIZE, respectively.  When IN_REGS is
411438fd1498Szrj    nonzero, the offset is that of stack slot, which is returned in
411538fd1498Szrj    LOCATE->SLOT_OFFSET.  LOCATE->ALIGNMENT_PAD is the amount of
411638fd1498Szrj    padding required from the initial offset ptr to the stack slot.
411738fd1498Szrj 
411838fd1498Szrj    IN_REGS is nonzero if the argument will be passed in registers.  It will
411938fd1498Szrj    never be set if REG_PARM_STACK_SPACE is not defined.
412038fd1498Szrj 
412138fd1498Szrj    REG_PARM_STACK_SPACE is the number of bytes of stack space reserved
412238fd1498Szrj    for arguments which are passed in registers.
412338fd1498Szrj 
412438fd1498Szrj    FNDECL is the function in which the argument was defined.
412538fd1498Szrj 
412638fd1498Szrj    There are two types of rounding that are done.  The first, controlled by
412738fd1498Szrj    TARGET_FUNCTION_ARG_BOUNDARY, forces the offset from the start of the
412838fd1498Szrj    argument list to be aligned to the specific boundary (in bits).  This
412938fd1498Szrj    rounding affects the initial and starting offsets, but not the argument
413038fd1498Szrj    size.
413138fd1498Szrj 
413238fd1498Szrj    The second, controlled by TARGET_FUNCTION_ARG_PADDING and PARM_BOUNDARY,
413338fd1498Szrj    optionally rounds the size of the parm to PARM_BOUNDARY.  The
413438fd1498Szrj    initial offset is not affected by this rounding, while the size always
413538fd1498Szrj    is and the starting offset may be.  */
413638fd1498Szrj 
413738fd1498Szrj /*  LOCATE->OFFSET will be negative for ARGS_GROW_DOWNWARD case;
413838fd1498Szrj     INITIAL_OFFSET_PTR is positive because locate_and_pad_parm's
413938fd1498Szrj     callers pass in the total size of args so far as
414038fd1498Szrj     INITIAL_OFFSET_PTR.  LOCATE->SIZE is always positive.  */
414138fd1498Szrj 
void
locate_and_pad_parm (machine_mode passed_mode, tree type, int in_regs,
		     int reg_parm_stack_space, int partial,
		     tree fndecl ATTRIBUTE_UNUSED,
		     struct args_size *initial_offset_ptr,
		     struct locate_and_pad_arg_data *locate)
{
  tree sizetree;
  pad_direction where_pad;
  unsigned int boundary, round_boundary;
  int part_size_in_regs;

  /* If we have found a stack parm before we reach the end of the
     area reserved for registers, skip that area.  */
  if (! in_regs)
    {
      if (reg_parm_stack_space > 0)
	{
	  /* If the offset is symbolic, or its poly_int constant part
	     cannot be ordered against REG_PARM_STACK_SPACE at compile
	     time, compute the maximum as a tree expression instead.  */
	  if (initial_offset_ptr->var
	      || !ordered_p (initial_offset_ptr->constant,
			     reg_parm_stack_space))
	    {
	      initial_offset_ptr->var
		= size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
			      ssize_int (reg_parm_stack_space));
	      initial_offset_ptr->constant = 0;
	    }
	  else
	    initial_offset_ptr->constant
	      = ordered_max (initial_offset_ptr->constant,
			     reg_parm_stack_space);
	}
    }

  /* Only count the partially-in-registers portion when no stack space
     is reserved for register arguments.  */
  part_size_in_regs = (reg_parm_stack_space == 0 ? partial : 0);

  /* Size of the argument in bytes: from TYPE when available, else from
     the machine mode.  */
  sizetree = (type
	      ? arg_size_in_bytes (type)
	      : size_int (GET_MODE_SIZE (passed_mode)));
  where_pad = targetm.calls.function_arg_padding (passed_mode, type);
  boundary = targetm.calls.function_arg_boundary (passed_mode, type);
  round_boundary = targetm.calls.function_arg_round_boundary (passed_mode,
							      type);
  locate->where_pad = where_pad;

  /* Alignment can't exceed MAX_SUPPORTED_STACK_ALIGNMENT.  */
  if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
    boundary = MAX_SUPPORTED_STACK_ALIGNMENT;

  locate->boundary = boundary;

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      /* stack_alignment_estimated can't change after stack has been
	 realigned.  */
      if (crtl->stack_alignment_estimated < boundary)
        {
          if (!crtl->stack_realign_processed)
	    crtl->stack_alignment_estimated = boundary;
	  else
	    {
	      /* If stack is realigned and stack alignment value
		 hasn't been finalized, it is OK not to increase
		 stack_alignment_estimated.  The bigger alignment
		 requirement is recorded in stack_alignment_needed
		 below.  */
	      gcc_assert (!crtl->stack_realign_finalized
			  && crtl->stack_realign_needed);
	    }
	}
    }

  /* Remember if the outgoing parameter requires extra alignment on the
     calling function side.  */
  if (crtl->stack_alignment_needed < boundary)
    crtl->stack_alignment_needed = boundary;
  if (crtl->preferred_stack_boundary < boundary)
    crtl->preferred_stack_boundary = boundary;

  if (ARGS_GROW_DOWNWARD)
    {
      /* Offsets are negative in this case; see the function comment.  */
      locate->slot_offset.constant = -initial_offset_ptr->constant;
      if (initial_offset_ptr->var)
	locate->slot_offset.var = size_binop (MINUS_EXPR, ssize_int (0),
					      initial_offset_ptr->var);

      {
	/* Subtract the (possibly rounded-up) size to get the slot start.  */
	tree s2 = sizetree;
	if (where_pad != PAD_NONE
	    && (!tree_fits_uhwi_p (sizetree)
		|| (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
	  s2 = round_up (s2, round_boundary / BITS_PER_UNIT);
	SUB_PARM_SIZE (locate->slot_offset, s2);
      }

      locate->slot_offset.constant += part_size_in_regs;

      if (!in_regs || reg_parm_stack_space > 0)
	pad_to_arg_alignment (&locate->slot_offset, boundary,
			      &locate->alignment_pad);

      /* The stack size is the distance from the aligned slot back to
	 the initial offset.  */
      locate->size.constant = (-initial_offset_ptr->constant
			       - locate->slot_offset.constant);
      if (initial_offset_ptr->var)
	locate->size.var = size_binop (MINUS_EXPR,
				       size_binop (MINUS_EXPR,
						   ssize_int (0),
						   initial_offset_ptr->var),
				       locate->slot_offset.var);

      /* Pad_below needs the pre-rounded size to know how much to pad
	 below.  */
      locate->offset = locate->slot_offset;
      if (where_pad == PAD_DOWNWARD)
	pad_below (&locate->offset, passed_mode, sizetree);

    }
  else
    {
      if (!in_regs || reg_parm_stack_space > 0)
	pad_to_arg_alignment (initial_offset_ptr, boundary,
			      &locate->alignment_pad);
      locate->slot_offset = *initial_offset_ptr;

#ifdef PUSH_ROUNDING
      if (passed_mode != BLKmode)
	sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
#endif

      /* Pad_below needs the pre-rounded size to know how much to pad below
	 so this must be done before rounding up.  */
      locate->offset = locate->slot_offset;
      if (where_pad == PAD_DOWNWARD)
	pad_below (&locate->offset, passed_mode, sizetree);

      /* Round the size up to ROUND_BOUNDARY unless it is already a
	 known multiple of it.  */
      if (where_pad != PAD_NONE
	  && (!tree_fits_uhwi_p (sizetree)
	      || (tree_to_uhwi (sizetree) * BITS_PER_UNIT) % round_boundary))
	sizetree = round_up (sizetree, round_boundary / BITS_PER_UNIT);

      ADD_PARM_SIZE (locate->size, sizetree);

      locate->size.constant -= part_size_in_regs;
    }

  /* Apply any target-specific extra offset for this argument.  */
  locate->offset.constant
    += targetm.calls.function_arg_offset (passed_mode, type);
}
429038fd1498Szrj 
429138fd1498Szrj /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
429238fd1498Szrj    BOUNDARY is measured in bits, but must be a multiple of a storage unit.  */
429338fd1498Szrj 
static void
pad_to_arg_alignment (struct args_size *offset_ptr, int boundary,
		      struct args_size *alignment_pad)
{
  tree save_var = NULL_TREE;
  poly_int64 save_constant = 0;
  int boundary_in_bytes = boundary / BITS_PER_UNIT;
  poly_int64 sp_offset = STACK_POINTER_OFFSET;

#ifdef SPARC_STACK_BOUNDARY_HACK
  /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
     the real alignment of %sp.  However, when it does this, the
     alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
  if (SPARC_STACK_BOUNDARY_HACK)
    sp_offset = 0;
#endif

  /* Remember the incoming offset so ALIGNMENT_PAD can be computed as
     the difference after rounding; it is only reported when BOUNDARY
     exceeds PARM_BOUNDARY.  */
  if (boundary > PARM_BOUNDARY)
    {
      save_var = offset_ptr->var;
      save_constant = offset_ptr->constant;
    }

  alignment_pad->var = NULL_TREE;
  alignment_pad->constant = 0;

  if (boundary > BITS_PER_UNIT)
    {
      int misalign;
      /* Fall back to tree arithmetic when the offset is symbolic or
	 the misalignment of its poly_int constant part is not known
	 at compile time.  */
      if (offset_ptr->var
	  || !known_misalignment (offset_ptr->constant + sp_offset,
				  boundary_in_bytes, &misalign))
	{
	  tree sp_offset_tree = ssize_int (sp_offset);
	  tree offset = size_binop (PLUS_EXPR,
				    ARGS_SIZE_TREE (*offset_ptr),
				    sp_offset_tree);
	  tree rounded;
	  /* Round toward the start of the argument area.  */
	  if (ARGS_GROW_DOWNWARD)
	    rounded = round_down (offset, boundary / BITS_PER_UNIT);
	  else
	    rounded = round_up   (offset, boundary / BITS_PER_UNIT);

	  offset_ptr->var = size_binop (MINUS_EXPR, rounded, sp_offset_tree);
	  /* ARGS_SIZE_TREE includes constant term.  */
	  offset_ptr->constant = 0;
	  if (boundary > PARM_BOUNDARY)
	    alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
					     save_var);
	}
      else
	{
	  /* Known-constant misalignment: adjust the offset directly.  */
	  if (ARGS_GROW_DOWNWARD)
	    offset_ptr->constant -= misalign;
	  else
	    offset_ptr->constant += -misalign & (boundary_in_bytes - 1);

	  if (boundary > PARM_BOUNDARY)
	    alignment_pad->constant = offset_ptr->constant - save_constant;
	}
    }
}
435638fd1498Szrj 
435738fd1498Szrj static void
pad_below(struct args_size * offset_ptr,machine_mode passed_mode,tree sizetree)435838fd1498Szrj pad_below (struct args_size *offset_ptr, machine_mode passed_mode, tree sizetree)
435938fd1498Szrj {
436038fd1498Szrj   unsigned int align = PARM_BOUNDARY / BITS_PER_UNIT;
436138fd1498Szrj   int misalign;
436238fd1498Szrj   if (passed_mode != BLKmode
436338fd1498Szrj       && known_misalignment (GET_MODE_SIZE (passed_mode), align, &misalign))
436438fd1498Szrj     offset_ptr->constant += -misalign & (align - 1);
436538fd1498Szrj   else
436638fd1498Szrj     {
436738fd1498Szrj       if (TREE_CODE (sizetree) != INTEGER_CST
436838fd1498Szrj 	  || (TREE_INT_CST_LOW (sizetree) & (align - 1)) != 0)
436938fd1498Szrj 	{
437038fd1498Szrj 	  /* Round the size up to multiple of PARM_BOUNDARY bits.  */
437138fd1498Szrj 	  tree s2 = round_up (sizetree, align);
437238fd1498Szrj 	  /* Add it in.  */
437338fd1498Szrj 	  ADD_PARM_SIZE (*offset_ptr, s2);
437438fd1498Szrj 	  SUB_PARM_SIZE (*offset_ptr, sizetree);
437538fd1498Szrj 	}
437638fd1498Szrj     }
437738fd1498Szrj }
437838fd1498Szrj 
437938fd1498Szrj 
438038fd1498Szrj /* True if register REGNO was alive at a place where `setjmp' was
438138fd1498Szrj    called and was set more than once or is an argument.  Such regs may
438238fd1498Szrj    be clobbered by `longjmp'.  */
438338fd1498Szrj 
438438fd1498Szrj static bool
regno_clobbered_at_setjmp(bitmap setjmp_crosses,int regno)438538fd1498Szrj regno_clobbered_at_setjmp (bitmap setjmp_crosses, int regno)
438638fd1498Szrj {
438738fd1498Szrj   /* There appear to be cases where some local vars never reach the
438838fd1498Szrj      backend but have bogus regnos.  */
438938fd1498Szrj   if (regno >= max_reg_num ())
439038fd1498Szrj     return false;
439138fd1498Szrj 
439238fd1498Szrj   return ((REG_N_SETS (regno) > 1
439338fd1498Szrj 	   || REGNO_REG_SET_P (df_get_live_out (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
439438fd1498Szrj 			       regno))
439538fd1498Szrj 	  && REGNO_REG_SET_P (setjmp_crosses, regno));
439638fd1498Szrj }
439738fd1498Szrj 
439838fd1498Szrj /* Walk the tree of blocks describing the binding levels within a
   function and warn about variables that might be killed by setjmp or
440038fd1498Szrj    vfork.  This is done after calling flow_analysis before register
440138fd1498Szrj    allocation since that will clobber the pseudo-regs to hard
440238fd1498Szrj    regs.  */
440338fd1498Szrj 
440438fd1498Szrj static void
setjmp_vars_warning(bitmap setjmp_crosses,tree block)440538fd1498Szrj setjmp_vars_warning (bitmap setjmp_crosses, tree block)
440638fd1498Szrj {
440738fd1498Szrj   tree decl, sub;
440838fd1498Szrj 
440938fd1498Szrj   for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
441038fd1498Szrj     {
441138fd1498Szrj       if (VAR_P (decl)
441238fd1498Szrj 	  && DECL_RTL_SET_P (decl)
441338fd1498Szrj 	  && REG_P (DECL_RTL (decl))
441438fd1498Szrj 	  && regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
441538fd1498Szrj 	warning (OPT_Wclobbered, "variable %q+D might be clobbered by"
441638fd1498Szrj                  " %<longjmp%> or %<vfork%>", decl);
441738fd1498Szrj     }
441838fd1498Szrj 
441938fd1498Szrj   for (sub = BLOCK_SUBBLOCKS (block); sub; sub = BLOCK_CHAIN (sub))
442038fd1498Szrj     setjmp_vars_warning (setjmp_crosses, sub);
442138fd1498Szrj }
442238fd1498Szrj 
442338fd1498Szrj /* Do the appropriate part of setjmp_vars_warning
442438fd1498Szrj    but for arguments instead of local variables.  */
442538fd1498Szrj 
442638fd1498Szrj static void
setjmp_args_warning(bitmap setjmp_crosses)442738fd1498Szrj setjmp_args_warning (bitmap setjmp_crosses)
442838fd1498Szrj {
442938fd1498Szrj   tree decl;
443038fd1498Szrj   for (decl = DECL_ARGUMENTS (current_function_decl);
443138fd1498Szrj        decl; decl = DECL_CHAIN (decl))
443238fd1498Szrj     if (DECL_RTL (decl) != 0
443338fd1498Szrj 	&& REG_P (DECL_RTL (decl))
443438fd1498Szrj 	&& regno_clobbered_at_setjmp (setjmp_crosses, REGNO (DECL_RTL (decl))))
443538fd1498Szrj       warning (OPT_Wclobbered,
443638fd1498Szrj                "argument %q+D might be clobbered by %<longjmp%> or %<vfork%>",
443738fd1498Szrj 	       decl);
443838fd1498Szrj }
443938fd1498Szrj 
444038fd1498Szrj /* Generate warning messages for variables live across setjmp.  */
444138fd1498Szrj 
444238fd1498Szrj void
generate_setjmp_warnings(void)444338fd1498Szrj generate_setjmp_warnings (void)
444438fd1498Szrj {
444538fd1498Szrj   bitmap setjmp_crosses = regstat_get_setjmp_crosses ();
444638fd1498Szrj 
444738fd1498Szrj   if (n_basic_blocks_for_fn (cfun) == NUM_FIXED_BLOCKS
444838fd1498Szrj       || bitmap_empty_p (setjmp_crosses))
444938fd1498Szrj     return;
445038fd1498Szrj 
445138fd1498Szrj   setjmp_vars_warning (setjmp_crosses, DECL_INITIAL (current_function_decl));
445238fd1498Szrj   setjmp_args_warning (setjmp_crosses);
445338fd1498Szrj }
445438fd1498Szrj 
445538fd1498Szrj 
445638fd1498Szrj /* Reverse the order of elements in the fragment chain T of blocks,
445738fd1498Szrj    and return the new head of the chain (old last element).
445838fd1498Szrj    In addition to that clear BLOCK_SAME_RANGE flags when needed
445938fd1498Szrj    and adjust BLOCK_SUPERCONTEXT from the super fragment to
446038fd1498Szrj    its super fragment origin.  */
446138fd1498Szrj 
static tree
block_fragments_nreverse (tree t)
{
  tree prev = 0, block, next, prev_super = 0;
  /* Use the super fragment's origin (if any) as the SUPERCONTEXT for
     all reversed fragments.  */
  tree super = BLOCK_SUPERCONTEXT (t);
  if (BLOCK_FRAGMENT_ORIGIN (super))
    super = BLOCK_FRAGMENT_ORIGIN (super);
  for (block = t; block; block = next)
    {
      /* Standard in-place singly-linked-list reversal over
	 BLOCK_FRAGMENT_CHAIN.  */
      next = BLOCK_FRAGMENT_CHAIN (block);
      BLOCK_FRAGMENT_CHAIN (block) = prev;
      /* After reversal, BLOCK can only keep BLOCK_SAME_RANGE if the
	 previous fragment had it too and the super-fragment chains
	 pair up the same way.  */
      if ((prev && !BLOCK_SAME_RANGE (prev))
	  || (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (block))
	      != prev_super))
	BLOCK_SAME_RANGE (block) = 0;
      prev_super = BLOCK_SUPERCONTEXT (block);
      BLOCK_SUPERCONTEXT (block) = super;
      prev = block;
    }
  /* Fix up the fragment origin as well; it is not on the chain just
     reversed.  */
  t = BLOCK_FRAGMENT_ORIGIN (t);
  if (BLOCK_FRAGMENT_CHAIN (BLOCK_SUPERCONTEXT (t))
      != prev_super)
    BLOCK_SAME_RANGE (t) = 0;
  BLOCK_SUPERCONTEXT (t) = super;
  return prev;
}
448838fd1498Szrj 
448938fd1498Szrj /* Reverse the order of elements in the chain T of blocks,
449038fd1498Szrj    and return the new head of the chain (old last element).
449138fd1498Szrj    Also do the same on subblocks and reverse the order of elements
449238fd1498Szrj    in BLOCK_FRAGMENT_CHAIN as well.  */
449338fd1498Szrj 
static tree
blocks_nreverse_all (tree t)
{
  tree prev = 0, block, next;
  for (block = t; block; block = next)
    {
      /* In-place reversal of the BLOCK_CHAIN sibling list.  */
      next = BLOCK_CHAIN (block);
      BLOCK_CHAIN (block) = prev;
      /* Only fragment origins (BLOCK_FRAGMENT_ORIGIN == NULL) own a
	 fragment chain; reverse that chain too.  */
      if (BLOCK_FRAGMENT_CHAIN (block)
	  && BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE)
	{
	  BLOCK_FRAGMENT_CHAIN (block)
	    = block_fragments_nreverse (BLOCK_FRAGMENT_CHAIN (block));
	  /* If the new first fragment no longer shares its range,
	     neither does the origin block.  */
	  if (!BLOCK_SAME_RANGE (BLOCK_FRAGMENT_CHAIN (block)))
	    BLOCK_SAME_RANGE (block) = 0;
	}
      /* Recurse into nested scopes.  */
      BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
      prev = block;
    }
  return prev;
}
451538fd1498Szrj 
451638fd1498Szrj 
451738fd1498Szrj /* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
451838fd1498Szrj    and create duplicate blocks.  */
451938fd1498Szrj /* ??? Need an option to either create block fragments or to create
452038fd1498Szrj    abstract origin duplicates of a source block.  It really depends
452138fd1498Szrj    on what optimization has been performed.  */
452238fd1498Szrj 
452338fd1498Szrj void
reorder_blocks(void)452438fd1498Szrj reorder_blocks (void)
452538fd1498Szrj {
452638fd1498Szrj   tree block = DECL_INITIAL (current_function_decl);
452738fd1498Szrj 
452838fd1498Szrj   if (block == NULL_TREE)
452938fd1498Szrj     return;
453038fd1498Szrj 
453138fd1498Szrj   auto_vec<tree, 10> block_stack;
453238fd1498Szrj 
453338fd1498Szrj   /* Reset the TREE_ASM_WRITTEN bit for all blocks.  */
453438fd1498Szrj   clear_block_marks (block);
453538fd1498Szrj 
453638fd1498Szrj   /* Prune the old trees away, so that they don't get in the way.  */
453738fd1498Szrj   BLOCK_SUBBLOCKS (block) = NULL_TREE;
453838fd1498Szrj   BLOCK_CHAIN (block) = NULL_TREE;
453938fd1498Szrj 
454038fd1498Szrj   /* Recreate the block tree from the note nesting.  */
454138fd1498Szrj   reorder_blocks_1 (get_insns (), block, &block_stack);
454238fd1498Szrj   BLOCK_SUBBLOCKS (block) = blocks_nreverse_all (BLOCK_SUBBLOCKS (block));
454338fd1498Szrj }
454438fd1498Szrj 
454538fd1498Szrj /* Helper function for reorder_blocks.  Reset TREE_ASM_WRITTEN.  */
454638fd1498Szrj 
454738fd1498Szrj void
clear_block_marks(tree block)454838fd1498Szrj clear_block_marks (tree block)
454938fd1498Szrj {
455038fd1498Szrj   while (block)
455138fd1498Szrj     {
455238fd1498Szrj       TREE_ASM_WRITTEN (block) = 0;
455338fd1498Szrj       clear_block_marks (BLOCK_SUBBLOCKS (block));
455438fd1498Szrj       block = BLOCK_CHAIN (block);
455538fd1498Szrj     }
455638fd1498Szrj }
455738fd1498Szrj 
455838fd1498Szrj static void
reorder_blocks_1(rtx_insn * insns,tree current_block,vec<tree> * p_block_stack)455938fd1498Szrj reorder_blocks_1 (rtx_insn *insns, tree current_block,
456038fd1498Szrj 		  vec<tree> *p_block_stack)
456138fd1498Szrj {
456238fd1498Szrj   rtx_insn *insn;
456338fd1498Szrj   tree prev_beg = NULL_TREE, prev_end = NULL_TREE;
456438fd1498Szrj 
456538fd1498Szrj   for (insn = insns; insn; insn = NEXT_INSN (insn))
456638fd1498Szrj     {
456738fd1498Szrj       if (NOTE_P (insn))
456838fd1498Szrj 	{
456938fd1498Szrj 	  if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_BEG)
457038fd1498Szrj 	    {
457138fd1498Szrj 	      tree block = NOTE_BLOCK (insn);
457238fd1498Szrj 	      tree origin;
457338fd1498Szrj 
457438fd1498Szrj 	      gcc_assert (BLOCK_FRAGMENT_ORIGIN (block) == NULL_TREE);
457538fd1498Szrj 	      origin = block;
457638fd1498Szrj 
457738fd1498Szrj 	      if (prev_end)
457838fd1498Szrj 		BLOCK_SAME_RANGE (prev_end) = 0;
457938fd1498Szrj 	      prev_end = NULL_TREE;
458038fd1498Szrj 
458138fd1498Szrj 	      /* If we have seen this block before, that means it now
458238fd1498Szrj 		 spans multiple address regions.  Create a new fragment.  */
458338fd1498Szrj 	      if (TREE_ASM_WRITTEN (block))
458438fd1498Szrj 		{
458538fd1498Szrj 		  tree new_block = copy_node (block);
458638fd1498Szrj 
458738fd1498Szrj 		  BLOCK_SAME_RANGE (new_block) = 0;
458838fd1498Szrj 		  BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
458938fd1498Szrj 		  BLOCK_FRAGMENT_CHAIN (new_block)
459038fd1498Szrj 		    = BLOCK_FRAGMENT_CHAIN (origin);
459138fd1498Szrj 		  BLOCK_FRAGMENT_CHAIN (origin) = new_block;
459238fd1498Szrj 
459338fd1498Szrj 		  NOTE_BLOCK (insn) = new_block;
459438fd1498Szrj 		  block = new_block;
459538fd1498Szrj 		}
459638fd1498Szrj 
459738fd1498Szrj 	      if (prev_beg == current_block && prev_beg)
459838fd1498Szrj 		BLOCK_SAME_RANGE (block) = 1;
459938fd1498Szrj 
460038fd1498Szrj 	      prev_beg = origin;
460138fd1498Szrj 
460238fd1498Szrj 	      BLOCK_SUBBLOCKS (block) = 0;
460338fd1498Szrj 	      TREE_ASM_WRITTEN (block) = 1;
460438fd1498Szrj 	      /* When there's only one block for the entire function,
460538fd1498Szrj 		 current_block == block and we mustn't do this, it
460638fd1498Szrj 		 will cause infinite recursion.  */
460738fd1498Szrj 	      if (block != current_block)
460838fd1498Szrj 		{
460938fd1498Szrj 		  tree super;
461038fd1498Szrj 		  if (block != origin)
461138fd1498Szrj 		    gcc_assert (BLOCK_SUPERCONTEXT (origin) == current_block
461238fd1498Szrj 				|| BLOCK_FRAGMENT_ORIGIN (BLOCK_SUPERCONTEXT
461338fd1498Szrj 								      (origin))
461438fd1498Szrj 				   == current_block);
461538fd1498Szrj 		  if (p_block_stack->is_empty ())
461638fd1498Szrj 		    super = current_block;
461738fd1498Szrj 		  else
461838fd1498Szrj 		    {
461938fd1498Szrj 		      super = p_block_stack->last ();
462038fd1498Szrj 		      gcc_assert (super == current_block
462138fd1498Szrj 				  || BLOCK_FRAGMENT_ORIGIN (super)
462238fd1498Szrj 				     == current_block);
462338fd1498Szrj 		    }
462438fd1498Szrj 		  BLOCK_SUPERCONTEXT (block) = super;
462538fd1498Szrj 		  BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
462638fd1498Szrj 		  BLOCK_SUBBLOCKS (current_block) = block;
462738fd1498Szrj 		  current_block = origin;
462838fd1498Szrj 		}
462938fd1498Szrj 	      p_block_stack->safe_push (block);
463038fd1498Szrj 	    }
463138fd1498Szrj 	  else if (NOTE_KIND (insn) == NOTE_INSN_BLOCK_END)
463238fd1498Szrj 	    {
463338fd1498Szrj 	      NOTE_BLOCK (insn) = p_block_stack->pop ();
463438fd1498Szrj 	      current_block = BLOCK_SUPERCONTEXT (current_block);
463538fd1498Szrj 	      if (BLOCK_FRAGMENT_ORIGIN (current_block))
463638fd1498Szrj 		current_block = BLOCK_FRAGMENT_ORIGIN (current_block);
463738fd1498Szrj 	      prev_beg = NULL_TREE;
463838fd1498Szrj 	      prev_end = BLOCK_SAME_RANGE (NOTE_BLOCK (insn))
463938fd1498Szrj 			 ? NOTE_BLOCK (insn) : NULL_TREE;
464038fd1498Szrj 	    }
464138fd1498Szrj 	}
464238fd1498Szrj       else
464338fd1498Szrj 	{
464438fd1498Szrj 	  prev_beg = NULL_TREE;
464538fd1498Szrj 	  if (prev_end)
464638fd1498Szrj 	    BLOCK_SAME_RANGE (prev_end) = 0;
464738fd1498Szrj 	  prev_end = NULL_TREE;
464838fd1498Szrj 	}
464938fd1498Szrj     }
465038fd1498Szrj }
465138fd1498Szrj 
465238fd1498Szrj /* Reverse the order of elements in the chain T of blocks,
465338fd1498Szrj    and return the new head of the chain (old last element).  */
465438fd1498Szrj 
465538fd1498Szrj tree
blocks_nreverse(tree t)465638fd1498Szrj blocks_nreverse (tree t)
465738fd1498Szrj {
465838fd1498Szrj   tree prev = 0, block, next;
465938fd1498Szrj   for (block = t; block; block = next)
466038fd1498Szrj     {
466138fd1498Szrj       next = BLOCK_CHAIN (block);
466238fd1498Szrj       BLOCK_CHAIN (block) = prev;
466338fd1498Szrj       prev = block;
466438fd1498Szrj     }
466538fd1498Szrj   return prev;
466638fd1498Szrj }
466738fd1498Szrj 
466838fd1498Szrj /* Concatenate two chains of blocks (chained through BLOCK_CHAIN)
466938fd1498Szrj    by modifying the last node in chain 1 to point to chain 2.  */
467038fd1498Szrj 
467138fd1498Szrj tree
block_chainon(tree op1,tree op2)467238fd1498Szrj block_chainon (tree op1, tree op2)
467338fd1498Szrj {
467438fd1498Szrj   tree t1;
467538fd1498Szrj 
467638fd1498Szrj   if (!op1)
467738fd1498Szrj     return op2;
467838fd1498Szrj   if (!op2)
467938fd1498Szrj     return op1;
468038fd1498Szrj 
468138fd1498Szrj   for (t1 = op1; BLOCK_CHAIN (t1); t1 = BLOCK_CHAIN (t1))
468238fd1498Szrj     continue;
468338fd1498Szrj   BLOCK_CHAIN (t1) = op2;
468438fd1498Szrj 
468538fd1498Szrj #ifdef ENABLE_TREE_CHECKING
468638fd1498Szrj   {
468738fd1498Szrj     tree t2;
468838fd1498Szrj     for (t2 = op2; t2; t2 = BLOCK_CHAIN (t2))
468938fd1498Szrj       gcc_assert (t2 != t1);
469038fd1498Szrj   }
469138fd1498Szrj #endif
469238fd1498Szrj 
469338fd1498Szrj   return op1;
469438fd1498Szrj }
469538fd1498Szrj 
469638fd1498Szrj /* Count the subblocks of the list starting with BLOCK.  If VECTOR is
469738fd1498Szrj    non-NULL, list them all into VECTOR, in a depth-first preorder
469838fd1498Szrj    traversal of the block tree.  Also clear TREE_ASM_WRITTEN in all
469938fd1498Szrj    blocks.  */
470038fd1498Szrj 
470138fd1498Szrj static int
all_blocks(tree block,tree * vector)470238fd1498Szrj all_blocks (tree block, tree *vector)
470338fd1498Szrj {
470438fd1498Szrj   int n_blocks = 0;
470538fd1498Szrj 
470638fd1498Szrj   while (block)
470738fd1498Szrj     {
470838fd1498Szrj       TREE_ASM_WRITTEN (block) = 0;
470938fd1498Szrj 
471038fd1498Szrj       /* Record this block.  */
471138fd1498Szrj       if (vector)
471238fd1498Szrj 	vector[n_blocks] = block;
471338fd1498Szrj 
471438fd1498Szrj       ++n_blocks;
471538fd1498Szrj 
471638fd1498Szrj       /* Record the subblocks, and their subblocks...  */
471738fd1498Szrj       n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
471838fd1498Szrj 			      vector ? vector + n_blocks : 0);
471938fd1498Szrj       block = BLOCK_CHAIN (block);
472038fd1498Szrj     }
472138fd1498Szrj 
472238fd1498Szrj   return n_blocks;
472338fd1498Szrj }
472438fd1498Szrj 
472538fd1498Szrj /* Return a vector containing all the blocks rooted at BLOCK.  The
472638fd1498Szrj    number of elements in the vector is stored in N_BLOCKS_P.  The
472738fd1498Szrj    vector is dynamically allocated; it is the caller's responsibility
472838fd1498Szrj    to call `free' on the pointer returned.  */
472938fd1498Szrj 
473038fd1498Szrj static tree *
get_block_vector(tree block,int * n_blocks_p)473138fd1498Szrj get_block_vector (tree block, int *n_blocks_p)
473238fd1498Szrj {
473338fd1498Szrj   tree *block_vector;
473438fd1498Szrj 
473538fd1498Szrj   *n_blocks_p = all_blocks (block, NULL);
473638fd1498Szrj   block_vector = XNEWVEC (tree, *n_blocks_p);
473738fd1498Szrj   all_blocks (block, block_vector);
473838fd1498Szrj 
473938fd1498Szrj   return block_vector;
474038fd1498Szrj }
474138fd1498Szrj 
474238fd1498Szrj static GTY(()) int next_block_index = 2;
474338fd1498Szrj 
474438fd1498Szrj /* Set BLOCK_NUMBER for all the blocks in FN.  */
474538fd1498Szrj 
474638fd1498Szrj void
number_blocks(tree fn)474738fd1498Szrj number_blocks (tree fn)
474838fd1498Szrj {
474938fd1498Szrj   int i;
475038fd1498Szrj   int n_blocks;
475138fd1498Szrj   tree *block_vector;
475238fd1498Szrj 
475338fd1498Szrj   /* For XCOFF debugging output, we start numbering the blocks
475438fd1498Szrj      from 1 within each function, rather than keeping a running
475538fd1498Szrj      count.  */
475638fd1498Szrj #if defined (XCOFF_DEBUGGING_INFO)
475738fd1498Szrj   if (write_symbols == XCOFF_DEBUG)
475838fd1498Szrj     next_block_index = 1;
475938fd1498Szrj #endif
476038fd1498Szrj 
476138fd1498Szrj   block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
476238fd1498Szrj 
476338fd1498Szrj   /* The top-level BLOCK isn't numbered at all.  */
476438fd1498Szrj   for (i = 1; i < n_blocks; ++i)
476538fd1498Szrj     /* We number the blocks from two.  */
476638fd1498Szrj     BLOCK_NUMBER (block_vector[i]) = next_block_index++;
476738fd1498Szrj 
476838fd1498Szrj   free (block_vector);
476938fd1498Szrj 
477038fd1498Szrj   return;
477138fd1498Szrj }
477238fd1498Szrj 
477338fd1498Szrj /* If VAR is present in a subblock of BLOCK, return the subblock.  */
477438fd1498Szrj 
477538fd1498Szrj DEBUG_FUNCTION tree
debug_find_var_in_block_tree(tree var,tree block)477638fd1498Szrj debug_find_var_in_block_tree (tree var, tree block)
477738fd1498Szrj {
477838fd1498Szrj   tree t;
477938fd1498Szrj 
478038fd1498Szrj   for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
478138fd1498Szrj     if (t == var)
478238fd1498Szrj       return block;
478338fd1498Szrj 
478438fd1498Szrj   for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
478538fd1498Szrj     {
478638fd1498Szrj       tree ret = debug_find_var_in_block_tree (var, t);
478738fd1498Szrj       if (ret)
478838fd1498Szrj 	return ret;
478938fd1498Szrj     }
479038fd1498Szrj 
479138fd1498Szrj   return NULL_TREE;
479238fd1498Szrj }
479338fd1498Szrj 
479438fd1498Szrj /* Keep track of whether we're in a dummy function context.  If we are,
479538fd1498Szrj    we don't want to invoke the set_current_function hook, because we'll
479638fd1498Szrj    get into trouble if the hook calls target_reinit () recursively or
479738fd1498Szrj    when the initial initialization is not yet complete.  */
479838fd1498Szrj 
479938fd1498Szrj static bool in_dummy_function;
480038fd1498Szrj 
480138fd1498Szrj /* Invoke the target hook when setting cfun.  Update the optimization options
480238fd1498Szrj    if the function uses different options than the default.  */
480338fd1498Szrj 
480438fd1498Szrj static void
invoke_set_current_function_hook(tree fndecl)480538fd1498Szrj invoke_set_current_function_hook (tree fndecl)
480638fd1498Szrj {
480738fd1498Szrj   if (!in_dummy_function)
480838fd1498Szrj     {
480938fd1498Szrj       tree opts = ((fndecl)
481038fd1498Szrj 		   ? DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl)
481138fd1498Szrj 		   : optimization_default_node);
481238fd1498Szrj 
481338fd1498Szrj       if (!opts)
481438fd1498Szrj 	opts = optimization_default_node;
481538fd1498Szrj 
481638fd1498Szrj       /* Change optimization options if needed.  */
481738fd1498Szrj       if (optimization_current_node != opts)
481838fd1498Szrj 	{
481938fd1498Szrj 	  optimization_current_node = opts;
482038fd1498Szrj 	  cl_optimization_restore (&global_options, TREE_OPTIMIZATION (opts));
482138fd1498Szrj 	}
482238fd1498Szrj 
482338fd1498Szrj       targetm.set_current_function (fndecl);
482438fd1498Szrj       this_fn_optabs = this_target_optabs;
482538fd1498Szrj 
482638fd1498Szrj       if (opts != optimization_default_node)
482738fd1498Szrj 	{
482838fd1498Szrj 	  init_tree_optimization_optabs (opts);
482938fd1498Szrj 	  if (TREE_OPTIMIZATION_OPTABS (opts))
483038fd1498Szrj 	    this_fn_optabs = (struct target_optabs *)
483138fd1498Szrj 	      TREE_OPTIMIZATION_OPTABS (opts);
483238fd1498Szrj 	}
483338fd1498Szrj     }
483438fd1498Szrj }
483538fd1498Szrj 
483638fd1498Szrj /* cfun should never be set directly; use this function.  */
483738fd1498Szrj 
483838fd1498Szrj void
set_cfun(struct function * new_cfun,bool force)483938fd1498Szrj set_cfun (struct function *new_cfun, bool force)
484038fd1498Szrj {
484138fd1498Szrj   if (cfun != new_cfun || force)
484238fd1498Szrj     {
484338fd1498Szrj       cfun = new_cfun;
484438fd1498Szrj       invoke_set_current_function_hook (new_cfun ? new_cfun->decl : NULL_TREE);
484538fd1498Szrj       redirect_edge_var_map_empty ();
484638fd1498Szrj     }
484738fd1498Szrj }
484838fd1498Szrj 
484938fd1498Szrj /* Initialized with NOGC, making this poisonous to the garbage collector.  */
485038fd1498Szrj 
485138fd1498Szrj static vec<function *> cfun_stack;
485238fd1498Szrj 
485338fd1498Szrj /* Push the current cfun onto the stack, and set cfun to new_cfun.  Also set
485438fd1498Szrj    current_function_decl accordingly.  */
485538fd1498Szrj 
485638fd1498Szrj void
push_cfun(struct function * new_cfun)485738fd1498Szrj push_cfun (struct function *new_cfun)
485838fd1498Szrj {
485938fd1498Szrj   gcc_assert ((!cfun && !current_function_decl)
486038fd1498Szrj 	      || (cfun && current_function_decl == cfun->decl));
486138fd1498Szrj   cfun_stack.safe_push (cfun);
486238fd1498Szrj   current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
486338fd1498Szrj   set_cfun (new_cfun);
486438fd1498Szrj }
486538fd1498Szrj 
486638fd1498Szrj /* Pop cfun from the stack.  Also set current_function_decl accordingly.  */
486738fd1498Szrj 
486838fd1498Szrj void
pop_cfun(void)486938fd1498Szrj pop_cfun (void)
487038fd1498Szrj {
487138fd1498Szrj   struct function *new_cfun = cfun_stack.pop ();
487238fd1498Szrj   /* When in_dummy_function, we do have a cfun but current_function_decl is
487338fd1498Szrj      NULL.  We also allow pushing NULL cfun and subsequently changing
487438fd1498Szrj      current_function_decl to something else and have both restored by
487538fd1498Szrj      pop_cfun.  */
487638fd1498Szrj   gcc_checking_assert (in_dummy_function
487738fd1498Szrj 		       || !cfun
487838fd1498Szrj 		       || current_function_decl == cfun->decl);
487938fd1498Szrj   set_cfun (new_cfun);
488038fd1498Szrj   current_function_decl = new_cfun ? new_cfun->decl : NULL_TREE;
488138fd1498Szrj }
488238fd1498Szrj 
488338fd1498Szrj /* Return value of funcdef and increase it.  */
488438fd1498Szrj int
get_next_funcdef_no(void)488538fd1498Szrj get_next_funcdef_no (void)
488638fd1498Szrj {
488738fd1498Szrj   return funcdef_no++;
488838fd1498Szrj }
488938fd1498Szrj 
489038fd1498Szrj /* Return value of funcdef.  */
489138fd1498Szrj int
get_last_funcdef_no(void)489238fd1498Szrj get_last_funcdef_no (void)
489338fd1498Szrj {
489438fd1498Szrj   return funcdef_no;
489538fd1498Szrj }
489638fd1498Szrj 
489738fd1498Szrj /* Allocate a function structure for FNDECL and set its contents
489838fd1498Szrj    to the defaults.  Set cfun to the newly-allocated object.
489938fd1498Szrj    Some of the helper functions invoked during initialization assume
490038fd1498Szrj    that cfun has already been set.  Therefore, assign the new object
490138fd1498Szrj    directly into cfun and invoke the back end hook explicitly at the
490238fd1498Szrj    very end, rather than initializing a temporary and calling set_cfun
490338fd1498Szrj    on it.
490438fd1498Szrj 
490538fd1498Szrj    ABSTRACT_P is true if this is a function that will never be seen by
490638fd1498Szrj    the middle-end.  Such functions are front-end concepts (like C++
490738fd1498Szrj    function templates) that do not correspond directly to functions
490838fd1498Szrj    placed in object files.  */
490938fd1498Szrj 
491038fd1498Szrj void
allocate_struct_function(tree fndecl,bool abstract_p)491138fd1498Szrj allocate_struct_function (tree fndecl, bool abstract_p)
491238fd1498Szrj {
491338fd1498Szrj   tree fntype = fndecl ? TREE_TYPE (fndecl) : NULL_TREE;
491438fd1498Szrj 
491538fd1498Szrj   cfun = ggc_cleared_alloc<function> ();
491638fd1498Szrj 
491738fd1498Szrj   init_eh_for_function ();
491838fd1498Szrj 
491938fd1498Szrj   if (init_machine_status)
492038fd1498Szrj     cfun->machine = (*init_machine_status) ();
492138fd1498Szrj 
492238fd1498Szrj #ifdef OVERRIDE_ABI_FORMAT
492338fd1498Szrj   OVERRIDE_ABI_FORMAT (fndecl);
492438fd1498Szrj #endif
492538fd1498Szrj 
492638fd1498Szrj   if (fndecl != NULL_TREE)
492738fd1498Szrj     {
492838fd1498Szrj       DECL_STRUCT_FUNCTION (fndecl) = cfun;
492938fd1498Szrj       cfun->decl = fndecl;
493038fd1498Szrj       current_function_funcdef_no = get_next_funcdef_no ();
493138fd1498Szrj     }
493238fd1498Szrj 
493338fd1498Szrj   invoke_set_current_function_hook (fndecl);
493438fd1498Szrj 
493538fd1498Szrj   if (fndecl != NULL_TREE)
493638fd1498Szrj     {
493738fd1498Szrj       tree result = DECL_RESULT (fndecl);
493838fd1498Szrj 
493938fd1498Szrj       if (!abstract_p)
494038fd1498Szrj 	{
494138fd1498Szrj 	  /* Now that we have activated any function-specific attributes
494238fd1498Szrj 	     that might affect layout, particularly vector modes, relayout
494338fd1498Szrj 	     each of the parameters and the result.  */
494438fd1498Szrj 	  relayout_decl (result);
494538fd1498Szrj 	  for (tree parm = DECL_ARGUMENTS (fndecl); parm;
494638fd1498Szrj 	       parm = DECL_CHAIN (parm))
494738fd1498Szrj 	    relayout_decl (parm);
494838fd1498Szrj 
494938fd1498Szrj 	  /* Similarly relayout the function decl.  */
495038fd1498Szrj 	  targetm.target_option.relayout_function (fndecl);
495138fd1498Szrj 	}
495238fd1498Szrj 
495338fd1498Szrj       if (!abstract_p && aggregate_value_p (result, fndecl))
495438fd1498Szrj 	{
495538fd1498Szrj #ifdef PCC_STATIC_STRUCT_RETURN
495638fd1498Szrj 	  cfun->returns_pcc_struct = 1;
495738fd1498Szrj #endif
495838fd1498Szrj 	  cfun->returns_struct = 1;
495938fd1498Szrj 	}
496038fd1498Szrj 
496138fd1498Szrj       cfun->stdarg = stdarg_p (fntype);
496238fd1498Szrj 
496338fd1498Szrj       /* Assume all registers in stdarg functions need to be saved.  */
496438fd1498Szrj       cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
496538fd1498Szrj       cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
496638fd1498Szrj 
496738fd1498Szrj       /* ??? This could be set on a per-function basis by the front-end
496838fd1498Szrj          but is this worth the hassle?  */
496938fd1498Szrj       cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
497038fd1498Szrj       cfun->can_delete_dead_exceptions = flag_delete_dead_exceptions;
497138fd1498Szrj 
497238fd1498Szrj       if (!profile_flag && !flag_instrument_function_entry_exit)
497338fd1498Szrj 	DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl) = 1;
497438fd1498Szrj     }
497538fd1498Szrj 
497638fd1498Szrj   /* Don't enable begin stmt markers if var-tracking at assignments is
497738fd1498Szrj      disabled.  The markers make little sense without the variable
497838fd1498Szrj      binding annotations among them.  */
497938fd1498Szrj   cfun->debug_nonbind_markers = lang_hooks.emits_begin_stmt
498038fd1498Szrj     && MAY_HAVE_DEBUG_MARKER_STMTS;
498138fd1498Szrj }
498238fd1498Szrj 
498338fd1498Szrj /* This is like allocate_struct_function, but pushes a new cfun for FNDECL
498438fd1498Szrj    instead of just setting it.  */
498538fd1498Szrj 
498638fd1498Szrj void
push_struct_function(tree fndecl)498738fd1498Szrj push_struct_function (tree fndecl)
498838fd1498Szrj {
498938fd1498Szrj   /* When in_dummy_function we might be in the middle of a pop_cfun and
499038fd1498Szrj      current_function_decl and cfun may not match.  */
499138fd1498Szrj   gcc_assert (in_dummy_function
499238fd1498Szrj 	      || (!cfun && !current_function_decl)
499338fd1498Szrj 	      || (cfun && current_function_decl == cfun->decl));
499438fd1498Szrj   cfun_stack.safe_push (cfun);
499538fd1498Szrj   current_function_decl = fndecl;
499638fd1498Szrj   allocate_struct_function (fndecl, false);
499738fd1498Szrj }
499838fd1498Szrj 
499938fd1498Szrj /* Reset crtl and other non-struct-function variables to defaults as
500038fd1498Szrj    appropriate for emitting rtl at the start of a function.  */
500138fd1498Szrj 
500238fd1498Szrj static void
prepare_function_start(void)500338fd1498Szrj prepare_function_start (void)
500438fd1498Szrj {
500538fd1498Szrj   gcc_assert (!get_last_insn ());
500638fd1498Szrj   init_temp_slots ();
500738fd1498Szrj   init_emit ();
500838fd1498Szrj   init_varasm_status ();
500938fd1498Szrj   init_expr ();
501038fd1498Szrj   default_rtl_profile ();
501138fd1498Szrj 
501238fd1498Szrj   if (flag_stack_usage_info)
501338fd1498Szrj     {
501438fd1498Szrj       cfun->su = ggc_cleared_alloc<stack_usage> ();
501538fd1498Szrj       cfun->su->static_stack_size = -1;
501638fd1498Szrj     }
501738fd1498Szrj 
501838fd1498Szrj   cse_not_expected = ! optimize;
501938fd1498Szrj 
502038fd1498Szrj   /* Caller save not needed yet.  */
502138fd1498Szrj   caller_save_needed = 0;
502238fd1498Szrj 
502338fd1498Szrj   /* We haven't done register allocation yet.  */
502438fd1498Szrj   reg_renumber = 0;
502538fd1498Szrj 
502638fd1498Szrj   /* Indicate that we have not instantiated virtual registers yet.  */
502738fd1498Szrj   virtuals_instantiated = 0;
502838fd1498Szrj 
502938fd1498Szrj   /* Indicate that we want CONCATs now.  */
503038fd1498Szrj   generating_concat_p = 1;
503138fd1498Szrj 
503238fd1498Szrj   /* Indicate we have no need of a frame pointer yet.  */
503338fd1498Szrj   frame_pointer_needed = 0;
503438fd1498Szrj }
503538fd1498Szrj 
503638fd1498Szrj void
push_dummy_function(bool with_decl)503738fd1498Szrj push_dummy_function (bool with_decl)
503838fd1498Szrj {
503938fd1498Szrj   tree fn_decl, fn_type, fn_result_decl;
504038fd1498Szrj 
504138fd1498Szrj   gcc_assert (!in_dummy_function);
504238fd1498Szrj   in_dummy_function = true;
504338fd1498Szrj 
504438fd1498Szrj   if (with_decl)
504538fd1498Szrj     {
504638fd1498Szrj       fn_type = build_function_type_list (void_type_node, NULL_TREE);
504738fd1498Szrj       fn_decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
504838fd1498Szrj 			    fn_type);
504938fd1498Szrj       fn_result_decl = build_decl (UNKNOWN_LOCATION, RESULT_DECL,
505038fd1498Szrj 					 NULL_TREE, void_type_node);
505138fd1498Szrj       DECL_RESULT (fn_decl) = fn_result_decl;
505238fd1498Szrj     }
505338fd1498Szrj   else
505438fd1498Szrj     fn_decl = NULL_TREE;
505538fd1498Szrj 
505638fd1498Szrj   push_struct_function (fn_decl);
505738fd1498Szrj }
505838fd1498Szrj 
505938fd1498Szrj /* Initialize the rtl expansion mechanism so that we can do simple things
506038fd1498Szrj    like generate sequences.  This is used to provide a context during global
506138fd1498Szrj    initialization of some passes.  You must call expand_dummy_function_end
506238fd1498Szrj    to exit this context.  */
506338fd1498Szrj 
506438fd1498Szrj void
init_dummy_function_start(void)506538fd1498Szrj init_dummy_function_start (void)
506638fd1498Szrj {
506738fd1498Szrj   push_dummy_function (false);
506838fd1498Szrj   prepare_function_start ();
506938fd1498Szrj }
507038fd1498Szrj 
507138fd1498Szrj /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
507238fd1498Szrj    and initialize static variables for generating RTL for the statements
507338fd1498Szrj    of the function.  */
507438fd1498Szrj 
507538fd1498Szrj void
init_function_start(tree subr)507638fd1498Szrj init_function_start (tree subr)
507738fd1498Szrj {
507838fd1498Szrj   /* Initialize backend, if needed.  */
507938fd1498Szrj   initialize_rtl ();
508038fd1498Szrj 
508138fd1498Szrj   prepare_function_start ();
508238fd1498Szrj   decide_function_section (subr);
508338fd1498Szrj 
508438fd1498Szrj   /* Warn if this value is an aggregate type,
508538fd1498Szrj      regardless of which calling convention we are using for it.  */
508638fd1498Szrj   if (AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
508738fd1498Szrj     warning (OPT_Waggregate_return, "function returns an aggregate");
508838fd1498Szrj }
508938fd1498Szrj 
509038fd1498Szrj /* Expand code to verify the stack_protect_guard.  This is invoked at
509138fd1498Szrj    the end of a function to be protected.  */
509238fd1498Szrj 
509338fd1498Szrj void
stack_protect_epilogue(void)509438fd1498Szrj stack_protect_epilogue (void)
509538fd1498Szrj {
509638fd1498Szrj   tree guard_decl = targetm.stack_protect_guard ();
509738fd1498Szrj   rtx_code_label *label = gen_label_rtx ();
509838fd1498Szrj   rtx x, y;
509938fd1498Szrj   rtx_insn *seq;
510038fd1498Szrj 
510138fd1498Szrj   x = expand_normal (crtl->stack_protect_guard);
510238fd1498Szrj   if (guard_decl)
510338fd1498Szrj     y = expand_normal (guard_decl);
510438fd1498Szrj   else
510538fd1498Szrj     y = const0_rtx;
510638fd1498Szrj 
510738fd1498Szrj   /* Allow the target to compare Y with X without leaking either into
510838fd1498Szrj      a register.  */
510938fd1498Szrj   if (targetm.have_stack_protect_test ()
511038fd1498Szrj       && ((seq = targetm.gen_stack_protect_test (x, y, label)) != NULL_RTX))
511138fd1498Szrj     emit_insn (seq);
511238fd1498Szrj   else
511338fd1498Szrj     emit_cmp_and_jump_insns (x, y, EQ, NULL_RTX, ptr_mode, 1, label);
511438fd1498Szrj 
511538fd1498Szrj   /* The noreturn predictor has been moved to the tree level.  The rtl-level
511638fd1498Szrj      predictors estimate this branch about 20%, which isn't enough to get
511738fd1498Szrj      things moved out of line.  Since this is the only extant case of adding
511838fd1498Szrj      a noreturn function at the rtl level, it doesn't seem worth doing ought
511938fd1498Szrj      except adding the prediction by hand.  */
512038fd1498Szrj   rtx_insn *tmp = get_last_insn ();
512138fd1498Szrj   if (JUMP_P (tmp))
512238fd1498Szrj     predict_insn_def (tmp, PRED_NORETURN, TAKEN);
512338fd1498Szrj 
512438fd1498Szrj   expand_call (targetm.stack_protect_fail (), NULL_RTX, /*ignore=*/true);
512538fd1498Szrj   free_temp_slots ();
512638fd1498Szrj   emit_label (label);
512738fd1498Szrj }
512838fd1498Szrj 
512938fd1498Szrj /* Start the RTL for a new function, and set variables used for
513038fd1498Szrj    emitting RTL.
513138fd1498Szrj    SUBR is the FUNCTION_DECL node.
513238fd1498Szrj    PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
513338fd1498Szrj    the function's parameters, which must be run at any return statement.  */
513438fd1498Szrj 
513538fd1498Szrj void
expand_function_start(tree subr)513638fd1498Szrj expand_function_start (tree subr)
513738fd1498Szrj {
513838fd1498Szrj   /* Make sure volatile mem refs aren't considered
513938fd1498Szrj      valid operands of arithmetic insns.  */
514038fd1498Szrj   init_recog_no_volatile ();
514138fd1498Szrj 
514238fd1498Szrj   crtl->profile
514338fd1498Szrj     = (profile_flag
514438fd1498Szrj        && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));
514538fd1498Szrj 
514638fd1498Szrj   crtl->limit_stack
514738fd1498Szrj     = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));
514838fd1498Szrj 
514938fd1498Szrj   /* Make the label for return statements to jump to.  Do not special
515038fd1498Szrj      case machines with special return instructions -- they will be
515138fd1498Szrj      handled later during jump, ifcvt, or epilogue creation.  */
515238fd1498Szrj   return_label = gen_label_rtx ();
515338fd1498Szrj 
515438fd1498Szrj   /* Initialize rtx used to return the value.  */
515538fd1498Szrj   /* Do this before assign_parms so that we copy the struct value address
515638fd1498Szrj      before any library calls that assign parms might generate.  */
515738fd1498Szrj 
515838fd1498Szrj   /* Decide whether to return the value in memory or in a register.  */
515938fd1498Szrj   tree res = DECL_RESULT (subr);
516038fd1498Szrj   if (aggregate_value_p (res, subr))
516138fd1498Szrj     {
516238fd1498Szrj       /* Returning something that won't go in a register.  */
516338fd1498Szrj       rtx value_address = 0;
516438fd1498Szrj 
516538fd1498Szrj #ifdef PCC_STATIC_STRUCT_RETURN
516638fd1498Szrj       if (cfun->returns_pcc_struct)
516738fd1498Szrj 	{
516838fd1498Szrj 	  int size = int_size_in_bytes (TREE_TYPE (res));
516938fd1498Szrj 	  value_address = assemble_static_space (size);
517038fd1498Szrj 	}
517138fd1498Szrj       else
517238fd1498Szrj #endif
517338fd1498Szrj 	{
517438fd1498Szrj 	  rtx sv = targetm.calls.struct_value_rtx (TREE_TYPE (subr), 2);
517538fd1498Szrj 	  /* Expect to be passed the address of a place to store the value.
517638fd1498Szrj 	     If it is passed as an argument, assign_parms will take care of
517738fd1498Szrj 	     it.  */
517838fd1498Szrj 	  if (sv)
517938fd1498Szrj 	    {
518038fd1498Szrj 	      value_address = gen_reg_rtx (Pmode);
518138fd1498Szrj 	      emit_move_insn (value_address, sv);
518238fd1498Szrj 	    }
518338fd1498Szrj 	}
518438fd1498Szrj       if (value_address)
518538fd1498Szrj 	{
518638fd1498Szrj 	  rtx x = value_address;
518738fd1498Szrj 	  if (!DECL_BY_REFERENCE (res))
518838fd1498Szrj 	    {
518938fd1498Szrj 	      x = gen_rtx_MEM (DECL_MODE (res), x);
519038fd1498Szrj 	      set_mem_attributes (x, res, 1);
519138fd1498Szrj 	    }
519238fd1498Szrj 	  set_parm_rtl (res, x);
519338fd1498Szrj 	}
519438fd1498Szrj     }
519538fd1498Szrj   else if (DECL_MODE (res) == VOIDmode)
519638fd1498Szrj     /* If return mode is void, this decl rtl should not be used.  */
519738fd1498Szrj     set_parm_rtl (res, NULL_RTX);
519838fd1498Szrj   else
519938fd1498Szrj     {
520038fd1498Szrj       /* Compute the return values into a pseudo reg, which we will copy
520138fd1498Szrj 	 into the true return register after the cleanups are done.  */
520238fd1498Szrj       tree return_type = TREE_TYPE (res);
520338fd1498Szrj 
520438fd1498Szrj       /* If we may coalesce this result, make sure it has the expected mode
520538fd1498Szrj 	 in case it was promoted.  But we need not bother about BLKmode.  */
520638fd1498Szrj       machine_mode promoted_mode
520738fd1498Szrj 	= flag_tree_coalesce_vars && is_gimple_reg (res)
520838fd1498Szrj 	  ? promote_ssa_mode (ssa_default_def (cfun, res), NULL)
520938fd1498Szrj 	  : BLKmode;
521038fd1498Szrj 
521138fd1498Szrj       if (promoted_mode != BLKmode)
521238fd1498Szrj 	set_parm_rtl (res, gen_reg_rtx (promoted_mode));
521338fd1498Szrj       else if (TYPE_MODE (return_type) != BLKmode
521438fd1498Szrj 	       && targetm.calls.return_in_msb (return_type))
521538fd1498Szrj 	/* expand_function_end will insert the appropriate padding in
521638fd1498Szrj 	   this case.  Use the return value's natural (unpadded) mode
521738fd1498Szrj 	   within the function proper.  */
521838fd1498Szrj 	set_parm_rtl (res, gen_reg_rtx (TYPE_MODE (return_type)));
521938fd1498Szrj       else
522038fd1498Szrj 	{
522138fd1498Szrj 	  /* In order to figure out what mode to use for the pseudo, we
522238fd1498Szrj 	     figure out what the mode of the eventual return register will
522338fd1498Szrj 	     actually be, and use that.  */
522438fd1498Szrj 	  rtx hard_reg = hard_function_value (return_type, subr, 0, 1);
522538fd1498Szrj 
522638fd1498Szrj 	  /* Structures that are returned in registers are not
522738fd1498Szrj 	     aggregate_value_p, so we may see a PARALLEL or a REG.  */
522838fd1498Szrj 	  if (REG_P (hard_reg))
522938fd1498Szrj 	    set_parm_rtl (res, gen_reg_rtx (GET_MODE (hard_reg)));
523038fd1498Szrj 	  else
523138fd1498Szrj 	    {
523238fd1498Szrj 	      gcc_assert (GET_CODE (hard_reg) == PARALLEL);
523338fd1498Szrj 	      set_parm_rtl (res, gen_group_rtx (hard_reg));
523438fd1498Szrj 	    }
523538fd1498Szrj 	}
523638fd1498Szrj 
523738fd1498Szrj       /* Set DECL_REGISTER flag so that expand_function_end will copy the
523838fd1498Szrj 	 result to the real return register(s).  */
523938fd1498Szrj       DECL_REGISTER (res) = 1;
524038fd1498Szrj 
524138fd1498Szrj       if (chkp_function_instrumented_p (current_function_decl))
524238fd1498Szrj 	{
524338fd1498Szrj 	  tree return_type = TREE_TYPE (res);
524438fd1498Szrj 	  rtx bounds = targetm.calls.chkp_function_value_bounds (return_type,
524538fd1498Szrj 								 subr, 1);
524638fd1498Szrj 	  SET_DECL_BOUNDS_RTL (res, bounds);
524738fd1498Szrj 	}
524838fd1498Szrj     }
524938fd1498Szrj 
525038fd1498Szrj   /* Initialize rtx for parameters and local variables.
525138fd1498Szrj      In some cases this requires emitting insns.  */
525238fd1498Szrj   assign_parms (subr);
525338fd1498Szrj 
525438fd1498Szrj   /* If function gets a static chain arg, store it.  */
525538fd1498Szrj   if (cfun->static_chain_decl)
525638fd1498Szrj     {
525738fd1498Szrj       tree parm = cfun->static_chain_decl;
525838fd1498Szrj       rtx local, chain;
525938fd1498Szrj       rtx_insn *insn;
526038fd1498Szrj       int unsignedp;
526138fd1498Szrj 
526238fd1498Szrj       local = gen_reg_rtx (promote_decl_mode (parm, &unsignedp));
526338fd1498Szrj       chain = targetm.calls.static_chain (current_function_decl, true);
526438fd1498Szrj 
526538fd1498Szrj       set_decl_incoming_rtl (parm, chain, false);
526638fd1498Szrj       set_parm_rtl (parm, local);
526738fd1498Szrj       mark_reg_pointer (local, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
526838fd1498Szrj 
526938fd1498Szrj       if (GET_MODE (local) != GET_MODE (chain))
527038fd1498Szrj 	{
527138fd1498Szrj 	  convert_move (local, chain, unsignedp);
527238fd1498Szrj 	  insn = get_last_insn ();
527338fd1498Szrj 	}
527438fd1498Szrj       else
527538fd1498Szrj 	insn = emit_move_insn (local, chain);
527638fd1498Szrj 
527738fd1498Szrj       /* Mark the register as eliminable, similar to parameters.  */
527838fd1498Szrj       if (MEM_P (chain)
527938fd1498Szrj 	  && reg_mentioned_p (arg_pointer_rtx, XEXP (chain, 0)))
528038fd1498Szrj 	set_dst_reg_note (insn, REG_EQUIV, chain, local);
528138fd1498Szrj 
528238fd1498Szrj       /* If we aren't optimizing, save the static chain onto the stack.  */
528338fd1498Szrj       if (!optimize)
528438fd1498Szrj 	{
528538fd1498Szrj 	  tree saved_static_chain_decl
528638fd1498Szrj 	    = build_decl (DECL_SOURCE_LOCATION (parm), VAR_DECL,
528738fd1498Szrj 			  DECL_NAME (parm), TREE_TYPE (parm));
528838fd1498Szrj 	  rtx saved_static_chain_rtx
528938fd1498Szrj 	    = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
529038fd1498Szrj 	  SET_DECL_RTL (saved_static_chain_decl, saved_static_chain_rtx);
529138fd1498Szrj 	  emit_move_insn (saved_static_chain_rtx, chain);
529238fd1498Szrj 	  SET_DECL_VALUE_EXPR (parm, saved_static_chain_decl);
529338fd1498Szrj 	  DECL_HAS_VALUE_EXPR_P (parm) = 1;
529438fd1498Szrj 	}
529538fd1498Szrj     }
529638fd1498Szrj 
529738fd1498Szrj   /* The following was moved from init_function_start.
529838fd1498Szrj      The move was supposed to make sdb output more accurate.  */
529938fd1498Szrj   /* Indicate the beginning of the function body,
530038fd1498Szrj      as opposed to parm setup.  */
530138fd1498Szrj   emit_note (NOTE_INSN_FUNCTION_BEG);
530238fd1498Szrj 
530338fd1498Szrj   gcc_assert (NOTE_P (get_last_insn ()));
530438fd1498Szrj 
530538fd1498Szrj   parm_birth_insn = get_last_insn ();
530638fd1498Szrj 
530738fd1498Szrj   /* If the function receives a non-local goto, then store the
530838fd1498Szrj      bits we need to restore the frame pointer.  */
530938fd1498Szrj   if (cfun->nonlocal_goto_save_area)
531038fd1498Szrj     {
531138fd1498Szrj       tree t_save;
531238fd1498Szrj       rtx r_save;
531338fd1498Szrj 
531438fd1498Szrj       tree var = TREE_OPERAND (cfun->nonlocal_goto_save_area, 0);
531538fd1498Szrj       gcc_assert (DECL_RTL_SET_P (var));
531638fd1498Szrj 
531738fd1498Szrj       t_save = build4 (ARRAY_REF,
531838fd1498Szrj 		       TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
531938fd1498Szrj 		       cfun->nonlocal_goto_save_area,
532038fd1498Szrj 		       integer_zero_node, NULL_TREE, NULL_TREE);
532138fd1498Szrj       r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
532238fd1498Szrj       gcc_assert (GET_MODE (r_save) == Pmode);
532338fd1498Szrj 
532438fd1498Szrj       emit_move_insn (r_save, targetm.builtin_setjmp_frame_value ());
532538fd1498Szrj       update_nonlocal_goto_save_area ();
532638fd1498Szrj     }
532738fd1498Szrj 
532838fd1498Szrj   if (crtl->profile)
532938fd1498Szrj     {
533038fd1498Szrj #ifdef PROFILE_HOOK
533138fd1498Szrj       PROFILE_HOOK (current_function_funcdef_no);
533238fd1498Szrj #endif
533338fd1498Szrj     }
533438fd1498Szrj 
533538fd1498Szrj   /* If we are doing generic stack checking, the probe should go here.  */
533638fd1498Szrj   if (flag_stack_check == GENERIC_STACK_CHECK)
533738fd1498Szrj     stack_check_probe_note = emit_note (NOTE_INSN_DELETED);
533838fd1498Szrj }
533938fd1498Szrj 
/* Leave the dummy-function context entered earlier (paired with the
   push side; presumably push_dummy_function -- defined elsewhere in
   this file).  Restores the previous cfun and clears the flag that
   marks us as compiling a dummy function.  */
void
pop_dummy_function (void)
{
  pop_cfun ();
  in_dummy_function = false;
}
534638fd1498Szrj 
/* Undo the effects of init_dummy_function_start.  Releases all state
   associated with the dummy function and restores the previous cfun.
   Only valid while in_dummy_function is set.  */
void
expand_dummy_function_end (void)
{
  gcc_assert (in_dummy_function);

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  /* Outside function body, can't compute type's actual size
     until next function's body starts.  */

  /* Release per-function data, then pop back to the enclosing
     function context.  */
  free_after_parsing (cfun);
  free_after_compilation (cfun);
  pop_dummy_function ();
}
536438fd1498Szrj 
536538fd1498Szrj /* Helper for diddle_return_value.  */
536638fd1498Szrj 
536738fd1498Szrj void
diddle_return_value_1(void (* doit)(rtx,void *),void * arg,rtx outgoing)536838fd1498Szrj diddle_return_value_1 (void (*doit) (rtx, void *), void *arg, rtx outgoing)
536938fd1498Szrj {
537038fd1498Szrj   if (! outgoing)
537138fd1498Szrj     return;
537238fd1498Szrj 
537338fd1498Szrj   if (REG_P (outgoing))
537438fd1498Szrj     (*doit) (outgoing, arg);
537538fd1498Szrj   else if (GET_CODE (outgoing) == PARALLEL)
537638fd1498Szrj     {
537738fd1498Szrj       int i;
537838fd1498Szrj 
537938fd1498Szrj       for (i = 0; i < XVECLEN (outgoing, 0); i++)
538038fd1498Szrj 	{
538138fd1498Szrj 	  rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
538238fd1498Szrj 
538338fd1498Szrj 	  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
538438fd1498Szrj 	    (*doit) (x, arg);
538538fd1498Szrj 	}
538638fd1498Szrj     }
538738fd1498Szrj }
538838fd1498Szrj 
/* Call DOIT for each hard register used as a return value from
   the current function.  */

void
diddle_return_value (void (*doit) (rtx, void *), void *arg)
{
  /* Visit both the bounds return rtx and the regular return rtx;
     either may be null, which diddle_return_value_1 ignores.  */
  diddle_return_value_1 (doit, arg, crtl->return_bnd);
  diddle_return_value_1 (doit, arg, crtl->return_rtx);
}
539838fd1498Szrj 
/* Callback for diddle_return_value: emit a clobber of REG.  ARG is
   unused.  */
static void
do_clobber_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
{
  emit_clobber (reg);
}
540438fd1498Szrj 
540538fd1498Szrj void
clobber_return_register(void)540638fd1498Szrj clobber_return_register (void)
540738fd1498Szrj {
540838fd1498Szrj   diddle_return_value (do_clobber_return_reg, NULL);
540938fd1498Szrj 
541038fd1498Szrj   /* In case we do use pseudo to return value, clobber it too.  */
541138fd1498Szrj   if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
541238fd1498Szrj     {
541338fd1498Szrj       tree decl_result = DECL_RESULT (current_function_decl);
541438fd1498Szrj       rtx decl_rtl = DECL_RTL (decl_result);
541538fd1498Szrj       if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
541638fd1498Szrj 	{
541738fd1498Szrj 	  do_clobber_return_reg (decl_rtl, NULL);
541838fd1498Szrj 	}
541938fd1498Szrj     }
542038fd1498Szrj }
542138fd1498Szrj 
/* Callback for diddle_return_value: emit a use of REG.  ARG is
   unused.  */
static void
do_use_return_reg (rtx reg, void *arg ATTRIBUTE_UNUSED)
{
  emit_use (reg);
}
542738fd1498Szrj 
/* Emit a USE for each hard register that carries the current
   function's return value (see the caller in expand_function_end for
   why this is still needed).  */
static void
use_return_register (void)
{
  diddle_return_value (do_use_return_reg, NULL);
}
543338fd1498Szrj 
543438fd1498Szrj /* Set the location of the insn chain starting at INSN to LOC.  */
543538fd1498Szrj 
543638fd1498Szrj static void
set_insn_locations(rtx_insn * insn,int loc)543738fd1498Szrj set_insn_locations (rtx_insn *insn, int loc)
543838fd1498Szrj {
543938fd1498Szrj   while (insn != NULL)
544038fd1498Szrj     {
544138fd1498Szrj       if (INSN_P (insn))
544238fd1498Szrj 	INSN_LOCATION (insn) = loc;
544338fd1498Szrj       insn = NEXT_INSN (insn);
544438fd1498Szrj     }
544538fd1498Szrj }
544638fd1498Szrj 
/* Generate RTL for the end of the current function: the return label,
   copying a pseudo-reg return value into the hard return registers,
   clobbering return registers on the fall-through path, the stack
   protector check, and the stack save/restore needed for alloca.  */

void
expand_function_end (void)
{
  /* If arg_pointer_save_area was referenced only from a nested
     function, we will not have initialized it yet.  Do that now.  */
  if (arg_pointer_save_area && ! crtl->arg_pointer_save_area_init)
    get_arg_pointer_save_area ();

  /* If we are doing generic stack checking and this function makes calls,
     do a stack probe at the start of the function to ensure we have enough
     space for another stack frame.  */
  if (flag_stack_check == GENERIC_STACK_CHECK)
    {
      rtx_insn *insn, *seq;

      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	if (CALL_P (insn))
	  {
	    rtx max_frame_size = GEN_INT (STACK_CHECK_MAX_FRAME_SIZE);
	    start_sequence ();
	    if (STACK_CHECK_MOVING_SP)
	      anti_adjust_stack_and_probe (max_frame_size, true);
	    else
	      probe_stack_range (STACK_OLD_CHECK_PROTECT, max_frame_size);
	    seq = get_insns ();
	    end_sequence ();
	    set_insn_locations (seq, prologue_location);
	    /* Insert the probe at the placeholder note created in
	       expand_function_start; one call suffices to require it.  */
	    emit_insn_before (seq, stack_check_probe_note);
	    break;
	  }
    }

  /* End any sequences that failed to be closed due to syntax errors.  */
  while (in_sequence_p ())
    end_sequence ();

  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();

  /* Output a linenumber for the end of the function.
     SDB depended on this.  */
  set_curr_insn_location (input_location);

  /* Before the return label (if any), clobber the return
     registers so that they are not propagated live to the rest of
     the function.  This can only happen with functions that drop
     through; if there had been a return statement, there would
     have either been a return rtx, or a jump to the return label.

     We delay actual code generation after the current_function_value_rtx
     is computed.  */
  rtx_insn *clobber_after = get_last_insn ();

  /* Output the label for the actual return from the function.  */
  emit_label (return_label);

  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    {
      /* Let except.c know where it should emit the call to unregister
	 the function context for sjlj exceptions.  */
      if (flag_exceptions)
	sjlj_emit_function_exit_after (get_last_insn ());
    }
  else
    {
      /* We want to ensure that instructions that may trap are not
	 moved into the epilogue by scheduling, because we don't
	 always emit unwind information for the epilogue.  */
      if (cfun->can_throw_non_call_exceptions)
	emit_insn (gen_blockage ());
    }

  /* If this is an implementation of throw, do what's necessary to
     communicate between __builtin_eh_return and the epilogue.  */
  expand_eh_return ();

  /* If scalar return value was computed in a pseudo-reg, or was a named
     return value that got dumped to the stack, copy that to the hard
     return register.  */
  if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
    {
      tree decl_result = DECL_RESULT (current_function_decl);
      rtx decl_rtl = DECL_RTL (decl_result);

      if (REG_P (decl_rtl)
	  ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
	  : DECL_REGISTER (decl_result))
	{
	  rtx real_decl_rtl = crtl->return_rtx;
	  complex_mode cmode;

	  /* This should be set in assign_parms.  */
	  gcc_assert (REG_FUNCTION_VALUE_P (real_decl_rtl));

	  /* If this is a BLKmode structure being returned in registers,
	     then use the mode computed in expand_return.  Note that if
	     decl_rtl is memory, then its mode may have been changed,
	     but that crtl->return_rtx has not.  */
	  if (GET_MODE (real_decl_rtl) == BLKmode)
	    PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));

	  /* If a non-BLKmode return value should be padded at the least
	     significant end of the register, shift it left by the appropriate
	     amount.  BLKmode results are handled using the group load/store
	     machinery.  */
	  if (TYPE_MODE (TREE_TYPE (decl_result)) != BLKmode
	      && REG_P (real_decl_rtl)
	      && targetm.calls.return_in_msb (TREE_TYPE (decl_result)))
	    {
	      emit_move_insn (gen_rtx_REG (GET_MODE (decl_rtl),
					   REGNO (real_decl_rtl)),
			      decl_rtl);
	      shift_return_value (GET_MODE (decl_rtl), true, real_decl_rtl);
	    }
	  else if (GET_CODE (real_decl_rtl) == PARALLEL)
	    {
	      /* If expand_function_start has created a PARALLEL for decl_rtl,
		 move the result to the real return registers.  Otherwise, do
		 a group load from decl_rtl for a named return.  */
	      if (GET_CODE (decl_rtl) == PARALLEL)
		emit_group_move (real_decl_rtl, decl_rtl);
	      else
		emit_group_load (real_decl_rtl, decl_rtl,
				 TREE_TYPE (decl_result),
				 int_size_in_bytes (TREE_TYPE (decl_result)));
	    }
	  /* In the case of complex integer modes smaller than a word, we'll
	     need to generate some non-trivial bitfield insertions.  Do that
	     on a pseudo and not the hard register.  */
	  else if (GET_CODE (decl_rtl) == CONCAT
		   && is_complex_int_mode (GET_MODE (decl_rtl), &cmode)
		   && GET_MODE_BITSIZE (cmode) <= BITS_PER_WORD)
	    {
	      int old_generating_concat_p;
	      rtx tmp;

	      /* Temporarily clear generating_concat_p around the
		 gen_reg_rtx call, restoring it afterwards.  */
	      old_generating_concat_p = generating_concat_p;
	      generating_concat_p = 0;
	      tmp = gen_reg_rtx (GET_MODE (decl_rtl));
	      generating_concat_p = old_generating_concat_p;

	      emit_move_insn (tmp, decl_rtl);
	      emit_move_insn (real_decl_rtl, tmp);
	    }
	  /* If a named return value dumped decl_return to memory, then
	     we may need to re-do the PROMOTE_MODE signed/unsigned
	     extension.  */
	  else if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
	    {
	      int unsignedp = TYPE_UNSIGNED (TREE_TYPE (decl_result));
	      promote_function_mode (TREE_TYPE (decl_result),
				     GET_MODE (decl_rtl), &unsignedp,
				     TREE_TYPE (current_function_decl), 1);

	      convert_move (real_decl_rtl, decl_rtl, unsignedp);
	    }
	  else
	    emit_move_insn (real_decl_rtl, decl_rtl);
	}
    }

  /* If returning a structure, arrange to return the address of the value
     in a place where debuggers expect to find it.

     If returning a structure PCC style,
     the caller also depends on this value.
     And cfun->returns_pcc_struct is not necessarily set.  */
  if ((cfun->returns_struct || cfun->returns_pcc_struct)
      && !targetm.calls.omit_struct_return_reg)
    {
      rtx value_address = DECL_RTL (DECL_RESULT (current_function_decl));
      tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
      rtx outgoing;

      /* For a by-reference result, DECL_RTL is already the address;
	 otherwise take the address of the result MEM.  */
      if (DECL_BY_REFERENCE (DECL_RESULT (current_function_decl)))
	type = TREE_TYPE (type);
      else
	value_address = XEXP (value_address, 0);

      outgoing = targetm.calls.function_value (build_pointer_type (type),
					       current_function_decl, true);

      /* Mark this as a function return value so integrate will delete the
	 assignment and USE below when inlining this function.  */
      REG_FUNCTION_VALUE_P (outgoing) = 1;

      /* The address may be ptr_mode and OUTGOING may be Pmode.  */
      scalar_int_mode mode = as_a <scalar_int_mode> (GET_MODE (outgoing));
      value_address = convert_memory_address (mode, value_address);

      emit_move_insn (outgoing, value_address);

      /* Show return register used to hold result (in this case the address
	 of the result).  */
      crtl->return_rtx = outgoing;
    }

  /* Emit the actual code to clobber return register.  Don't emit
     it if clobber_after is a barrier, then the previous basic block
     certainly doesn't fall thru into the exit block.  */
  if (!BARRIER_P (clobber_after))
    {
      start_sequence ();
      clobber_return_register ();
      rtx_insn *seq = get_insns ();
      end_sequence ();

      emit_insn_after (seq, clobber_after);
    }

  /* Output the label for the naked return from the function.  */
  if (naked_return_label)
    emit_label (naked_return_label);

  /* @@@ This is a kludge.  We want to ensure that instructions that
     may trap are not moved into the epilogue by scheduling, because
     we don't always emit unwind information for the epilogue.  */
  if (cfun->can_throw_non_call_exceptions
      && targetm_common.except_unwind_info (&global_options) != UI_SJLJ)
    emit_insn (gen_blockage ());

  /* If stack protection is enabled for this function, check the guard.  */
  if (crtl->stack_protect_guard && targetm.stack_protect_runtime_enabled_p ())
    stack_protect_epilogue ();

  /* If we had calls to alloca, and this machine needs
     an accurate stack pointer to exit the function,
     insert some code to save and restore the stack pointer.  */
  if (! EXIT_IGNORE_STACK
      && cfun->calls_alloca)
    {
      rtx tem = 0;

      /* The save is spliced in right after parameter setup; the
	 matching restore is emitted here, at function exit.  */
      start_sequence ();
      emit_stack_save (SAVE_FUNCTION, &tem);
      rtx_insn *seq = get_insns ();
      end_sequence ();
      emit_insn_before (seq, parm_birth_insn);

      emit_stack_restore (SAVE_FUNCTION, tem);
    }

  /* ??? This should no longer be necessary since stupid is no longer with
     us, but there are some parts of the compiler (eg reload_combine, and
     sh mach_dep_reorg) that still try and compute their own lifetime info
     instead of using the general framework.  */
  use_return_register ();
}
569738fd1498Szrj 
/* Return the stack slot that saves the incoming arg pointer,
   allocating it on first use and, if not yet done, emitting the
   initializing store at the start of the function.  */
rtx
get_arg_pointer_save_area (void)
{
  rtx ret = arg_pointer_save_area;

  /* Lazily allocate a Pmode-sized stack slot for the save area.  */
  if (! ret)
    {
      ret = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
      arg_pointer_save_area = ret;
    }

  if (! crtl->arg_pointer_save_area_init)
    {
      /* Save the arg pointer at the beginning of the function.  The
	 generated stack slot may not be a valid memory address, so we
	 have to check it and fix it if necessary.  */
      start_sequence ();
      emit_move_insn (validize_mem (copy_rtx (ret)),
                      crtl->args.internal_arg_pointer);
      rtx_insn *seq = get_insns ();
      end_sequence ();

      /* Splice the store into the outermost insn stream, right after
	 the function entry point.  */
      push_topmost_sequence ();
      emit_insn_after (seq, entry_of_function ());
      pop_topmost_sequence ();

      crtl->arg_pointer_save_area_init = true;
    }

  return ret;
}
572938fd1498Szrj 
573038fd1498Szrj 
573138fd1498Szrj /* If debugging dumps are requested, dump information about how the
573238fd1498Szrj    target handled -fstack-check=clash for the prologue.
573338fd1498Szrj 
573438fd1498Szrj    PROBES describes what if any probes were emitted.
573538fd1498Szrj 
573638fd1498Szrj    RESIDUALS indicates if the prologue had any residual allocation
573738fd1498Szrj    (i.e. total allocation was not a multiple of PROBE_INTERVAL).  */
573838fd1498Szrj 
573938fd1498Szrj void
dump_stack_clash_frame_info(enum stack_clash_probes probes,bool residuals)574038fd1498Szrj dump_stack_clash_frame_info (enum stack_clash_probes probes, bool residuals)
574138fd1498Szrj {
574238fd1498Szrj   if (!dump_file)
574338fd1498Szrj     return;
574438fd1498Szrj 
574538fd1498Szrj   switch (probes)
574638fd1498Szrj     {
574738fd1498Szrj     case NO_PROBE_NO_FRAME:
574838fd1498Szrj       fprintf (dump_file,
574938fd1498Szrj 	       "Stack clash no probe no stack adjustment in prologue.\n");
575038fd1498Szrj       break;
575138fd1498Szrj     case NO_PROBE_SMALL_FRAME:
575238fd1498Szrj       fprintf (dump_file,
575338fd1498Szrj 	       "Stack clash no probe small stack adjustment in prologue.\n");
575438fd1498Szrj       break;
575538fd1498Szrj     case PROBE_INLINE:
575638fd1498Szrj       fprintf (dump_file, "Stack clash inline probes in prologue.\n");
575738fd1498Szrj       break;
575838fd1498Szrj     case PROBE_LOOP:
575938fd1498Szrj       fprintf (dump_file, "Stack clash probe loop in prologue.\n");
576038fd1498Szrj       break;
576138fd1498Szrj     }
576238fd1498Szrj 
576338fd1498Szrj   if (residuals)
576438fd1498Szrj     fprintf (dump_file, "Stack clash residual allocation in prologue.\n");
576538fd1498Szrj   else
576638fd1498Szrj     fprintf (dump_file, "Stack clash no residual allocation in prologue.\n");
576738fd1498Szrj 
576838fd1498Szrj   if (frame_pointer_needed)
576938fd1498Szrj     fprintf (dump_file, "Stack clash frame pointer needed.\n");
577038fd1498Szrj   else
577138fd1498Szrj     fprintf (dump_file, "Stack clash no frame pointer needed.\n");
577238fd1498Szrj 
577338fd1498Szrj   if (TREE_THIS_VOLATILE (cfun->decl))
577438fd1498Szrj     fprintf (dump_file,
577538fd1498Szrj 	     "Stack clash noreturn prologue, assuming no implicit"
577638fd1498Szrj 	     " probes in caller.\n");
577738fd1498Szrj   else
577838fd1498Szrj     fprintf (dump_file,
577938fd1498Szrj 	     "Stack clash not noreturn prologue.\n");
578038fd1498Szrj }
578138fd1498Szrj 
578238fd1498Szrj /* Add a list of INSNS to the hash HASHP, possibly allocating HASHP
578338fd1498Szrj    for the first time.  */
578438fd1498Szrj 
578538fd1498Szrj static void
record_insns(rtx_insn * insns,rtx end,hash_table<insn_cache_hasher> ** hashp)578638fd1498Szrj record_insns (rtx_insn *insns, rtx end, hash_table<insn_cache_hasher> **hashp)
578738fd1498Szrj {
578838fd1498Szrj   rtx_insn *tmp;
578938fd1498Szrj   hash_table<insn_cache_hasher> *hash = *hashp;
579038fd1498Szrj 
579138fd1498Szrj   if (hash == NULL)
579238fd1498Szrj     *hashp = hash = hash_table<insn_cache_hasher>::create_ggc (17);
579338fd1498Szrj 
579438fd1498Szrj   for (tmp = insns; tmp != end; tmp = NEXT_INSN (tmp))
579538fd1498Szrj     {
579638fd1498Szrj       rtx *slot = hash->find_slot (tmp, INSERT);
579738fd1498Szrj       gcc_assert (*slot == NULL);
579838fd1498Szrj       *slot = tmp;
579938fd1498Szrj     }
580038fd1498Szrj }
580138fd1498Szrj 
580238fd1498Szrj /* INSN has been duplicated or replaced by as COPY, perhaps by duplicating a
580338fd1498Szrj    basic block, splitting or peepholes.  If INSN is a prologue or epilogue
580438fd1498Szrj    insn, then record COPY as well.  */
580538fd1498Szrj 
580638fd1498Szrj void
maybe_copy_prologue_epilogue_insn(rtx insn,rtx copy)580738fd1498Szrj maybe_copy_prologue_epilogue_insn (rtx insn, rtx copy)
580838fd1498Szrj {
580938fd1498Szrj   hash_table<insn_cache_hasher> *hash;
581038fd1498Szrj   rtx *slot;
581138fd1498Szrj 
581238fd1498Szrj   hash = epilogue_insn_hash;
581338fd1498Szrj   if (!hash || !hash->find (insn))
581438fd1498Szrj     {
581538fd1498Szrj       hash = prologue_insn_hash;
581638fd1498Szrj       if (!hash || !hash->find (insn))
581738fd1498Szrj 	return;
581838fd1498Szrj     }
581938fd1498Szrj 
582038fd1498Szrj   slot = hash->find_slot (copy, INSERT);
582138fd1498Szrj   gcc_assert (*slot == NULL);
582238fd1498Szrj   *slot = copy;
582338fd1498Szrj }
582438fd1498Szrj 
582538fd1498Szrj /* Determine if any INSNs in HASH are, or are part of, INSN.  Because
582638fd1498Szrj    we can be running after reorg, SEQUENCE rtl is possible.  */
582738fd1498Szrj 
582838fd1498Szrj static bool
contains(const rtx_insn * insn,hash_table<insn_cache_hasher> * hash)582938fd1498Szrj contains (const rtx_insn *insn, hash_table<insn_cache_hasher> *hash)
583038fd1498Szrj {
583138fd1498Szrj   if (hash == NULL)
583238fd1498Szrj     return false;
583338fd1498Szrj 
583438fd1498Szrj   if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
583538fd1498Szrj     {
583638fd1498Szrj       rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
583738fd1498Szrj       int i;
583838fd1498Szrj       for (i = seq->len () - 1; i >= 0; i--)
583938fd1498Szrj 	if (hash->find (seq->element (i)))
584038fd1498Szrj 	  return true;
584138fd1498Szrj       return false;
584238fd1498Szrj     }
584338fd1498Szrj 
584438fd1498Szrj   return hash->find (const_cast<rtx_insn *> (insn)) != NULL;
584538fd1498Szrj }
584638fd1498Szrj 
/* Return nonzero if INSN is (part of) a recorded prologue insn.  */
int
prologue_contains (const rtx_insn *insn)
{
  return contains (insn, prologue_insn_hash);
}
585238fd1498Szrj 
/* Return nonzero if INSN is (part of) a recorded epilogue insn.  */
int
epilogue_contains (const rtx_insn *insn)
{
  return contains (insn, epilogue_insn_hash);
}
585838fd1498Szrj 
585938fd1498Szrj int
prologue_epilogue_contains(const rtx_insn * insn)586038fd1498Szrj prologue_epilogue_contains (const rtx_insn *insn)
586138fd1498Szrj {
586238fd1498Szrj   if (contains (insn, prologue_insn_hash))
586338fd1498Szrj     return 1;
586438fd1498Szrj   if (contains (insn, epilogue_insn_hash))
586538fd1498Szrj     return 1;
586638fd1498Szrj   return 0;
586738fd1498Szrj }
586838fd1498Szrj 
/* Record every insn in SEQ as a prologue insn.  */
void
record_prologue_seq (rtx_insn *seq)
{
  record_insns (seq, NULL, &prologue_insn_hash);
}
587438fd1498Szrj 
/* Record every insn in SEQ as an epilogue insn.  */
void
record_epilogue_seq (rtx_insn *seq)
{
  record_insns (seq, NULL, &epilogue_insn_hash);
}
588038fd1498Szrj 
588138fd1498Szrj /* Set JUMP_LABEL for a return insn.  */
588238fd1498Szrj 
588338fd1498Szrj void
set_return_jump_label(rtx_insn * returnjump)588438fd1498Szrj set_return_jump_label (rtx_insn *returnjump)
588538fd1498Szrj {
588638fd1498Szrj   rtx pat = PATTERN (returnjump);
588738fd1498Szrj   if (GET_CODE (pat) == PARALLEL)
588838fd1498Szrj     pat = XVECEXP (pat, 0, 0);
588938fd1498Szrj   if (ANY_RETURN_P (pat))
589038fd1498Szrj     JUMP_LABEL (returnjump) = pat;
589138fd1498Szrj   else
589238fd1498Szrj     JUMP_LABEL (returnjump) = ret_rtx;
589338fd1498Szrj }
589438fd1498Szrj 
589538fd1498Szrj /* Return a sequence to be used as the split prologue for the current
589638fd1498Szrj    function, or NULL.  */
589738fd1498Szrj 
589838fd1498Szrj static rtx_insn *
make_split_prologue_seq(void)589938fd1498Szrj make_split_prologue_seq (void)
590038fd1498Szrj {
590138fd1498Szrj   if (!flag_split_stack
590238fd1498Szrj       || lookup_attribute ("no_split_stack", DECL_ATTRIBUTES (cfun->decl)))
590338fd1498Szrj     return NULL;
590438fd1498Szrj 
590538fd1498Szrj   start_sequence ();
590638fd1498Szrj   emit_insn (targetm.gen_split_stack_prologue ());
590738fd1498Szrj   rtx_insn *seq = get_insns ();
590838fd1498Szrj   end_sequence ();
590938fd1498Szrj 
591038fd1498Szrj   record_insns (seq, NULL, &prologue_insn_hash);
591138fd1498Szrj   set_insn_locations (seq, prologue_location);
591238fd1498Szrj 
591338fd1498Szrj   return seq;
591438fd1498Szrj }
591538fd1498Szrj 
/* Return a sequence to be used as the prologue for the current function,
   or NULL.  */

static rtx_insn *
make_prologue_seq (void)
{
  /* Nothing to do if the target has no prologue pattern.  */
  if (!targetm.have_prologue ())
    return NULL;

  start_sequence ();
  rtx_insn *seq = targetm.gen_prologue ();
  emit_insn (seq);

  /* Insert an explicit USE for the frame pointer
     if the profiling is on and the frame pointer is required.  */
  if (crtl->profile && frame_pointer_needed)
    emit_use (hard_frame_pointer_rtx);

  /* Retain a map of the prologue insns.  */
  record_insns (seq, NULL, &prologue_insn_hash);
  emit_note (NOTE_INSN_PROLOGUE_END);

  /* Ensure that instructions are not moved into the prologue when
     profiling is on.  The call to the profiling routine can be
     emitted within the live range of a call-clobbered register.  */
  if (!targetm.profile_before_prologue () && crtl->profile)
    emit_insn (gen_blockage ());

  /* Re-fetch the full emitted sequence (including the USE, note and
     blockage above) and give it the prologue location.  */
  seq = get_insns ();
  end_sequence ();
  set_insn_locations (seq, prologue_location);

  return seq;
}
595038fd1498Szrj 
/* Return a sequence to be used as the epilogue for the current function,
   or NULL.  */

static rtx_insn *
make_epilogue_seq (void)
{
  /* Nothing to do if the target has no epilogue pattern.  */
  if (!targetm.have_epilogue ())
    return NULL;

  start_sequence ();
  emit_note (NOTE_INSN_EPILOGUE_BEG);
  rtx_insn *seq = targetm.gen_epilogue ();
  if (seq)
    emit_jump_insn (seq);

  /* Retain a map of the epilogue insns.  Recording starts at SEQ, so
     the NOTE_INSN_EPILOGUE_BEG above is not entered in the hash.  */
  record_insns (seq, NULL, &epilogue_insn_hash);
  set_insn_locations (seq, epilogue_location);

  /* Re-fetch the full sequence, note included, and remember the last
     insn, which should be the return jump.  */
  seq = get_insns ();
  rtx_insn *returnjump = get_last_insn ();
  end_sequence ();

  if (JUMP_P (returnjump))
    set_return_jump_label (returnjump);

  return seq;
}
597938fd1498Szrj 
598038fd1498Szrj 
/* Generate the prologue and epilogue RTL if the machine supports it.  Thread
   this into place with notes indicating where the prologue ends and where
   the epilogue begins.  Update the basic block information when possible.

   Notes on epilogue placement:
   There are several kinds of edges to the exit block:
   * a single fallthru edge from LAST_BB
   * possibly, edges from blocks containing sibcalls
   * possibly, fake edges from infinite loops

   The epilogue is always emitted on the fallthru edge from the last basic
   block in the function, LAST_BB, into the exit block.

   If LAST_BB is empty except for a label, it is the target of every
   other basic block in the function that ends in a return.  If a
   target has a return or simple_return pattern (possibly with
   conditional variants), these basic blocks can be changed so that a
   return insn is emitted into them, and their target is adjusted to
   the real exit block.

   Notes on shrink wrapping: We implement a fairly conservative
   version of shrink-wrapping rather than the textbook one.  We only
   generate a single prologue and a single epilogue.  This is
   sufficient to catch a number of interesting cases involving early
   exits.

   First, we identify the blocks that require the prologue to occur before
   them.  These are the ones that modify a call-saved register, or reference
   any of the stack or frame pointer registers.  To simplify things, we then
   mark everything reachable from these blocks as also requiring a prologue.
   This takes care of loops automatically, and avoids the need to examine
   whether MEMs reference the frame, since it is sufficient to check for
   occurrences of the stack or frame pointer.

   We then compute the set of blocks for which the need for a prologue
   is anticipatable (borrowing terminology from the shrink-wrapping
   description in Muchnick's book).  These are the blocks which either
   require a prologue themselves, or those that have only successors
   where the prologue is anticipatable.  The prologue needs to be
   inserted on all edges from BB1->BB2 where BB2 is in ANTIC and BB1
   is not.  For the moment, we ensure that only one such edge exists.

   The epilogue is placed as described above, but we make a
   distinction between inserting return and simple_return patterns
   when modifying other blocks that end in a return.  Blocks that end
   in a sibcall omit the sibcall_epilogue if the block is not in
   ANTIC.  */

void
thread_prologue_and_epilogue_insns (void)
{
  df_analyze ();

  /* Can't deal with multiple successors of the entry block at the
     moment.  Function should always have at least one entry
     point.  */
  gcc_assert (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)));

  edge entry_edge = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  edge orig_entry_edge = entry_edge;

  rtx_insn *split_prologue_seq = make_split_prologue_seq ();
  rtx_insn *prologue_seq = make_prologue_seq ();
  rtx_insn *epilogue_seq = make_epilogue_seq ();

  /* Try to perform a kind of shrink-wrapping, making sure the
     prologue/epilogue is emitted only around those parts of the
     function that require it.  ENTRY_EDGE may be moved by this call;
     ORIG_ENTRY_EDGE keeps the original for the split prologue below.  */
  try_shrink_wrapping (&entry_edge, prologue_seq);

  /* If the target can handle splitting the prologue/epilogue into separate
     components, try to shrink-wrap these components separately.  */
  try_shrink_wrapping_separate (entry_edge->dest);

  /* If that did anything for any component we now need to generate the
     "main" prologue again.  Because some targets require some of these
     to be called in a specific order (i386 requires the split prologue
     to be first, for example), we create all three sequences again here.
     If this does not work for some target, that target should not enable
     separate shrink-wrapping.  */
  if (crtl->shrink_wrapped_separate)
    {
      split_prologue_seq = make_split_prologue_seq ();
      prologue_seq = make_prologue_seq ();
      epilogue_seq = make_epilogue_seq ();
    }

  rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));

  /* A small fib -- epilogue is not yet completed, but we wish to re-use
     this marker for the splits of EH_RETURN patterns, and nothing else
     uses the flag in the meantime.  */
  epilogue_completed = 1;

  /* Find non-fallthru edges that end with EH_RETURN instructions.  On
     some targets, these get split to a special version of the epilogue
     code.  In order to be able to properly annotate these with unwind
     info, try to split them now.  If we get a valid split, drop an
     EPILOGUE_BEG note and mark the insns as epilogue insns.  */
  edge e;
  edge_iterator ei;
  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
    {
      rtx_insn *prev, *last, *trial;

      if (e->flags & EDGE_FALLTHRU)
	continue;
      last = BB_END (e->src);
      if (!eh_returnjump_p (last))
	continue;

      prev = PREV_INSN (last);
      trial = try_split (PATTERN (last), last, 1);
      /* try_split returns the original insn when no split happened.  */
      if (trial == last)
	continue;

      record_insns (NEXT_INSN (prev), NEXT_INSN (trial), &epilogue_insn_hash);
      emit_note_after (NOTE_INSN_EPILOGUE_BEG, prev);
    }

  edge exit_fallthru_edge = find_fallthru_edge (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);

  if (exit_fallthru_edge)
    {
      if (epilogue_seq)
	{
	  insert_insn_on_edge (epilogue_seq, exit_fallthru_edge);
	  commit_edge_insertions ();

	  /* The epilogue insns we inserted may cause the exit edge to no longer
	     be fallthru.  */
	  FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
	    {
	      if (((e->flags & EDGE_FALLTHRU) != 0)
		  && returnjump_p (BB_END (e->src)))
		e->flags &= ~EDGE_FALLTHRU;
	    }
	}
      else if (next_active_insn (BB_END (exit_fallthru_edge->src)))
	{
	  /* We have a fall-through edge to the exit block, the source is not
	     at the end of the function, and there will be an assembler epilogue
	     at the end of the function.
	     We can't use force_nonfallthru here, because that would try to
	     use return.  Inserting a jump 'by hand' is extremely messy, so
	     we take advantage of cfg_layout_finalize using
	     fixup_fallthru_exit_predecessor.  */
	  cfg_layout_initialize (0);
	  basic_block cur_bb;
	  FOR_EACH_BB_FN (cur_bb, cfun)
	    if (cur_bb->index >= NUM_FIXED_BLOCKS
		&& cur_bb->next_bb->index >= NUM_FIXED_BLOCKS)
	      cur_bb->aux = cur_bb->next_bb;
	  cfg_layout_finalize ();
	}
    }

  /* Insert the prologue.  */

  rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  if (split_prologue_seq || prologue_seq)
    {
      /* Track the first non-debug insn of each sequence so we can find
	 the basic blocks the insns ended up in after committing.  */
      rtx_insn *split_prologue_insn = split_prologue_seq;
      if (split_prologue_seq)
	{
	  while (split_prologue_insn && !NONDEBUG_INSN_P (split_prologue_insn))
	    split_prologue_insn = NEXT_INSN (split_prologue_insn);
	  /* The split prologue always goes on the original entry edge,
	     even when shrink-wrapping moved ENTRY_EDGE.  */
	  insert_insn_on_edge (split_prologue_seq, orig_entry_edge);
	}

      rtx_insn *prologue_insn = prologue_seq;
      if (prologue_seq)
	{
	  while (prologue_insn && !NONDEBUG_INSN_P (prologue_insn))
	    prologue_insn = NEXT_INSN (prologue_insn);
	  insert_insn_on_edge (prologue_seq, entry_edge);
	}

      commit_edge_insertions ();

      /* Look for basic blocks within the prologue insns.  */
      if (split_prologue_insn
	  && BLOCK_FOR_INSN (split_prologue_insn) == NULL)
	split_prologue_insn = NULL;
      if (prologue_insn
	  && BLOCK_FOR_INSN (prologue_insn) == NULL)
	prologue_insn = NULL;
      if (split_prologue_insn || prologue_insn)
	{
	  auto_sbitmap blocks (last_basic_block_for_fn (cfun));
	  bitmap_clear (blocks);
	  if (split_prologue_insn)
	    bitmap_set_bit (blocks,
			    BLOCK_FOR_INSN (split_prologue_insn)->index);
	  if (prologue_insn)
	    bitmap_set_bit (blocks, BLOCK_FOR_INSN (prologue_insn)->index);
	  find_many_sub_basic_blocks (blocks);
	}
    }

  default_rtl_profile ();

  /* Emit sibling epilogues before any sibling call sites.  */
  for (ei = ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds);
       (e = ei_safe_edge (ei));
       ei_next (&ei))
    {
      /* Skip those already handled, the ones that run without prologue.  */
      if (e->flags & EDGE_IGNORE)
	{
	  e->flags &= ~EDGE_IGNORE;
	  continue;
	}

      rtx_insn *insn = BB_END (e->src);

      if (!(CALL_P (insn) && SIBLING_CALL_P (insn)))
	continue;

      if (rtx_insn *ep_seq = targetm.gen_sibcall_epilogue ())
	{
	  start_sequence ();
	  emit_note (NOTE_INSN_EPILOGUE_BEG);
	  emit_insn (ep_seq);
	  rtx_insn *seq = get_insns ();
	  end_sequence ();

	  /* Retain a map of the epilogue insns.  Used in life analysis to
	     avoid getting rid of sibcall epilogue insns.  Do this before we
	     actually emit the sequence.  */
	  record_insns (seq, NULL, &epilogue_insn_hash);
	  set_insn_locations (seq, epilogue_location);

	  emit_insn_before (seq, insn);
	}
    }

  if (epilogue_seq)
    {
      rtx_insn *insn, *next;

      /* Similarly, move any line notes that appear after the epilogue.
         There is no need, however, to be quite so anal about the existence
	 of such a note.  Also possibly move
	 NOTE_INSN_FUNCTION_BEG notes, as those can be relevant for debug
	 info generation.  */
      for (insn = epilogue_seq; insn; insn = next)
	{
	  next = NEXT_INSN (insn);
	  if (NOTE_P (insn)
	      && (NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG))
	    reorder_insns (insn, insn, PREV_INSN (epilogue_seq));
	}
    }

  /* Threading the prologue and epilogue changes the artificial refs
     in the entry and exit blocks.  */
  epilogue_completed = 1;
  df_update_entry_exit_and_calls ();
}
624238fd1498Szrj 
/* Reposition the prologue-end and epilogue-begin notes after
   instruction scheduling, which may have moved insns across them.  */

void
reposition_prologue_and_epilogue_notes (void)
{
  if (!targetm.have_prologue ()
      && !targetm.have_epilogue ()
      && !targetm.have_sibcall_epilogue ())
    return;

  /* Since the hash table is created on demand, the fact that it is
     non-null is a signal that it is non-empty.  */
  if (prologue_insn_hash != NULL)
    {
      size_t len = prologue_insn_hash->elements ();
      rtx_insn *insn, *last = NULL, *note = NULL;

      /* Scan from the beginning until we reach the last prologue insn.  */
      /* ??? While we do have the CFG intact, there are two problems:
	 (1) The prologue can contain loops (typically probing the stack),
	     which means that the end of the prologue isn't in the first bb.
	 (2) Sometimes the PROLOGUE_END note gets pushed into the next bb.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	{
	  if (NOTE_P (insn))
	    {
	      if (NOTE_KIND (insn) == NOTE_INSN_PROLOGUE_END)
		note = insn;
	    }
	  else if (contains (insn, prologue_insn_hash))
	    {
	      last = insn;
	      /* Stop once every recorded prologue insn has been seen.  */
	      if (--len == 0)
		break;
	    }
	}

      if (last)
	{
	  if (note == NULL)
	    {
	      /* Scan forward looking for the PROLOGUE_END note.  It should
		 be right at the beginning of the block, possibly with other
		 insn notes that got moved there.  */
	      for (note = NEXT_INSN (last); ; note = NEXT_INSN (note))
		{
		  if (NOTE_P (note)
		      && NOTE_KIND (note) == NOTE_INSN_PROLOGUE_END)
		    break;
		}
	    }

	  /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note.  */
	  if (LABEL_P (last))
	    last = NEXT_INSN (last);
	  /* Move the note to just after the last prologue insn.  */
	  reorder_insns (note, note, last);
	}
    }

  if (epilogue_insn_hash != NULL)
    {
      edge_iterator ei;
      edge e;

      FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
	{
	  rtx_insn *insn, *first = NULL, *note = NULL;
	  basic_block bb = e->src;

	  /* Scan from the beginning until we reach the first epilogue insn. */
	  FOR_BB_INSNS (bb, insn)
	    {
	      if (NOTE_P (insn))
		{
		  if (NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
		    {
		      note = insn;
		      if (first != NULL)
			break;
		    }
		}
	      else if (first == NULL && contains (insn, epilogue_insn_hash))
		{
		  first = insn;
		  if (note != NULL)
		    break;
		}
	    }

	  if (note)
	    {
	      /* If the function has a single basic block, and no real
		 epilogue insns (e.g. sibcall with no cleanup), the
		 epilogue note can get scheduled before the prologue
		 note.  If we have frame related prologue insns, having
		 them scanned during the epilogue will result in a crash.
		 In this case re-order the epilogue note to just before
		 the last insn in the block.  */
	      if (first == NULL)
		first = BB_END (bb);

	      if (PREV_INSN (first) != note)
		reorder_insns (note, note, PREV_INSN (first));
	    }
	}
    }
}
635138fd1498Szrj 
635238fd1498Szrj /* Returns the name of function declared by FNDECL.  */
635338fd1498Szrj const char *
fndecl_name(tree fndecl)635438fd1498Szrj fndecl_name (tree fndecl)
635538fd1498Szrj {
635638fd1498Szrj   if (fndecl == NULL)
635738fd1498Szrj     return "(nofn)";
635838fd1498Szrj   return lang_hooks.decl_printable_name (fndecl, 1);
635938fd1498Szrj }
636038fd1498Szrj 
636138fd1498Szrj /* Returns the name of function FN.  */
636238fd1498Szrj const char *
function_name(struct function * fn)636338fd1498Szrj function_name (struct function *fn)
636438fd1498Szrj {
636538fd1498Szrj   tree fndecl = (fn == NULL) ? NULL : fn->decl;
636638fd1498Szrj   return fndecl_name (fndecl);
636738fd1498Szrj }
636838fd1498Szrj 
/* Return the printable name of the function currently being compiled
   (i.e. of CFUN), or "(nofn)" when there is none.  */
const char *
current_function_name (void)
{
  return function_name (cfun);
}
637538fd1498Szrj 
637638fd1498Szrj 
/* Pass body for pass_leaf_regs: on targets that define LEAF_REGISTERS,
   record in CRTL whether the function uses only leaf registers and is
   itself a leaf, so later stages can renumber registers.  A no-op on
   other targets.  Always returns 0 (no TODO flags).  */
static unsigned int
rest_of_handle_check_leaf_regs (void)
{
#ifdef LEAF_REGISTERS
  crtl->uses_only_leaf_regs
    = optimize > 0 && only_leaf_regs_used () && leaf_function_p ();
#endif
  return 0;
}
638638fd1498Szrj 
638738fd1498Szrj /* Insert a TYPE into the used types hash table of CFUN.  */
638838fd1498Szrj 
638938fd1498Szrj static void
used_types_insert_helper(tree type,struct function * func)639038fd1498Szrj used_types_insert_helper (tree type, struct function *func)
639138fd1498Szrj {
639238fd1498Szrj   if (type != NULL && func != NULL)
639338fd1498Szrj     {
639438fd1498Szrj       if (func->used_types_hash == NULL)
639538fd1498Szrj 	func->used_types_hash = hash_set<tree>::create_ggc (37);
639638fd1498Szrj 
639738fd1498Szrj       func->used_types_hash->add (type);
639838fd1498Szrj     }
639938fd1498Szrj }
640038fd1498Szrj 
640138fd1498Szrj /* Given a type, insert it into the used hash table in cfun.  */
640238fd1498Szrj void
used_types_insert(tree t)640338fd1498Szrj used_types_insert (tree t)
640438fd1498Szrj {
640538fd1498Szrj   while (POINTER_TYPE_P (t) || TREE_CODE (t) == ARRAY_TYPE)
640638fd1498Szrj     if (TYPE_NAME (t))
640738fd1498Szrj       break;
640838fd1498Szrj     else
640938fd1498Szrj       t = TREE_TYPE (t);
641038fd1498Szrj   if (TREE_CODE (t) == ERROR_MARK)
641138fd1498Szrj     return;
641238fd1498Szrj   if (TYPE_NAME (t) == NULL_TREE
641338fd1498Szrj       || TYPE_NAME (t) == TYPE_NAME (TYPE_MAIN_VARIANT (t)))
641438fd1498Szrj     t = TYPE_MAIN_VARIANT (t);
641538fd1498Szrj   if (debug_info_level > DINFO_LEVEL_NONE)
641638fd1498Szrj     {
641738fd1498Szrj       if (cfun)
641838fd1498Szrj 	used_types_insert_helper (t, cfun);
641938fd1498Szrj       else
642038fd1498Szrj 	{
642138fd1498Szrj 	  /* So this might be a type referenced by a global variable.
642238fd1498Szrj 	     Record that type so that we can later decide to emit its
642338fd1498Szrj 	     debug information.  */
642438fd1498Szrj 	  vec_safe_push (types_used_by_cur_var_decl, t);
642538fd1498Szrj 	}
642638fd1498Szrj     }
642738fd1498Szrj }
642838fd1498Szrj 
642938fd1498Szrj /* Helper to Hash a struct types_used_by_vars_entry.  */
643038fd1498Szrj 
643138fd1498Szrj static hashval_t
hash_types_used_by_vars_entry(const struct types_used_by_vars_entry * entry)643238fd1498Szrj hash_types_used_by_vars_entry (const struct types_used_by_vars_entry *entry)
643338fd1498Szrj {
643438fd1498Szrj   gcc_assert (entry && entry->var_decl && entry->type);
643538fd1498Szrj 
643638fd1498Szrj   return iterative_hash_object (entry->type,
643738fd1498Szrj 				iterative_hash_object (entry->var_decl, 0));
643838fd1498Szrj }
643938fd1498Szrj 
/* Hash function of the types_used_by_vars_entry hash table; forwards to
   hash_types_used_by_vars_entry.  */

hashval_t
used_type_hasher::hash (types_used_by_vars_entry *entry)
{
  return hash_types_used_by_vars_entry (entry);
}
644738fd1498Szrj 
644838fd1498Szrj /*Equality function of the types_used_by_vars_entry hash table.  */
644938fd1498Szrj 
645038fd1498Szrj bool
equal(types_used_by_vars_entry * e1,types_used_by_vars_entry * e2)645138fd1498Szrj used_type_hasher::equal (types_used_by_vars_entry *e1,
645238fd1498Szrj 			 types_used_by_vars_entry *e2)
645338fd1498Szrj {
645438fd1498Szrj   return (e1->var_decl == e2->var_decl && e1->type == e2->type);
645538fd1498Szrj }
645638fd1498Szrj 
645738fd1498Szrj /* Inserts an entry into the types_used_by_vars_hash hash table. */
645838fd1498Szrj 
645938fd1498Szrj void
types_used_by_var_decl_insert(tree type,tree var_decl)646038fd1498Szrj types_used_by_var_decl_insert (tree type, tree var_decl)
646138fd1498Szrj {
646238fd1498Szrj   if (type != NULL && var_decl != NULL)
646338fd1498Szrj     {
646438fd1498Szrj       types_used_by_vars_entry **slot;
646538fd1498Szrj       struct types_used_by_vars_entry e;
646638fd1498Szrj       e.var_decl = var_decl;
646738fd1498Szrj       e.type = type;
646838fd1498Szrj       if (types_used_by_vars_hash == NULL)
646938fd1498Szrj 	types_used_by_vars_hash
647038fd1498Szrj 	  = hash_table<used_type_hasher>::create_ggc (37);
647138fd1498Szrj 
647238fd1498Szrj       slot = types_used_by_vars_hash->find_slot (&e, INSERT);
647338fd1498Szrj       if (*slot == NULL)
647438fd1498Szrj 	{
647538fd1498Szrj 	  struct types_used_by_vars_entry *entry;
647638fd1498Szrj 	  entry = ggc_alloc<types_used_by_vars_entry> ();
647738fd1498Szrj 	  entry->type = type;
647838fd1498Szrj 	  entry->var_decl = var_decl;
647938fd1498Szrj 	  *slot = entry;
648038fd1498Szrj 	}
648138fd1498Szrj     }
648238fd1498Szrj }
648338fd1498Szrj 
namespace {

/* Pass descriptor for the *leaf_regs RTL pass (starred name: hidden
   from -fdump-passes listings).  */
const pass_data pass_data_leaf_regs =
{
  RTL_PASS, /* type */
  "*leaf_regs", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Thin opt_pass wrapper around rest_of_handle_check_leaf_regs.  */
class pass_leaf_regs : public rtl_opt_pass
{
public:
  pass_leaf_regs (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_leaf_regs, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      return rest_of_handle_check_leaf_regs ();
    }

}; // class pass_leaf_regs

} // anon namespace
651538fd1498Szrj 
/* Factory for the *leaf_regs pass; called by the pass manager.
   Caller owns the returned object.  */
rtl_opt_pass *
make_pass_leaf_regs (gcc::context *ctxt)
{
  return new pass_leaf_regs (ctxt);
}
652138fd1498Szrj 
/* Pass body for pass_thread_prologue_and_epilogue: emit and place the
   function's prologue/epilogue RTL, then repair the CFG.  Always
   returns 0 (no TODO flags).  */
static unsigned int
rest_of_handle_thread_prologue_and_epilogue (void)
{
  /* prepare_shrink_wrap is sensitive to the block structure of the control
     flow graph, so clean it up first.  */
  if (optimize)
    cleanup_cfg (0);

  /* On some machines, the prologue and epilogue code, or parts thereof,
     can be represented as RTL.  Doing so lets us schedule insns between
     it and the rest of the code and also allows delayed branch
     scheduling to operate in the epilogue.  */
  thread_prologue_and_epilogue_insns ();

  /* Some non-cold blocks may now be only reachable from cold blocks.
     Fix that up.  */
  fixup_partitions ();

  /* Shrink-wrapping can result in unreachable edges in the epilogue,
     see PR57320.  */
  cleanup_cfg (optimize ? CLEANUP_EXPENSIVE : 0);

  /* The stack usage info is finalized during prologue expansion.  */
  if (flag_stack_usage_info)
    output_stack_usage ();

  return 0;
}
655038fd1498Szrj 
namespace {

/* Pass descriptor for the pro_and_epilogue RTL pass; requests DF
   verification and finishing after the pass runs.  */
const pass_data pass_data_thread_prologue_and_epilogue =
{
  RTL_PASS, /* type */
  "pro_and_epilogue", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_THREAD_PROLOGUE_AND_EPILOGUE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_df_verify | TODO_df_finish ), /* todo_flags_finish */
};

/* Thin opt_pass wrapper around
   rest_of_handle_thread_prologue_and_epilogue.  */
class pass_thread_prologue_and_epilogue : public rtl_opt_pass
{
public:
  pass_thread_prologue_and_epilogue (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_thread_prologue_and_epilogue, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      return rest_of_handle_thread_prologue_and_epilogue ();
    }

}; // class pass_thread_prologue_and_epilogue

} // anon namespace
658238fd1498Szrj 
/* Factory for the pro_and_epilogue pass; called by the pass manager.
   Caller owns the returned object.  */
rtl_opt_pass *
make_pass_thread_prologue_and_epilogue (gcc::context *ctxt)
{
  return new pass_thread_prologue_and_epilogue (ctxt);
}
658838fd1498Szrj 
658938fd1498Szrj 
659038fd1498Szrj /* This mini-pass fixes fall-out from SSA in asm statements that have
659138fd1498Szrj    in-out constraints.  Say you start with
659238fd1498Szrj 
659338fd1498Szrj      orig = inout;
659438fd1498Szrj      asm ("": "+mr" (inout));
659538fd1498Szrj      use (orig);
659638fd1498Szrj 
659738fd1498Szrj    which is transformed very early to use explicit output and match operands:
659838fd1498Szrj 
659938fd1498Szrj      orig = inout;
660038fd1498Szrj      asm ("": "=mr" (inout) : "0" (inout));
660138fd1498Szrj      use (orig);
660238fd1498Szrj 
660338fd1498Szrj    Or, after SSA and copyprop,
660438fd1498Szrj 
660538fd1498Szrj      asm ("": "=mr" (inout_2) : "0" (inout_1));
660638fd1498Szrj      use (inout_1);
660738fd1498Szrj 
660838fd1498Szrj    Clearly inout_2 and inout_1 can't be coalesced easily anymore, as
660938fd1498Szrj    they represent two separate values, so they will get different pseudo
661038fd1498Szrj    registers during expansion.  Then, since the two operands need to match
661138fd1498Szrj    per the constraints, but use different pseudo registers, reload can
661238fd1498Szrj    only register a reload for these operands.  But reloads can only be
661338fd1498Szrj    satisfied by hardregs, not by memory, so we need a register for this
661438fd1498Szrj    reload, just because we are presented with non-matching operands.
661538fd1498Szrj    So, even though we allow memory for this operand, no memory can be
661638fd1498Szrj    used for it, just because the two operands don't match.  This can
661738fd1498Szrj    cause reload failures on register-starved targets.
661838fd1498Szrj 
661938fd1498Szrj    So it's a symptom of reload not being able to use memory for reloads
662038fd1498Szrj    or, alternatively it's also a symptom of both operands not coming into
662138fd1498Szrj    reload as matching (in which case the pseudo could go to memory just
662238fd1498Szrj    fine, as the alternative allows it, and no reload would be necessary).
662338fd1498Szrj    We fix the latter problem here, by transforming
662438fd1498Szrj 
662538fd1498Szrj      asm ("": "=mr" (inout_2) : "0" (inout_1));
662638fd1498Szrj 
662738fd1498Szrj    back to
662838fd1498Szrj 
662938fd1498Szrj      inout_2 = inout_1;
663038fd1498Szrj      asm ("": "=mr" (inout_2) : "0" (inout_2));  */
663138fd1498Szrj 
/* Subroutine of the "asmcons" pass.  INSN is a single asm insn; P_SETS
   points at its first SET (of NOUTPUTS sets in total), whose SET_SRC is
   the ASM_OPERANDS shared by all of them.  For each input operand whose
   constraint is a matching constraint ("0", "1", ...) but whose rtx is
   not already identical to the matched output, emit a copy of the input
   into the output pseudo just before INSN and rewrite every occurrence
   of the input inside the asm to use the output instead, so the two
   operands reach reload already matching (see the comment above).  */
static void
match_asm_constraints_1 (rtx_insn *insn, rtx *p_sets, int noutputs)
{
  int i;
  bool changed = false;
  rtx op = SET_SRC (p_sets[0]);
  int ninputs = ASM_OPERANDS_INPUT_LENGTH (op);
  rtvec inputs = ASM_OPERANDS_INPUT_VEC (op);
  /* output_matched[K] records that output K has already been used as
     the replacement target for some input, so we don't redirect the
     same input to a second output.  */
  bool *output_matched = XALLOCAVEC (bool, noutputs);

  memset (output_matched, 0, noutputs * sizeof (bool));
  for (i = 0; i < ninputs; i++)
    {
      rtx input, output;
      rtx_insn *insns;
      const char *constraint = ASM_OPERANDS_INPUT_CONSTRAINT (op, i);
      char *end;
      int match, j;

      /* Skip a leading '%' modifier so strtoul sees the digits.  */
      if (*constraint == '%')
	constraint++;

      /* A matching constraint is just a decimal output number; anything
	 else (END == CONSTRAINT, i.e. no digits consumed) is not a
	 matching constraint and is left alone.  */
      match = strtoul (constraint, &end, 10);
      if (end == constraint)
	continue;

      gcc_assert (match < noutputs);
      output = SET_DEST (p_sets[match]);
      input = RTVEC_ELT (inputs, i);
      /* Only do the transformation for pseudos.  */
      if (! REG_P (output)
	  || rtx_equal_p (output, input)
	  || !(REG_P (input) || SUBREG_P (input)
	       || MEM_P (input) || CONSTANT_P (input))
	  || !general_operand (input, GET_MODE (output)))
	continue;

      /* We can't do anything if the output is also used as input,
	 as we're going to overwrite it.  */
      for (j = 0; j < ninputs; j++)
        if (reg_overlap_mentioned_p (output, RTVEC_ELT (inputs, j)))
	  break;
      if (j != ninputs)
	continue;

      /* Avoid changing the same input several times.  For
	 asm ("" : "=mr" (out1), "=mr" (out2) : "0" (in), "1" (in));
	 only change in once (to out1), rather than changing it
	 first to out1 and afterwards to out2.  */
      if (i > 0)
	{
	  for (j = 0; j < noutputs; j++)
	    if (output_matched[j] && input == SET_DEST (p_sets[j]))
	      break;
	  if (j != noutputs)
	    continue;
	}
      output_matched[match] = true;

      /* Emit OUTPUT = INPUT immediately before the asm insn.  */
      start_sequence ();
      emit_move_insn (output, copy_rtx (input));
      insns = get_insns ();
      end_sequence ();
      emit_insn_before (insns, insn);

      /* Now replace all mentions of the input with output.  We can't
	 just replace the occurrence in inputs[i], as the register might
	 also be used in some other input (or even in an address of an
	 output), which would mean possibly increasing the number of
	 inputs by one (namely 'output' in addition), which might pose
	 a too complicated problem for reload to solve.  E.g. this situation:

	   asm ("" : "=r" (output), "=m" (input) : "0" (input))

	 Here 'input' is used in two occurrences as input (once for the
	 input operand, once for the address in the second output operand).
	 If we would replace only the occurrence of the input operand (to
	 make the matching) we would be left with this:

	   output = input
	   asm ("" : "=r" (output), "=m" (input) : "0" (output))

	 Now we suddenly have two different input values (containing the same
	 value, but different pseudos) where we formerly had only one.
	 With more complicated asms this might lead to reload failures
	 which wouldn't have happened without this pass.  So, iterate over
	 all operands and replace all occurrences of the register used.  */
      for (j = 0; j < noutputs; j++)
	if (!rtx_equal_p (SET_DEST (p_sets[j]), input)
	    && reg_overlap_mentioned_p (input, SET_DEST (p_sets[j])))
	  SET_DEST (p_sets[j]) = replace_rtx (SET_DEST (p_sets[j]),
					      input, output);
      for (j = 0; j < ninputs; j++)
	if (reg_overlap_mentioned_p (input, RTVEC_ELT (inputs, j)))
	  RTVEC_ELT (inputs, j) = replace_rtx (RTVEC_ELT (inputs, j),
					       input, output);

      changed = true;
    }

  /* Let the dataflow framework know the insn was rewritten in place.  */
  if (changed)
    df_insn_rescan (insn);
}
673538fd1498Szrj 
/* Add the decl D to the local_decls list of FUN.  D must be a
   VAR_DECL; anything else is a caller bug and trips the assert.  */

void
add_local_decl (struct function *fun, tree d)
{
  gcc_assert (VAR_P (d));
  vec_safe_push (fun->local_decls, d);
}
674438fd1498Szrj 
674538fd1498Szrj namespace {
674638fd1498Szrj 
674738fd1498Szrj const pass_data pass_data_match_asm_constraints =
674838fd1498Szrj {
674938fd1498Szrj   RTL_PASS, /* type */
675038fd1498Szrj   "asmcons", /* name */
675138fd1498Szrj   OPTGROUP_NONE, /* optinfo_flags */
675238fd1498Szrj   TV_NONE, /* tv_id */
675338fd1498Szrj   0, /* properties_required */
675438fd1498Szrj   0, /* properties_provided */
675538fd1498Szrj   0, /* properties_destroyed */
675638fd1498Szrj   0, /* todo_flags_start */
675738fd1498Szrj   0, /* todo_flags_finish */
675838fd1498Szrj };
675938fd1498Szrj 
676038fd1498Szrj class pass_match_asm_constraints : public rtl_opt_pass
676138fd1498Szrj {
676238fd1498Szrj public:
pass_match_asm_constraints(gcc::context * ctxt)676338fd1498Szrj   pass_match_asm_constraints (gcc::context *ctxt)
676438fd1498Szrj     : rtl_opt_pass (pass_data_match_asm_constraints, ctxt)
676538fd1498Szrj   {}
676638fd1498Szrj 
676738fd1498Szrj   /* opt_pass methods: */
676838fd1498Szrj   virtual unsigned int execute (function *);
676938fd1498Szrj 
677038fd1498Szrj }; // class pass_match_asm_constraints
677138fd1498Szrj 
677238fd1498Szrj unsigned
execute(function * fun)677338fd1498Szrj pass_match_asm_constraints::execute (function *fun)
677438fd1498Szrj {
677538fd1498Szrj   basic_block bb;
677638fd1498Szrj   rtx_insn *insn;
677738fd1498Szrj   rtx pat, *p_sets;
677838fd1498Szrj   int noutputs;
677938fd1498Szrj 
678038fd1498Szrj   if (!crtl->has_asm_statement)
678138fd1498Szrj     return 0;
678238fd1498Szrj 
678338fd1498Szrj   df_set_flags (DF_DEFER_INSN_RESCAN);
678438fd1498Szrj   FOR_EACH_BB_FN (bb, fun)
678538fd1498Szrj     {
678638fd1498Szrj       FOR_BB_INSNS (bb, insn)
678738fd1498Szrj 	{
678838fd1498Szrj 	  if (!INSN_P (insn))
678938fd1498Szrj 	    continue;
679038fd1498Szrj 
679138fd1498Szrj 	  pat = PATTERN (insn);
679238fd1498Szrj 	  if (GET_CODE (pat) == PARALLEL)
679338fd1498Szrj 	    p_sets = &XVECEXP (pat, 0, 0), noutputs = XVECLEN (pat, 0);
679438fd1498Szrj 	  else if (GET_CODE (pat) == SET)
679538fd1498Szrj 	    p_sets = &PATTERN (insn), noutputs = 1;
679638fd1498Szrj 	  else
679738fd1498Szrj 	    continue;
679838fd1498Szrj 
679938fd1498Szrj 	  if (GET_CODE (*p_sets) == SET
680038fd1498Szrj 	      && GET_CODE (SET_SRC (*p_sets)) == ASM_OPERANDS)
680138fd1498Szrj 	    match_asm_constraints_1 (insn, p_sets, noutputs);
680238fd1498Szrj 	 }
680338fd1498Szrj     }
680438fd1498Szrj 
680538fd1498Szrj   return TODO_df_finish;
680638fd1498Szrj }
680738fd1498Szrj 
680838fd1498Szrj } // anon namespace
680938fd1498Szrj 
681038fd1498Szrj rtl_opt_pass *
make_pass_match_asm_constraints(gcc::context * ctxt)681138fd1498Szrj make_pass_match_asm_constraints (gcc::context *ctxt)
681238fd1498Szrj {
681338fd1498Szrj   return new pass_match_asm_constraints (ctxt);
681438fd1498Szrj }
681538fd1498Szrj 
681638fd1498Szrj 
681738fd1498Szrj #include "gt-function.h"
6818