xref: /openbsd/gnu/usr.bin/gcc/gcc/function.c (revision 34a73b73)
1c87b03e5Sespie /* Expands front end tree to back end RTL for GNU C-Compiler
2c87b03e5Sespie    Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
3c87b03e5Sespie    1998, 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
4c87b03e5Sespie 
5c87b03e5Sespie This file is part of GCC.
6c87b03e5Sespie 
7c87b03e5Sespie GCC is free software; you can redistribute it and/or modify it under
8c87b03e5Sespie the terms of the GNU General Public License as published by the Free
9c87b03e5Sespie Software Foundation; either version 2, or (at your option) any later
10c87b03e5Sespie version.
11c87b03e5Sespie 
12c87b03e5Sespie GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13c87b03e5Sespie WARRANTY; without even the implied warranty of MERCHANTABILITY or
14c87b03e5Sespie FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15c87b03e5Sespie for more details.
16c87b03e5Sespie 
17c87b03e5Sespie You should have received a copy of the GNU General Public License
18c87b03e5Sespie along with GCC; see the file COPYING.  If not, write to the Free
19c87b03e5Sespie Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20c87b03e5Sespie 02111-1307, USA.  */
21c87b03e5Sespie 
22c87b03e5Sespie /* This file handles the generation of rtl code from tree structure
23c87b03e5Sespie    at the level of the function as a whole.
24c87b03e5Sespie    It creates the rtl expressions for parameters and auto variables
25c87b03e5Sespie    and has full responsibility for allocating stack slots.
26c87b03e5Sespie 
27c87b03e5Sespie    `expand_function_start' is called at the beginning of a function,
28c87b03e5Sespie    before the function body is parsed, and `expand_function_end' is
29c87b03e5Sespie    called after parsing the body.
30c87b03e5Sespie 
31c87b03e5Sespie    Call `assign_stack_local' to allocate a stack slot for a local variable.
32c87b03e5Sespie    This is usually done during the RTL generation for the function body,
33c87b03e5Sespie    but it can also be done in the reload pass when a pseudo-register does
34c87b03e5Sespie    not get a hard register.
35c87b03e5Sespie 
36c87b03e5Sespie    Call `put_var_into_stack' when you learn, belatedly, that a variable
37c87b03e5Sespie    previously given a pseudo-register must in fact go in the stack.
38c87b03e5Sespie    This function changes the DECL_RTL to be a stack slot instead of a reg
39c87b03e5Sespie    then scans all the RTL instructions so far generated to correct them.  */
40c87b03e5Sespie 
41c87b03e5Sespie #include "config.h"
42c87b03e5Sespie #include "system.h"
43c87b03e5Sespie #include "rtl.h"
44c87b03e5Sespie #include "tree.h"
45c87b03e5Sespie #include "flags.h"
46c87b03e5Sespie #include "except.h"
47c87b03e5Sespie #include "function.h"
48c87b03e5Sespie #include "expr.h"
49c87b03e5Sespie #include "libfuncs.h"
50c87b03e5Sespie #include "regs.h"
51c87b03e5Sespie #include "hard-reg-set.h"
52c87b03e5Sespie #include "insn-config.h"
53c87b03e5Sespie #include "recog.h"
54c87b03e5Sespie #include "output.h"
55c87b03e5Sespie #include "basic-block.h"
56c87b03e5Sespie #include "toplev.h"
57c87b03e5Sespie #include "hashtab.h"
58c87b03e5Sespie #include "ggc.h"
59c87b03e5Sespie #include "tm_p.h"
60c87b03e5Sespie #include "integrate.h"
61c87b03e5Sespie #include "langhooks.h"
625982dd4aSetoh #include "protector.h"
63c87b03e5Sespie 
64c87b03e5Sespie #ifndef TRAMPOLINE_ALIGNMENT
65c87b03e5Sespie #define TRAMPOLINE_ALIGNMENT FUNCTION_BOUNDARY
66c87b03e5Sespie #endif
67c87b03e5Sespie 
68c87b03e5Sespie #ifndef LOCAL_ALIGNMENT
69c87b03e5Sespie #define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
70c87b03e5Sespie #endif
71c87b03e5Sespie 
72c87b03e5Sespie /* Some systems use __main in a way incompatible with its use in gcc, in these
73c87b03e5Sespie    cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
74c87b03e5Sespie    give the same symbol without quotes for an alternative entry point.  You
75c87b03e5Sespie    must define both, or neither.  */
76c87b03e5Sespie #ifndef NAME__MAIN
77c87b03e5Sespie #define NAME__MAIN "__main"
78c87b03e5Sespie #endif
79c87b03e5Sespie 
/* Round a value down to the largest multiple of the required alignment
   that does not exceed it.  Avoid using division in case the value is
   negative.  Assume the alignment is a power of two.  */
83c87b03e5Sespie #define FLOOR_ROUND(VALUE,ALIGN) ((VALUE) & ~((ALIGN) - 1))
84c87b03e5Sespie 
85c87b03e5Sespie /* Similar, but round to the next highest integer that meets the
86c87b03e5Sespie    alignment.  */
87c87b03e5Sespie #define CEIL_ROUND(VALUE,ALIGN)	(((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))
88c87b03e5Sespie 
89c87b03e5Sespie /* NEED_SEPARATE_AP means that we cannot derive ap from the value of fp
90c87b03e5Sespie    during rtl generation.  If they are different register numbers, this is
91c87b03e5Sespie    always true.  It may also be true if
92c87b03e5Sespie    FIRST_PARM_OFFSET - STARTING_FRAME_OFFSET is not a constant during rtl
93c87b03e5Sespie    generation.  See fix_lexical_addr for details.  */
94c87b03e5Sespie 
95c87b03e5Sespie #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
96c87b03e5Sespie #define NEED_SEPARATE_AP
97c87b03e5Sespie #endif
98c87b03e5Sespie 
99c87b03e5Sespie /* Nonzero if function being compiled doesn't contain any calls
100c87b03e5Sespie    (ignoring the prologue and epilogue).  This is set prior to
101c87b03e5Sespie    local register allocation and is valid for the remaining
102c87b03e5Sespie    compiler passes.  */
103c87b03e5Sespie int current_function_is_leaf;
104c87b03e5Sespie 
105c87b03e5Sespie /* Nonzero if function being compiled doesn't contain any instructions
106c87b03e5Sespie    that can throw an exception.  This is set prior to final.  */
107c87b03e5Sespie 
108c87b03e5Sespie int current_function_nothrow;
109c87b03e5Sespie 
110c87b03e5Sespie /* Nonzero if function being compiled doesn't modify the stack pointer
111c87b03e5Sespie    (ignoring the prologue and epilogue).  This is only valid after
112c87b03e5Sespie    life_analysis has run.  */
113c87b03e5Sespie int current_function_sp_is_unchanging;
114c87b03e5Sespie 
115c87b03e5Sespie /* Nonzero if the function being compiled is a leaf function which only
116c87b03e5Sespie    uses leaf registers.  This is valid after reload (specifically after
117c87b03e5Sespie    sched2) and is useful only if the port defines LEAF_REGISTERS.  */
118c87b03e5Sespie int current_function_uses_only_leaf_regs;
119c87b03e5Sespie 
120c87b03e5Sespie /* Nonzero once virtual register instantiation has been done.
121c87b03e5Sespie    assign_stack_local uses frame_pointer_rtx when this is nonzero.
122c87b03e5Sespie    calls.c:emit_library_call_value_1 uses it to set up
123c87b03e5Sespie    post-instantiation libcalls.  */
124c87b03e5Sespie int virtuals_instantiated;
125c87b03e5Sespie 
12606dc6460Sespie /* Nonzero if at least one trampoline has been created.  */
12706dc6460Sespie int trampolines_created;
12806dc6460Sespie 
129c87b03e5Sespie /* Assign unique numbers to labels generated for profiling, debugging, etc.  */
130c87b03e5Sespie static int funcdef_no;
131c87b03e5Sespie 
132c87b03e5Sespie /* These variables hold pointers to functions to create and destroy
133c87b03e5Sespie    target specific, per-function data structures.  */
134c87b03e5Sespie struct machine_function * (*init_machine_status) PARAMS ((void));
135c87b03e5Sespie 
136c87b03e5Sespie /* The FUNCTION_DECL for an inline function currently being expanded.  */
137c87b03e5Sespie tree inline_function_decl;
138c87b03e5Sespie 
139c87b03e5Sespie /* The currently compiled function.  */
140c87b03e5Sespie struct function *cfun = 0;
141c87b03e5Sespie 
142c87b03e5Sespie /* These arrays record the INSN_UIDs of the prologue and epilogue insns.  */
143c87b03e5Sespie static GTY(()) varray_type prologue;
144c87b03e5Sespie static GTY(()) varray_type epilogue;
145c87b03e5Sespie 
146c87b03e5Sespie /* Array of INSN_UIDs to hold the INSN_UIDs for each sibcall epilogue
147c87b03e5Sespie    in this function.  */
148c87b03e5Sespie static GTY(()) varray_type sibcall_epilogue;
1495982dd4aSetoh 
1505982dd4aSetoh /* Current boundary mark for character arrays.  */
1515982dd4aSetoh int temp_boundary_mark = 0;
1525982dd4aSetoh 
153c87b03e5Sespie 
154c87b03e5Sespie /* In order to evaluate some expressions, such as function calls returning
155c87b03e5Sespie    structures in memory, we need to temporarily allocate stack locations.
156c87b03e5Sespie    We record each allocated temporary in the following structure.
157c87b03e5Sespie 
158c87b03e5Sespie    Associated with each temporary slot is a nesting level.  When we pop up
159c87b03e5Sespie    one level, all temporaries associated with the previous level are freed.
160c87b03e5Sespie    Normally, all temporaries are freed after the execution of the statement
161c87b03e5Sespie    in which they were created.  However, if we are inside a ({...}) grouping,
162c87b03e5Sespie    the result may be in a temporary and hence must be preserved.  If the
163c87b03e5Sespie    result could be in a temporary, we preserve it if we can determine which
164c87b03e5Sespie    one it is in.  If we cannot determine which temporary may contain the
165c87b03e5Sespie    result, all temporaries are preserved.  A temporary is preserved by
166c87b03e5Sespie    pretending it was allocated at the previous nesting level.
167c87b03e5Sespie 
168c87b03e5Sespie    Automatic variables are also assigned temporary slots, at the nesting
169c87b03e5Sespie    level where they are defined.  They are marked a "kept" so that
170c87b03e5Sespie    free_temp_slots will not free them.  */
171c87b03e5Sespie 
struct temp_slot GTY(())
{
  /* Points to next temporary slot.  */
  struct temp_slot *next;
  /* The rtx used to reference the slot.  */
  rtx slot;
  /* The rtx used to represent the address if not the address of the
     slot above.  May be an EXPR_LIST if multiple addresses exist.  */
  rtx address;
  /* The alignment (in bits) of the slot.  */
  unsigned int align;
  /* The size, in units, of the slot.  */
  HOST_WIDE_INT size;
  /* The type of the object in the slot, or zero if it doesn't correspond
     to a type.  We use this to determine whether a slot can be reused.
     It can be reused if objects of the type of the new slot will always
     conflict with objects of the type of the old slot.  */
  tree type;
  /* The value of `sequence_rtl_expr' when this temporary is allocated.  */
  tree rtl_expr;
  /* Nonzero if this temporary is currently in use.  */
  char in_use;
  /* Nonzero if this temporary has its address taken.  */
  char addr_taken;
  /* Nesting level at which this slot is being used.  */
  int level;
  /* Nonzero if this should survive a call to free_temp_slots.  */
  int keep;
  /* The offset of the slot from the frame_pointer, including extra space
     for alignment.  This info is for combine_temp_slots.  */
  HOST_WIDE_INT base_offset;
  /* The size of the slot, including extra space for alignment.  This
     info is for combine_temp_slots.  */
  HOST_WIDE_INT full_size;
  /* Boundary mark separating character-array slots from other slots.
     This info is for the ProPolice stack protector.  */
  int boundary_mark;
};
209c87b03e5Sespie 
210c87b03e5Sespie /* This structure is used to record MEMs or pseudos used to replace VAR, any
211c87b03e5Sespie    SUBREGs of VAR, and any MEMs containing VAR as an address.  We need to
212c87b03e5Sespie    maintain this list in case two operands of an insn were required to match;
213c87b03e5Sespie    in that case we must ensure we use the same replacement.  */
214c87b03e5Sespie 
struct fixup_replacement GTY(())
{
  /* The original rtx: VAR, a SUBREG of VAR, or a MEM containing VAR.  */
  rtx old;
  /* The MEM or pseudo used to replace OLD; reused so that matched
     operands get the same replacement.  */
  rtx new;
  /* Next replacement pair in the chain.  */
  struct fixup_replacement *next;
};
221c87b03e5Sespie 
/* Hash table entry pairing a MEM with the insns that reference it.  */
struct insns_for_mem_entry
{
  /* A MEM.  */
  rtx key;
  /* These are the INSNs which reference the MEM.  */
  rtx insns;
};
229c87b03e5Sespie 
230c87b03e5Sespie /* Forward declarations.  */
231c87b03e5Sespie 
232c87b03e5Sespie static rtx assign_stack_local_1 PARAMS ((enum machine_mode, HOST_WIDE_INT,
233c87b03e5Sespie 					 int, struct function *));
234c87b03e5Sespie static struct temp_slot *find_temp_slot_from_address  PARAMS ((rtx));
235c87b03e5Sespie static void put_reg_into_stack	PARAMS ((struct function *, rtx, tree,
236e97b50d0Smiod 					 enum machine_mode, unsigned int,
237e97b50d0Smiod 					 int, int, int, htab_t));
238c87b03e5Sespie static void schedule_fixup_var_refs PARAMS ((struct function *, rtx, tree,
239c87b03e5Sespie 					     enum machine_mode,
240c87b03e5Sespie 					     htab_t));
241c87b03e5Sespie static void fixup_var_refs	PARAMS ((rtx, enum machine_mode, int, rtx,
242c87b03e5Sespie 					 htab_t));
243c87b03e5Sespie static struct fixup_replacement
244c87b03e5Sespie   *find_fixup_replacement	PARAMS ((struct fixup_replacement **, rtx));
245c87b03e5Sespie static void fixup_var_refs_insns PARAMS ((rtx, rtx, enum machine_mode,
246c87b03e5Sespie 					  int, int, rtx));
247c87b03e5Sespie static void fixup_var_refs_insns_with_hash
248c87b03e5Sespie 				PARAMS ((htab_t, rtx,
249c87b03e5Sespie 					 enum machine_mode, int, rtx));
250c87b03e5Sespie static void fixup_var_refs_insn PARAMS ((rtx, rtx, enum machine_mode,
251c87b03e5Sespie 					 int, int, rtx));
252c87b03e5Sespie static void fixup_var_refs_1	PARAMS ((rtx, enum machine_mode, rtx *, rtx,
253c87b03e5Sespie 					 struct fixup_replacement **, rtx));
254c87b03e5Sespie static rtx fixup_memory_subreg	PARAMS ((rtx, rtx, enum machine_mode, int));
255c87b03e5Sespie static rtx walk_fixup_memory_subreg  PARAMS ((rtx, rtx, enum machine_mode,
256c87b03e5Sespie 					      int));
257c87b03e5Sespie static rtx fixup_stack_1	PARAMS ((rtx, rtx));
258c87b03e5Sespie static void optimize_bit_field	PARAMS ((rtx, rtx, rtx *));
259c87b03e5Sespie static void instantiate_decls	PARAMS ((tree, int));
260c87b03e5Sespie static void instantiate_decls_1	PARAMS ((tree, int));
261c87b03e5Sespie static void instantiate_decl	PARAMS ((rtx, HOST_WIDE_INT, int));
262c87b03e5Sespie static rtx instantiate_new_reg	PARAMS ((rtx, HOST_WIDE_INT *));
263c87b03e5Sespie static int instantiate_virtual_regs_1 PARAMS ((rtx *, rtx, int));
264c87b03e5Sespie static void delete_handlers	PARAMS ((void));
265c87b03e5Sespie static void pad_to_arg_alignment PARAMS ((struct args_size *, int,
266c87b03e5Sespie 					  struct args_size *));
267c87b03e5Sespie static void pad_below		PARAMS ((struct args_size *, enum machine_mode,
268c87b03e5Sespie 					 tree));
269c87b03e5Sespie static rtx round_trampoline_addr PARAMS ((rtx));
270c87b03e5Sespie static rtx adjust_trampoline_addr PARAMS ((rtx));
271c87b03e5Sespie static tree *identify_blocks_1	PARAMS ((rtx, tree *, tree *, tree *));
272c87b03e5Sespie static void reorder_blocks_0	PARAMS ((tree));
273c87b03e5Sespie static void reorder_blocks_1	PARAMS ((rtx, tree, varray_type *));
274c87b03e5Sespie static void reorder_fix_fragments PARAMS ((tree));
275c87b03e5Sespie static tree blocks_nreverse	PARAMS ((tree));
276c87b03e5Sespie static int all_blocks		PARAMS ((tree, tree *));
277c87b03e5Sespie static tree *get_block_vector   PARAMS ((tree, int *));
278c87b03e5Sespie extern tree debug_find_var_in_block_tree PARAMS ((tree, tree));
/* We always define `record_insns' even if it's not used so that we
   can always export `prologue_epilogue_contains'.  */
281c87b03e5Sespie static void record_insns	PARAMS ((rtx, varray_type *)) ATTRIBUTE_UNUSED;
282c87b03e5Sespie static int contains		PARAMS ((rtx, varray_type));
283c87b03e5Sespie #ifdef HAVE_return
284c87b03e5Sespie static void emit_return_into_block PARAMS ((basic_block, rtx));
285c87b03e5Sespie #endif
286c87b03e5Sespie static void put_addressof_into_stack PARAMS ((rtx, htab_t));
287c87b03e5Sespie static bool purge_addressof_1 PARAMS ((rtx *, rtx, int, int,
288c87b03e5Sespie 					  htab_t));
289c87b03e5Sespie static void purge_single_hard_subreg_set PARAMS ((rtx));
290c87b03e5Sespie #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
291c87b03e5Sespie static rtx keep_stack_depressed PARAMS ((rtx));
292c87b03e5Sespie #endif
293c87b03e5Sespie static int is_addressof		PARAMS ((rtx *, void *));
294c87b03e5Sespie static hashval_t insns_for_mem_hash PARAMS ((const void *));
295c87b03e5Sespie static int insns_for_mem_comp PARAMS ((const void *, const void *));
296c87b03e5Sespie static int insns_for_mem_walk   PARAMS ((rtx *, void *));
297c87b03e5Sespie static void compute_insns_for_mem PARAMS ((rtx, rtx, htab_t));
298c87b03e5Sespie static void prepare_function_start PARAMS ((void));
299c87b03e5Sespie static void do_clobber_return_reg PARAMS ((rtx, void *));
300c87b03e5Sespie static void do_use_return_reg PARAMS ((rtx, void *));
301c87b03e5Sespie static void instantiate_virtual_regs_lossage PARAMS ((rtx));
302c87b03e5Sespie 
303c87b03e5Sespie /* Pointer to chain of `struct function' for containing functions.  */
304c87b03e5Sespie static GTY(()) struct function *outer_function_chain;
305c87b03e5Sespie 
306c87b03e5Sespie /* Given a function decl for a containing function,
307c87b03e5Sespie    return the `struct function' for it.  */
308c87b03e5Sespie 
309c87b03e5Sespie struct function *
find_function_data(decl)310c87b03e5Sespie find_function_data (decl)
311c87b03e5Sespie      tree decl;
312c87b03e5Sespie {
313c87b03e5Sespie   struct function *p;
314c87b03e5Sespie 
315c87b03e5Sespie   for (p = outer_function_chain; p; p = p->outer)
316c87b03e5Sespie     if (p->decl == decl)
317c87b03e5Sespie       return p;
318c87b03e5Sespie 
319c87b03e5Sespie   abort ();
320c87b03e5Sespie }
321c87b03e5Sespie 
322c87b03e5Sespie /* Save the current context for compilation of a nested function.
323c87b03e5Sespie    This is called from language-specific code.  The caller should use
324c87b03e5Sespie    the enter_nested langhook to save any language-specific state,
325c87b03e5Sespie    since this function knows only about language-independent
326c87b03e5Sespie    variables.  */
327c87b03e5Sespie 
void
push_function_context_to (context)
     tree context;
{
  struct function *p;

  /* Record in the containing function (CONTEXT) that it contains a
     nested function.  */
  if (context)
    {
      if (context == current_function_decl)
	cfun->contains_functions = 1;
      else
	{
	  struct function *containing = find_function_data (context);
	  containing->contains_functions = 1;
	}
    }

  /* Make sure there is a `struct function' to save.  */
  if (cfun == 0)
    init_dummy_function_start ();
  p = cfun;

  /* Push the current function onto the chain of containing functions,
     and clear its queue of deferred variable fixups.  */
  p->outer = outer_function_chain;
  outer_function_chain = p;
  p->fixup_var_refs_queue = 0;

  /* Let the front end save any language-specific per-function state.  */
  (*lang_hooks.function.enter_nested) (p);

  /* The nested function will get a fresh `struct function'.  */
  cfun = 0;
}
357c87b03e5Sespie 
358c87b03e5Sespie void
push_function_context()359c87b03e5Sespie push_function_context ()
360c87b03e5Sespie {
361c87b03e5Sespie   push_function_context_to (current_function_decl);
362c87b03e5Sespie }
363c87b03e5Sespie 
364c87b03e5Sespie /* Restore the last saved context, at the end of a nested function.
365c87b03e5Sespie    This function is called from language-specific code.  */
366c87b03e5Sespie 
367c87b03e5Sespie void
pop_function_context_from(context)368c87b03e5Sespie pop_function_context_from (context)
369c87b03e5Sespie      tree context ATTRIBUTE_UNUSED;
370c87b03e5Sespie {
371c87b03e5Sespie   struct function *p = outer_function_chain;
372c87b03e5Sespie   struct var_refs_queue *queue;
373c87b03e5Sespie 
374c87b03e5Sespie   cfun = p;
375c87b03e5Sespie   outer_function_chain = p->outer;
376c87b03e5Sespie 
377c87b03e5Sespie   current_function_decl = p->decl;
378c87b03e5Sespie   reg_renumber = 0;
379c87b03e5Sespie 
380c87b03e5Sespie   restore_emit_status (p);
381c87b03e5Sespie 
382c87b03e5Sespie   (*lang_hooks.function.leave_nested) (p);
383c87b03e5Sespie 
384c87b03e5Sespie   /* Finish doing put_var_into_stack for any of our variables which became
385c87b03e5Sespie      addressable during the nested function.  If only one entry has to be
386c87b03e5Sespie      fixed up, just do that one.  Otherwise, first make a list of MEMs that
387c87b03e5Sespie      are not to be unshared.  */
388c87b03e5Sespie   if (p->fixup_var_refs_queue == 0)
389c87b03e5Sespie     ;
390c87b03e5Sespie   else if (p->fixup_var_refs_queue->next == 0)
391c87b03e5Sespie     fixup_var_refs (p->fixup_var_refs_queue->modified,
392c87b03e5Sespie 		    p->fixup_var_refs_queue->promoted_mode,
393c87b03e5Sespie 		    p->fixup_var_refs_queue->unsignedp,
394c87b03e5Sespie 		    p->fixup_var_refs_queue->modified, 0);
395c87b03e5Sespie   else
396c87b03e5Sespie     {
397c87b03e5Sespie       rtx list = 0;
398c87b03e5Sespie 
399c87b03e5Sespie       for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
400c87b03e5Sespie 	list = gen_rtx_EXPR_LIST (VOIDmode, queue->modified, list);
401c87b03e5Sespie 
402c87b03e5Sespie       for (queue = p->fixup_var_refs_queue; queue; queue = queue->next)
403c87b03e5Sespie 	fixup_var_refs (queue->modified, queue->promoted_mode,
404c87b03e5Sespie 			queue->unsignedp, list, 0);
405c87b03e5Sespie 
406c87b03e5Sespie     }
407c87b03e5Sespie 
408c87b03e5Sespie   p->fixup_var_refs_queue = 0;
409c87b03e5Sespie 
410c87b03e5Sespie   /* Reset variables that have known state during rtx generation.  */
411c87b03e5Sespie   rtx_equal_function_value_matters = 1;
412c87b03e5Sespie   virtuals_instantiated = 0;
413c87b03e5Sespie   generating_concat_p = 1;
414c87b03e5Sespie }
415c87b03e5Sespie 
416c87b03e5Sespie void
pop_function_context()417c87b03e5Sespie pop_function_context ()
418c87b03e5Sespie {
419c87b03e5Sespie   pop_function_context_from (current_function_decl);
420c87b03e5Sespie }
421c87b03e5Sespie 
422c87b03e5Sespie /* Clear out all parts of the state in F that can safely be discarded
423c87b03e5Sespie    after the function has been parsed, but not compiled, to let
424c87b03e5Sespie    garbage collection reclaim the memory.  */
425c87b03e5Sespie 
426c87b03e5Sespie void
free_after_parsing(f)427c87b03e5Sespie free_after_parsing (f)
428c87b03e5Sespie      struct function *f;
429c87b03e5Sespie {
430c87b03e5Sespie   /* f->expr->forced_labels is used by code generation.  */
431c87b03e5Sespie   /* f->emit->regno_reg_rtx is used by code generation.  */
432c87b03e5Sespie   /* f->varasm is used by code generation.  */
433c87b03e5Sespie   /* f->eh->eh_return_stub_label is used by code generation.  */
434c87b03e5Sespie 
435c87b03e5Sespie   (*lang_hooks.function.final) (f);
436c87b03e5Sespie   f->stmt = NULL;
437c87b03e5Sespie }
438c87b03e5Sespie 
439c87b03e5Sespie /* Clear out all parts of the state in F that can safely be discarded
440c87b03e5Sespie    after the function has been compiled, to let garbage collection
441c87b03e5Sespie    reclaim the memory.  */
442c87b03e5Sespie 
443c87b03e5Sespie void
free_after_compilation(f)444c87b03e5Sespie free_after_compilation (f)
445c87b03e5Sespie      struct function *f;
446c87b03e5Sespie {
447c87b03e5Sespie   f->eh = NULL;
448c87b03e5Sespie   f->expr = NULL;
449c87b03e5Sespie   f->emit = NULL;
450c87b03e5Sespie   f->varasm = NULL;
451c87b03e5Sespie   f->machine = NULL;
452c87b03e5Sespie 
453c87b03e5Sespie   f->x_temp_slots = NULL;
454c87b03e5Sespie   f->arg_offset_rtx = NULL;
455c87b03e5Sespie   f->return_rtx = NULL;
456c87b03e5Sespie   f->internal_arg_pointer = NULL;
457c87b03e5Sespie   f->x_nonlocal_labels = NULL;
458c87b03e5Sespie   f->x_nonlocal_goto_handler_slots = NULL;
459c87b03e5Sespie   f->x_nonlocal_goto_handler_labels = NULL;
460c87b03e5Sespie   f->x_nonlocal_goto_stack_level = NULL;
461c87b03e5Sespie   f->x_cleanup_label = NULL;
462c87b03e5Sespie   f->x_return_label = NULL;
463c87b03e5Sespie   f->computed_goto_common_label = NULL;
464c87b03e5Sespie   f->computed_goto_common_reg = NULL;
465c87b03e5Sespie   f->x_save_expr_regs = NULL;
466c87b03e5Sespie   f->x_stack_slot_list = NULL;
467c87b03e5Sespie   f->x_rtl_expr_chain = NULL;
468c87b03e5Sespie   f->x_tail_recursion_label = NULL;
469c87b03e5Sespie   f->x_tail_recursion_reentry = NULL;
470c87b03e5Sespie   f->x_arg_pointer_save_area = NULL;
471c87b03e5Sespie   f->x_clobber_return_insn = NULL;
472c87b03e5Sespie   f->x_context_display = NULL;
473c87b03e5Sespie   f->x_trampoline_list = NULL;
474c87b03e5Sespie   f->x_parm_birth_insn = NULL;
475c87b03e5Sespie   f->x_last_parm_insn = NULL;
476c87b03e5Sespie   f->x_parm_reg_stack_loc = NULL;
477c87b03e5Sespie   f->fixup_var_refs_queue = NULL;
478c87b03e5Sespie   f->original_arg_vector = NULL;
479c87b03e5Sespie   f->original_decl_initial = NULL;
480c87b03e5Sespie   f->inl_last_parm_insn = NULL;
481c87b03e5Sespie   f->epilogue_delay_list = NULL;
482c87b03e5Sespie }
483c87b03e5Sespie 
484c87b03e5Sespie /* Allocate fixed slots in the stack frame of the current function.  */
485c87b03e5Sespie 
486c87b03e5Sespie /* Return size needed for stack frame based on slots so far allocated in
487c87b03e5Sespie    function F.
488c87b03e5Sespie    This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
489c87b03e5Sespie    the caller may have to do that.  */
490c87b03e5Sespie 
491c87b03e5Sespie HOST_WIDE_INT
get_func_frame_size(f)492c87b03e5Sespie get_func_frame_size (f)
493c87b03e5Sespie      struct function *f;
494c87b03e5Sespie {
495c87b03e5Sespie #ifdef FRAME_GROWS_DOWNWARD
496c87b03e5Sespie   return -f->x_frame_offset;
497c87b03e5Sespie #else
498c87b03e5Sespie   return f->x_frame_offset;
499c87b03e5Sespie #endif
500c87b03e5Sespie }
501c87b03e5Sespie 
502c87b03e5Sespie /* Return size needed for stack frame based on slots so far allocated.
503c87b03e5Sespie    This size counts from zero.  It is not rounded to PREFERRED_STACK_BOUNDARY;
504c87b03e5Sespie    the caller may have to do that.  */
505c87b03e5Sespie HOST_WIDE_INT
get_frame_size()506c87b03e5Sespie get_frame_size ()
507c87b03e5Sespie {
508c87b03e5Sespie   return get_func_frame_size (cfun);
509c87b03e5Sespie }
510c87b03e5Sespie 
511c87b03e5Sespie /* Allocate a stack slot of SIZE bytes and return a MEM rtx for it
512c87b03e5Sespie    with machine mode MODE.
513c87b03e5Sespie 
514c87b03e5Sespie    ALIGN controls the amount of alignment for the address of the slot:
515c87b03e5Sespie    0 means according to MODE,
516c87b03e5Sespie    -1 means use BIGGEST_ALIGNMENT and round size to multiple of that,
517e97b50d0Smiod    -2 means use BITS_PER_UNIT,
518c87b03e5Sespie    positive specifies alignment boundary in bits.
519c87b03e5Sespie 
520c87b03e5Sespie    We do not round to stack_boundary here.
521c87b03e5Sespie 
522c87b03e5Sespie    FUNCTION specifies the function to allocate in.  */
523c87b03e5Sespie 
524c87b03e5Sespie static rtx
assign_stack_local_1(mode,size,align,function)525c87b03e5Sespie assign_stack_local_1 (mode, size, align, function)
526c87b03e5Sespie      enum machine_mode mode;
527c87b03e5Sespie      HOST_WIDE_INT size;
528c87b03e5Sespie      int align;
529c87b03e5Sespie      struct function *function;
530c87b03e5Sespie {
531c87b03e5Sespie   rtx x, addr;
532c87b03e5Sespie   int bigend_correction = 0;
533c87b03e5Sespie   int alignment;
534c87b03e5Sespie   int frame_off, frame_alignment, frame_phase;
535c87b03e5Sespie 
536c87b03e5Sespie   if (align == 0)
537c87b03e5Sespie     {
538c87b03e5Sespie       tree type;
539c87b03e5Sespie 
540c87b03e5Sespie       if (mode == BLKmode)
541c87b03e5Sespie 	alignment = BIGGEST_ALIGNMENT;
542c87b03e5Sespie       else
543c87b03e5Sespie 	alignment = GET_MODE_ALIGNMENT (mode);
544c87b03e5Sespie 
545c87b03e5Sespie       /* Allow the target to (possibly) increase the alignment of this
546c87b03e5Sespie 	 stack slot.  */
547c87b03e5Sespie       type = (*lang_hooks.types.type_for_mode) (mode, 0);
548c87b03e5Sespie       if (type)
549c87b03e5Sespie 	alignment = LOCAL_ALIGNMENT (type, alignment);
550c87b03e5Sespie 
551c87b03e5Sespie       alignment /= BITS_PER_UNIT;
552c87b03e5Sespie     }
553c87b03e5Sespie   else if (align == -1)
554c87b03e5Sespie     {
555c87b03e5Sespie       alignment = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
556c87b03e5Sespie       size = CEIL_ROUND (size, alignment);
557c87b03e5Sespie     }
558e97b50d0Smiod   else if (align == -2)
559e97b50d0Smiod     alignment = 1; /* BITS_PER_UNIT / BITS_PER_UNIT */
560c87b03e5Sespie   else
561c87b03e5Sespie     alignment = align / BITS_PER_UNIT;
562c87b03e5Sespie 
563c87b03e5Sespie #ifdef FRAME_GROWS_DOWNWARD
564c87b03e5Sespie   function->x_frame_offset -= size;
565c87b03e5Sespie #endif
566c87b03e5Sespie 
567c87b03e5Sespie   /* Ignore alignment we can't do with expected alignment of the boundary.  */
568c87b03e5Sespie   if (alignment * BITS_PER_UNIT > PREFERRED_STACK_BOUNDARY)
569c87b03e5Sespie     alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
570c87b03e5Sespie 
571c87b03e5Sespie   if (function->stack_alignment_needed < alignment * BITS_PER_UNIT)
572c87b03e5Sespie     function->stack_alignment_needed = alignment * BITS_PER_UNIT;
573c87b03e5Sespie 
574c87b03e5Sespie   /* Calculate how many bytes the start of local variables is off from
575c87b03e5Sespie      stack alignment.  */
576c87b03e5Sespie   frame_alignment = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
577c87b03e5Sespie   frame_off = STARTING_FRAME_OFFSET % frame_alignment;
578c87b03e5Sespie   frame_phase = frame_off ? frame_alignment - frame_off : 0;
579c87b03e5Sespie 
580c87b03e5Sespie   /* Round frame offset to that alignment.
581c87b03e5Sespie      We must be careful here, since FRAME_OFFSET might be negative and
582c87b03e5Sespie      division with a negative dividend isn't as well defined as we might
583c87b03e5Sespie      like.  So we instead assume that ALIGNMENT is a power of two and
584c87b03e5Sespie      use logical operations which are unambiguous.  */
585c87b03e5Sespie #ifdef FRAME_GROWS_DOWNWARD
586c87b03e5Sespie   function->x_frame_offset = FLOOR_ROUND (function->x_frame_offset - frame_phase, alignment) + frame_phase;
587c87b03e5Sespie #else
588c87b03e5Sespie   function->x_frame_offset = CEIL_ROUND (function->x_frame_offset - frame_phase, alignment) + frame_phase;
589c87b03e5Sespie #endif
590c87b03e5Sespie 
591c87b03e5Sespie   /* On a big-endian machine, if we are allocating more space than we will use,
592c87b03e5Sespie      use the least significant bytes of those that are allocated.  */
593c87b03e5Sespie   if (BYTES_BIG_ENDIAN && mode != BLKmode)
594c87b03e5Sespie     bigend_correction = size - GET_MODE_SIZE (mode);
595c87b03e5Sespie 
596c87b03e5Sespie   /* If we have already instantiated virtual registers, return the actual
597c87b03e5Sespie      address relative to the frame pointer.  */
598c87b03e5Sespie   if (function == cfun && virtuals_instantiated)
599c87b03e5Sespie     addr = plus_constant (frame_pointer_rtx,
600c87b03e5Sespie 			  (frame_offset + bigend_correction
601c87b03e5Sespie 			   + STARTING_FRAME_OFFSET));
602c87b03e5Sespie   else
603c87b03e5Sespie     addr = plus_constant (virtual_stack_vars_rtx,
604c87b03e5Sespie 			  function->x_frame_offset + bigend_correction);
605c87b03e5Sespie 
606c87b03e5Sespie #ifndef FRAME_GROWS_DOWNWARD
607c87b03e5Sespie   function->x_frame_offset += size;
608c87b03e5Sespie #endif
609c87b03e5Sespie 
610c87b03e5Sespie   x = gen_rtx_MEM (mode, addr);
611c87b03e5Sespie 
612c87b03e5Sespie   function->x_stack_slot_list
613c87b03e5Sespie     = gen_rtx_EXPR_LIST (VOIDmode, x, function->x_stack_slot_list);
614c87b03e5Sespie 
615c87b03e5Sespie   return x;
616c87b03e5Sespie }
617c87b03e5Sespie 
618c87b03e5Sespie /* Wrapper around assign_stack_local_1;  assign a local stack slot for the
619c87b03e5Sespie    current function.  */
620c87b03e5Sespie 
621c87b03e5Sespie rtx
assign_stack_local(mode,size,align)622c87b03e5Sespie assign_stack_local (mode, size, align)
623c87b03e5Sespie      enum machine_mode mode;
624c87b03e5Sespie      HOST_WIDE_INT size;
625c87b03e5Sespie      int align;
626c87b03e5Sespie {
627c87b03e5Sespie   return assign_stack_local_1 (mode, size, align, cfun);
628c87b03e5Sespie }
629c87b03e5Sespie 
630c87b03e5Sespie /* Allocate a temporary stack slot and record it for possible later
631c87b03e5Sespie    reuse.
632c87b03e5Sespie 
633c87b03e5Sespie    MODE is the machine mode to be given to the returned rtx.
634c87b03e5Sespie 
635c87b03e5Sespie    SIZE is the size in units of the space required.  We do no rounding here
636c87b03e5Sespie    since assign_stack_local will do any required rounding.
637c87b03e5Sespie 
638c87b03e5Sespie    KEEP is 1 if this slot is to be retained after a call to
639c87b03e5Sespie    free_temp_slots.  Automatic variables for a block are allocated
640c87b03e5Sespie    with this flag.  KEEP is 2 if we allocate a longer term temporary,
641c87b03e5Sespie    whose lifetime is controlled by CLEANUP_POINT_EXPRs.  KEEP is 3
642c87b03e5Sespie    if we are to allocate something at an inner level to be treated as
643c87b03e5Sespie    a variable in the block (e.g., a SAVE_EXPR).
   KEEP is 5 if we allocate a place in which to return a structure.
645c87b03e5Sespie 
646c87b03e5Sespie    TYPE is the type that will be used for the stack slot.  */
647c87b03e5Sespie 
rtx
assign_stack_temp_for_type (mode, size, keep, type)
     enum machine_mode mode;
     HOST_WIDE_INT size;
     int keep;
     tree type;
{
  unsigned int align;
  struct temp_slot *p, *best_p = 0;
  rtx slot;
  /* Nonzero when ProPolice stack protection is active and this is a
     block-scope request (KEEP == 1) whose type presumably contains
     character-array/string data (per search_string_def — confirm in the
     propolice patch).  Such requests get a nonzero boundary mark below,
     restricting slot reuse and merging to like-marked slots.  */
  int char_array = (flag_propolice_protection
		    && keep == 1 && search_string_def (type));

  /* If SIZE is -1 it means that somebody tried to allocate a temporary
     of a variable size.  */
  if (size == -1)
    abort ();

  /* Derive the required alignment: BLKmode has no natural mode
     alignment, so be maximally conservative.  */
  if (mode == BLKmode)
    align = BIGGEST_ALIGNMENT;
  else
    align = GET_MODE_ALIGNMENT (mode);

  /* Ask the front end for a type matching MODE if none was supplied,
     then let the target refine the alignment for that type.  */
  if (! type)
    type = (*lang_hooks.types.type_for_mode) (mode, 0);

  if (type)
    align = LOCAL_ALIGNMENT (type, align);

  /* Try to find an available, already-allocated temporary of the proper
     mode which meets the size and alignment requirements.  Choose the
     smallest one with the closest alignment.  An exact size/alignment
     match ends the search immediately.  Propolice-marked requests may
     only reuse slots that already carry a boundary mark.  */
  for (p = temp_slots; p; p = p->next)
    if (p->align >= align && p->size >= size && GET_MODE (p->slot) == mode
	&& ! p->in_use
	&& objects_must_conflict_p (p->type, type)
	&& (best_p == 0 || best_p->size > p->size
	    || (best_p->size == p->size && best_p->align > p->align))
	&& (! char_array || p->boundary_mark != 0))
      {
	if (p->align == align && p->size == size)
	  {
	    best_p = 0;
	    break;
	  }
	best_p = p;
      }

  /* Make our best, if any, the one to use.  */
  if (best_p)
    {
      /* If there are enough aligned bytes left over, make them into a new
	 temp_slot so that the extra bytes don't get wasted.  Do this only
	 for BLKmode slots, so that we can be sure of the alignment.  */
      if (GET_MODE (best_p->slot) == BLKmode)
	{
	  int alignment = best_p->align / BITS_PER_UNIT;
	  HOST_WIDE_INT rounded_size = CEIL_ROUND (size, alignment);

	  if (best_p->size - rounded_size >= alignment)
	    {
	      /* Carve the surplus tail of BEST_P into a fresh free slot
		 covering [base_offset + rounded_size, base_offset + size).  */
	      p = (struct temp_slot *) ggc_alloc (sizeof (struct temp_slot));
	      p->in_use = p->addr_taken = 0;
	      p->size = best_p->size - rounded_size;
	      p->base_offset = best_p->base_offset + rounded_size;
	      p->full_size = best_p->full_size - rounded_size;
	      p->slot = gen_rtx_MEM (BLKmode,
				     plus_constant (XEXP (best_p->slot, 0),
						    rounded_size));
	      p->align = best_p->align;
	      p->address = 0;
	      p->rtl_expr = 0;
	      p->type = best_p->type;
	      /* The split-off tail inherits the boundary mark so the two
		 halves may later be recombined by combine_temp_slots.  */
	      p->boundary_mark = best_p->boundary_mark;
	      p->next = temp_slots;
	      temp_slots = p;

	      stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, p->slot,
						   stack_slot_list);

	      best_p->size = rounded_size;
	      best_p->full_size = rounded_size;
	    }
	}

      p = best_p;
    }

  /* If we still didn't find one, make a new temporary.  */
  if (p == 0)
    {
      HOST_WIDE_INT frame_offset_old = frame_offset;

      p = (struct temp_slot *) ggc_alloc (sizeof (struct temp_slot));

      /* We are passing an explicit alignment request to assign_stack_local.
	 One side effect of that is assign_stack_local will not round SIZE
	 to ensure the frame offset remains suitably aligned.

	 So for requests which depended on the rounding of SIZE, we go ahead
	 and round it now.  We also make sure ALIGNMENT is at least
	 BIGGEST_ALIGNMENT.  */
      if (mode == BLKmode && align < BIGGEST_ALIGNMENT)
	abort ();
      p->slot = assign_stack_local (mode,
				    (mode == BLKmode
				     ? CEIL_ROUND (size, align / BITS_PER_UNIT)
				     : size),
				    align);

      p->align = align;

      /* The following slot size computation is necessary because we don't
	 know the actual size of the temporary slot until assign_stack_local
	 has performed all the frame alignment and size rounding for the
	 requested temporary.  Note that extra space added for alignment
	 can be either above or below this stack slot depending on which
	 way the frame grows.  We include the extra space if and only if it
	 is above this slot.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->size = frame_offset_old - frame_offset;
#else
      p->size = size;
#endif

      /* Now define the fields used by combine_temp_slots.  */
#ifdef FRAME_GROWS_DOWNWARD
      p->base_offset = frame_offset;
      p->full_size = frame_offset_old - frame_offset;
#else
      p->base_offset = frame_offset_old;
      p->full_size = frame_offset - frame_offset_old;
#endif
      p->address = 0;
      /* Give each propolice-protected character-array slot a unique,
	 nonzero boundary mark; ordinary slots get mark 0.  */
      p->boundary_mark = char_array?++temp_boundary_mark:0;
      p->next = temp_slots;
      temp_slots = p;
    }

  /* Claim the slot and record which RTL_EXPR (if any) owns it.  */
  p->in_use = 1;
  p->addr_taken = 0;
  p->rtl_expr = seq_rtl_expr;
  p->type = type;

  /* Map KEEP onto the slot's nesting level and keep flag; see the
     function comment for the meaning of each KEEP value.  */
  if (keep == 2)
    {
      p->level = target_temp_slot_level;
      p->keep = 1;
    }
  else if (keep == 3)
    {
      p->level = var_temp_slot_level;
      p->keep = 0;
    }
  else
    {
      p->level = temp_slot_level;
      p->keep = keep;
    }


  /* Create a new MEM rtx to avoid clobbering MEM flags of old slots.  */
  slot = gen_rtx_MEM (mode, XEXP (p->slot, 0));
  stack_slot_list = gen_rtx_EXPR_LIST (VOIDmode, slot, stack_slot_list);

  /* If we know the alias set for the memory that will be used, use
     it.  If there's no TYPE, then we don't know anything about the
     alias set for the memory.  */
  set_mem_alias_set (slot, type ? get_alias_set (type) : 0);
  set_mem_align (slot, align);

  /* If a type is specified, set the relevant flags.  */
  if (type != 0)
    {
      RTX_UNCHANGING_P (slot) = (lang_hooks.honor_readonly
				 && TYPE_READONLY (type));
      MEM_VOLATILE_P (slot) = TYPE_VOLATILE (type);
      MEM_SET_IN_STRUCT_P (slot, AGGREGATE_TYPE_P (type));
    }

  return slot;
}
830c87b03e5Sespie 
831c87b03e5Sespie /* Allocate a temporary stack slot and record it for possible later
832c87b03e5Sespie    reuse.  First three arguments are same as in preceding function.  */
833c87b03e5Sespie 
834c87b03e5Sespie rtx
assign_stack_temp(mode,size,keep)835c87b03e5Sespie assign_stack_temp (mode, size, keep)
836c87b03e5Sespie      enum machine_mode mode;
837c87b03e5Sespie      HOST_WIDE_INT size;
838c87b03e5Sespie      int keep;
839c87b03e5Sespie {
840c87b03e5Sespie   return assign_stack_temp_for_type (mode, size, keep, NULL_TREE);
841c87b03e5Sespie }
842c87b03e5Sespie 
843c87b03e5Sespie /* Assign a temporary.
844c87b03e5Sespie    If TYPE_OR_DECL is a decl, then we are doing it on behalf of the decl
845c87b03e5Sespie    and so that should be used in error messages.  In either case, we
   allocate a temporary of the given type.
847c87b03e5Sespie    KEEP is as for assign_stack_temp.
848c87b03e5Sespie    MEMORY_REQUIRED is 1 if the result must be addressable stack memory;
849c87b03e5Sespie    it is 0 if a register is OK.
850c87b03e5Sespie    DONT_PROMOTE is 1 if we should not promote values in register
851c87b03e5Sespie    to wider modes.  */
852c87b03e5Sespie 
rtx
assign_temp (type_or_decl, keep, memory_required, dont_promote)
     tree type_or_decl;
     int keep;
     int memory_required;
     int dont_promote ATTRIBUTE_UNUSED;
{
  tree type, decl;
  enum machine_mode mode;
#ifndef PROMOTE_FOR_CALL_ONLY
  int unsignedp;
#endif

  /* Split TYPE_OR_DECL into a type and an optional decl (the decl is
     used only for error reporting below).  */
  if (DECL_P (type_or_decl))
    decl = type_or_decl, type = TREE_TYPE (decl);
  else
    decl = NULL, type = type_or_decl;

  mode = TYPE_MODE (type);
#ifndef PROMOTE_FOR_CALL_ONLY
  unsignedp = TREE_UNSIGNED (type);
#endif

  /* Aggregates (BLKmode) and anything whose address may be needed must
     live on the stack; everything else can go in a pseudo register.  */
  if (mode == BLKmode || memory_required)
    {
      HOST_WIDE_INT size = int_size_in_bytes (type);
      rtx tmp;

      /* Zero sized arrays are GNU C extension.  Set size to 1 to avoid
	 problems with allocating the stack space.  */
      if (size == 0)
	size = 1;

      /* Unfortunately, we don't yet know how to allocate variable-sized
	 temporaries.  However, sometimes we have a fixed upper limit on
	 the size (which is stored in TYPE_ARRAY_MAX_SIZE) and can use that
	 instead.  This is the case for Chill variable-sized strings.  */
      if (size == -1 && TREE_CODE (type) == ARRAY_TYPE
	  && TYPE_ARRAY_MAX_SIZE (type) != NULL_TREE
	  && host_integerp (TYPE_ARRAY_MAX_SIZE (type), 1))
	size = tree_low_cst (TYPE_ARRAY_MAX_SIZE (type), 1);

      /* The size of the temporary may be too large to fit into an integer.  */
      /* ??? Not sure this should happen except for user silliness, so limit
	 this to things that aren't compiler-generated temporaries.  The
	 rest of the time we'll abort in assign_stack_temp_for_type.  */
      if (decl && size == -1
	  && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST)
	{
	  error_with_decl (decl, "size of variable `%s' is too large");
	  /* Recover with a minimal allocation so compilation can go on.  */
	  size = 1;
	}

      tmp = assign_stack_temp_for_type (mode, size, keep, type);
      return tmp;
    }

#ifndef PROMOTE_FOR_CALL_ONLY
  /* Possibly widen MODE the way an argument or return value of this
     type would be promoted, unless the caller forbids it.  */
  if (! dont_promote)
    mode = promote_mode (type, mode, &unsignedp, 0);
#endif

  return gen_reg_rtx (mode);
}
917c87b03e5Sespie 
918c87b03e5Sespie /* Combine temporary stack slots which are adjacent on the stack.
919c87b03e5Sespie 
920c87b03e5Sespie    This allows for better use of already allocated stack space.  This is only
921c87b03e5Sespie    done for BLKmode slots because we can be sure that we won't have alignment
922c87b03e5Sespie    problems in this case.  */
923c87b03e5Sespie 
void
combine_temp_slots ()
{
  struct temp_slot *p, *q;
  struct temp_slot *prev_p, *prev_q;
  int num_slots;

  /* We can't combine slots, because the information about which slot
     is in which alias set will be lost.  */
  if (flag_strict_aliasing)
    return;

  /* If there are a lot of temp slots, don't do anything unless
     high levels of optimization.  */
  if (! flag_expensive_optimizations)
    for (p = temp_slots, num_slots = 0; p; p = p->next, num_slots++)
      if (num_slots > 100 || (num_slots > 10 && optimize == 0))
	return;

  /* For each pair of free BLKmode slots that are physically adjacent in
     the frame, absorb one into the other and unlink it from the list.
     The PREV_P/PREV_Q trailers let us delete the current node in place.  */
  for (p = temp_slots, prev_p = 0; p; p = prev_p ? prev_p->next : temp_slots)
    {
      int delete_p = 0;

      if (! p->in_use && GET_MODE (p->slot) == BLKmode)
	for (q = p->next, prev_q = p; q; q = prev_q->next)
	  {
	    int delete_q = 0;
	    if (! q->in_use && GET_MODE (q->slot) == BLKmode)
	      {
		/* Slots merge only when they carry the same propolice
		   boundary mark, so protected character-array regions
		   are never fused with ordinary temporaries.  */
		if (p->base_offset + p->full_size == q->base_offset &&
		    p->boundary_mark == q->boundary_mark)
		  {
		    /* Q comes after P; combine Q into P.  */
		    p->size += q->size;
		    p->full_size += q->full_size;
		    delete_q = 1;
		  }
		else if (q->base_offset + q->full_size == p->base_offset &&
			 p->boundary_mark == q->boundary_mark)
		  {
		    /* P comes after Q; combine P into Q.  */
		    q->size += p->size;
		    q->full_size += p->full_size;
		    delete_p = 1;
		    break;
		  }
	      }
	    /* Either delete Q or advance past it.  */
	    if (delete_q)
	      prev_q->next = q->next;
	    else
	      prev_q = q;
	  }
      /* Either delete P or advance past it.  */
      if (delete_p)
	{
	  if (prev_p)
	    prev_p->next = p->next;
	  else
	    temp_slots = p->next;
	}
      else
	prev_p = p;
    }
}
989c87b03e5Sespie 
990c87b03e5Sespie /* Find the temp slot corresponding to the object at address X.  */
991c87b03e5Sespie 
992c87b03e5Sespie static struct temp_slot *
find_temp_slot_from_address(x)993c87b03e5Sespie find_temp_slot_from_address (x)
994c87b03e5Sespie      rtx x;
995c87b03e5Sespie {
996c87b03e5Sespie   struct temp_slot *p;
997c87b03e5Sespie   rtx next;
998c87b03e5Sespie 
999c87b03e5Sespie   for (p = temp_slots; p; p = p->next)
1000c87b03e5Sespie     {
1001c87b03e5Sespie       if (! p->in_use)
1002c87b03e5Sespie 	continue;
1003c87b03e5Sespie 
1004c87b03e5Sespie       else if (XEXP (p->slot, 0) == x
1005c87b03e5Sespie 	       || p->address == x
1006c87b03e5Sespie 	       || (GET_CODE (x) == PLUS
1007c87b03e5Sespie 		   && XEXP (x, 0) == virtual_stack_vars_rtx
1008c87b03e5Sespie 		   && GET_CODE (XEXP (x, 1)) == CONST_INT
1009c87b03e5Sespie 		   && INTVAL (XEXP (x, 1)) >= p->base_offset
1010c87b03e5Sespie 		   && INTVAL (XEXP (x, 1)) < p->base_offset + p->full_size))
1011c87b03e5Sespie 	return p;
1012c87b03e5Sespie 
1013c87b03e5Sespie       else if (p->address != 0 && GET_CODE (p->address) == EXPR_LIST)
1014c87b03e5Sespie 	for (next = p->address; next; next = XEXP (next, 1))
1015c87b03e5Sespie 	  if (XEXP (next, 0) == x)
1016c87b03e5Sespie 	    return p;
1017c87b03e5Sespie     }
1018c87b03e5Sespie 
1019c87b03e5Sespie   /* If we have a sum involving a register, see if it points to a temp
1020c87b03e5Sespie      slot.  */
1021c87b03e5Sespie   if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 0)) == REG
1022c87b03e5Sespie       && (p = find_temp_slot_from_address (XEXP (x, 0))) != 0)
1023c87b03e5Sespie     return p;
1024c87b03e5Sespie   else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == REG
1025c87b03e5Sespie 	   && (p = find_temp_slot_from_address (XEXP (x, 1))) != 0)
1026c87b03e5Sespie     return p;
1027c87b03e5Sespie 
1028c87b03e5Sespie   return 0;
1029c87b03e5Sespie }
1030c87b03e5Sespie 
1031c87b03e5Sespie /* Indicate that NEW is an alternate way of referring to the temp slot
1032c87b03e5Sespie    that previously was known by OLD.  */
1033c87b03e5Sespie 
void
update_temp_slot_address (old, new)
     rtx old, new;
{
  struct temp_slot *p;

  /* Nothing to record when the two expressions are already identical.  */
  if (rtx_equal_p (old, new))
    return;

  p = find_temp_slot_from_address (old);

  /* If we didn't find one, see if OLD is a PLUS.  If so, and NEW
     is a register, see if one operand of the PLUS is a temporary
     location.  If so, NEW points into it.  Otherwise, if both OLD and
     NEW are a PLUS and if there is a register in common between them.
     If so, try a recursive call on those values.  */
  if (p == 0)
    {
      if (GET_CODE (old) != PLUS)
	return;

      if (GET_CODE (new) == REG)
	{
	  update_temp_slot_address (XEXP (old, 0), new);
	  update_temp_slot_address (XEXP (old, 1), new);
	  return;
	}
      else if (GET_CODE (new) != PLUS)
	return;

      /* Cancel the operand common to OLD and NEW and recurse on the
	 remaining pair, in each of the four possible pairings.  */
      if (rtx_equal_p (XEXP (old, 0), XEXP (new, 0)))
	update_temp_slot_address (XEXP (old, 1), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 0)))
	update_temp_slot_address (XEXP (old, 0), XEXP (new, 1));
      else if (rtx_equal_p (XEXP (old, 0), XEXP (new, 1)))
	update_temp_slot_address (XEXP (old, 1), XEXP (new, 0));
      else if (rtx_equal_p (XEXP (old, 1), XEXP (new, 1)))
	update_temp_slot_address (XEXP (old, 0), XEXP (new, 0));

      return;
    }

  /* Otherwise add an alias for the temp's address.  */
  else if (p->address == 0)
    p->address = new;
  else
    {
      /* Multiple aliases are kept in an EXPR_LIST; promote a single
	 recorded address to a one-element list before prepending NEW.  */
      if (GET_CODE (p->address) != EXPR_LIST)
	p->address = gen_rtx_EXPR_LIST (VOIDmode, p->address, NULL_RTX);

      p->address = gen_rtx_EXPR_LIST (VOIDmode, new, p->address);
    }
}
1087c87b03e5Sespie 
1088c87b03e5Sespie /* If X could be a reference to a temporary slot, mark the fact that its
1089c87b03e5Sespie    address was taken.  */
1090c87b03e5Sespie 
1091c87b03e5Sespie void
mark_temp_addr_taken(x)1092c87b03e5Sespie mark_temp_addr_taken (x)
1093c87b03e5Sespie      rtx x;
1094c87b03e5Sespie {
1095c87b03e5Sespie   struct temp_slot *p;
1096c87b03e5Sespie 
1097c87b03e5Sespie   if (x == 0)
1098c87b03e5Sespie     return;
1099c87b03e5Sespie 
1100c87b03e5Sespie   /* If X is not in memory or is at a constant address, it cannot be in
1101c87b03e5Sespie      a temporary slot.  */
1102c87b03e5Sespie   if (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
1103c87b03e5Sespie     return;
1104c87b03e5Sespie 
1105c87b03e5Sespie   p = find_temp_slot_from_address (XEXP (x, 0));
1106c87b03e5Sespie   if (p != 0)
1107c87b03e5Sespie     p->addr_taken = 1;
1108c87b03e5Sespie }
1109c87b03e5Sespie 
1110c87b03e5Sespie /* If X could be a reference to a temporary slot, mark that slot as
   belonging to the level one higher than the current level.  If X
1112c87b03e5Sespie    matched one of our slots, just mark that one.  Otherwise, we can't
1113c87b03e5Sespie    easily predict which it is, so upgrade all of them.  Kept slots
1114c87b03e5Sespie    need not be touched.
1115c87b03e5Sespie 
1116c87b03e5Sespie    This is called when an ({...}) construct occurs and a statement
1117c87b03e5Sespie    returns a value in memory.  */
1118c87b03e5Sespie 
void
preserve_temp_slots (x)
     rtx x;
{
  struct temp_slot *p = 0;

  /* Note: decrementing a slot's LEVEL moves it to the enclosing
     (longer-lived) nesting level, so it survives the next
     free_temp_slots/pop_temp_slots at the current level.  */

  /* If there is no result, we still might have some objects whose address
     were taken, so we need to make sure they stay around.  */
  if (x == 0)
    {
      for (p = temp_slots; p; p = p->next)
	if (p->in_use && p->level == temp_slot_level && p->addr_taken)
	  p->level--;

      return;
    }

  /* If X is a register that is being used as a pointer, see if we have
     a temporary slot we know it points to.  To be consistent with
     the code below, we really should preserve all non-kept slots
     if we can't find a match, but that seems to be much too costly.  */
  if (GET_CODE (x) == REG && REG_POINTER (x))
    p = find_temp_slot_from_address (x);

  /* If X is not in memory or is at a constant address, it cannot be in
     a temporary slot, but it can contain something whose address was
     taken.  */
  if (p == 0 && (GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0))))
    {
      for (p = temp_slots; p; p = p->next)
	if (p->in_use && p->level == temp_slot_level && p->addr_taken)
	  p->level--;

      return;
    }

  /* First see if we can find a match.  */
  if (p == 0)
    p = find_temp_slot_from_address (XEXP (x, 0));

  if (p != 0)
    {
      /* Move everything at our level whose address was taken to our new
	 level in case we used its address.  */
      struct temp_slot *q;

      if (p->level == temp_slot_level)
	{
	  for (q = temp_slots; q; q = q->next)
	    if (q != p && q->addr_taken && q->level == p->level)
	      q->level--;

	  p->level--;
	  p->addr_taken = 0;
	}
      return;
    }

  /* Otherwise, preserve all non-kept slots at this level.  */
  for (p = temp_slots; p; p = p->next)
    if (p->in_use && p->level == temp_slot_level && ! p->keep)
      p->level--;
}
1182c87b03e5Sespie 
1183c87b03e5Sespie /* X is the result of an RTL_EXPR.  If it is a temporary slot associated
1184c87b03e5Sespie    with that RTL_EXPR, promote it into a temporary slot at the present
1185c87b03e5Sespie    level so it will not be freed when we free slots made in the
1186c87b03e5Sespie    RTL_EXPR.  */
1187c87b03e5Sespie 
1188c87b03e5Sespie void
preserve_rtl_expr_result(x)1189c87b03e5Sespie preserve_rtl_expr_result (x)
1190c87b03e5Sespie      rtx x;
1191c87b03e5Sespie {
1192c87b03e5Sespie   struct temp_slot *p;
1193c87b03e5Sespie 
1194c87b03e5Sespie   /* If X is not in memory or is at a constant address, it cannot be in
1195c87b03e5Sespie      a temporary slot.  */
1196c87b03e5Sespie   if (x == 0 || GET_CODE (x) != MEM || CONSTANT_P (XEXP (x, 0)))
1197c87b03e5Sespie     return;
1198c87b03e5Sespie 
1199c87b03e5Sespie   /* If we can find a match, move it to our level unless it is already at
1200c87b03e5Sespie      an upper level.  */
1201c87b03e5Sespie   p = find_temp_slot_from_address (XEXP (x, 0));
1202c87b03e5Sespie   if (p != 0)
1203c87b03e5Sespie     {
1204c87b03e5Sespie       p->level = MIN (p->level, temp_slot_level);
1205c87b03e5Sespie       p->rtl_expr = 0;
1206c87b03e5Sespie     }
1207c87b03e5Sespie 
1208c87b03e5Sespie   return;
1209c87b03e5Sespie }
1210c87b03e5Sespie 
1211c87b03e5Sespie /* Free all temporaries used so far.  This is normally called at the end
1212c87b03e5Sespie    of generating code for a statement.  Don't free any temporaries
1213c87b03e5Sespie    currently in use for an RTL_EXPR that hasn't yet been emitted.
1214c87b03e5Sespie    We could eventually do better than this since it can be reused while
1215c87b03e5Sespie    generating the same RTL_EXPR, but this is complex and probably not
1216c87b03e5Sespie    worthwhile.  */
1217c87b03e5Sespie 
1218c87b03e5Sespie void
free_temp_slots()1219c87b03e5Sespie free_temp_slots ()
1220c87b03e5Sespie {
1221c87b03e5Sespie   struct temp_slot *p;
1222c87b03e5Sespie 
1223c87b03e5Sespie   for (p = temp_slots; p; p = p->next)
1224c87b03e5Sespie     if (p->in_use && p->level == temp_slot_level && ! p->keep
1225c87b03e5Sespie 	&& p->rtl_expr == 0)
1226c87b03e5Sespie       p->in_use = 0;
1227c87b03e5Sespie 
1228c87b03e5Sespie   combine_temp_slots ();
1229c87b03e5Sespie }
1230c87b03e5Sespie 
1231c87b03e5Sespie /* Free all temporary slots used in T, an RTL_EXPR node.  */
1232c87b03e5Sespie 
1233c87b03e5Sespie void
free_temps_for_rtl_expr(t)1234c87b03e5Sespie free_temps_for_rtl_expr (t)
1235c87b03e5Sespie      tree t;
1236c87b03e5Sespie {
1237c87b03e5Sespie   struct temp_slot *p;
1238c87b03e5Sespie 
1239c87b03e5Sespie   for (p = temp_slots; p; p = p->next)
1240c87b03e5Sespie     if (p->rtl_expr == t)
1241c87b03e5Sespie       {
1242c87b03e5Sespie 	/* If this slot is below the current TEMP_SLOT_LEVEL, then it
1243c87b03e5Sespie 	   needs to be preserved.  This can happen if a temporary in
1244c87b03e5Sespie 	   the RTL_EXPR was addressed; preserve_temp_slots will move
1245c87b03e5Sespie 	   the temporary into a higher level.  */
1246c87b03e5Sespie 	if (temp_slot_level <= p->level)
1247c87b03e5Sespie 	  p->in_use = 0;
1248c87b03e5Sespie 	else
1249c87b03e5Sespie 	  p->rtl_expr = NULL_TREE;
1250c87b03e5Sespie       }
1251c87b03e5Sespie 
1252c87b03e5Sespie   combine_temp_slots ();
1253c87b03e5Sespie }
1254c87b03e5Sespie 
1255c87b03e5Sespie /* Mark all temporaries ever allocated in this function as not suitable
1256c87b03e5Sespie    for reuse until the current level is exited.  */
1257c87b03e5Sespie 
1258c87b03e5Sespie void
mark_all_temps_used()1259c87b03e5Sespie mark_all_temps_used ()
1260c87b03e5Sespie {
1261c87b03e5Sespie   struct temp_slot *p;
1262c87b03e5Sespie 
1263c87b03e5Sespie   for (p = temp_slots; p; p = p->next)
1264c87b03e5Sespie     {
1265c87b03e5Sespie       p->in_use = p->keep = 1;
1266c87b03e5Sespie       p->level = MIN (p->level, temp_slot_level);
1267c87b03e5Sespie     }
1268c87b03e5Sespie }
1269c87b03e5Sespie 
1270c87b03e5Sespie /* Push deeper into the nesting level for stack temporaries.  */
1271c87b03e5Sespie 
1272c87b03e5Sespie void
push_temp_slots()1273c87b03e5Sespie push_temp_slots ()
1274c87b03e5Sespie {
1275c87b03e5Sespie   temp_slot_level++;
1276c87b03e5Sespie }
1277c87b03e5Sespie 
/* Likewise, but save the new level as the place to allocate variables
   for blocks.  */

/* NOTE: everything from here to the matching #endif is compiled out;
   these helpers are retained for reference only.  */
#if 0
void
push_temp_slots_for_block ()
{
  push_temp_slots ();

  var_temp_slot_level = temp_slot_level;
}

/* Likewise, but save the new level as the place to allocate temporaries
   for TARGET_EXPRs.  */

void
push_temp_slots_for_target ()
{
  push_temp_slots ();

  target_temp_slot_level = temp_slot_level;
}

/* Set and get the value of target_temp_slot_level.  The only
   permitted use of these functions is to save and restore this value.  */

int
get_target_temp_slot_level ()
{
  return target_temp_slot_level;
}

void
set_target_temp_slot_level (level)
     int level;
{
  target_temp_slot_level = level;
}
#endif
1317c87b03e5Sespie 
1318c87b03e5Sespie /* Pop a temporary nesting level.  All slots in use in the current level
1319c87b03e5Sespie    are freed.  */
1320c87b03e5Sespie 
1321c87b03e5Sespie void
pop_temp_slots()1322c87b03e5Sespie pop_temp_slots ()
1323c87b03e5Sespie {
1324c87b03e5Sespie   struct temp_slot *p;
1325c87b03e5Sespie 
1326c87b03e5Sespie   for (p = temp_slots; p; p = p->next)
1327c87b03e5Sespie     if (p->in_use && p->level == temp_slot_level && p->rtl_expr == 0)
1328c87b03e5Sespie       p->in_use = 0;
1329c87b03e5Sespie 
1330c87b03e5Sespie   combine_temp_slots ();
1331c87b03e5Sespie 
1332c87b03e5Sespie   temp_slot_level--;
1333c87b03e5Sespie }
1334c87b03e5Sespie 
1335c87b03e5Sespie /* Initialize temporary slots.  */
1336c87b03e5Sespie 
1337c87b03e5Sespie void
init_temp_slots()1338c87b03e5Sespie init_temp_slots ()
1339c87b03e5Sespie {
1340c87b03e5Sespie   /* We have not allocated any temporaries yet.  */
1341c87b03e5Sespie   temp_slots = 0;
1342c87b03e5Sespie   temp_slot_level = 0;
1343c87b03e5Sespie   var_temp_slot_level = 0;
1344c87b03e5Sespie   target_temp_slot_level = 0;
1345c87b03e5Sespie }
1346c87b03e5Sespie 
/* Retroactively move an auto variable from a register to a stack
   slot.  This is done when an address-reference to the variable is
   seen.  If RESCAN is true, all previously emitted instructions are
   examined and modified to handle the fact that DECL is now
   addressable.  DECL may be either a *_DECL or a SAVE_EXPR; several
   tree accessors below are guarded on that distinction.  */

void
put_var_into_stack (decl, rescan)
     tree decl;
     int rescan;
{
  rtx reg;
  enum machine_mode promoted_mode, decl_mode;
  struct function *function = 0;
  tree context;
  int can_use_addressof_p;
  int volatile_p = TREE_CODE (decl) != SAVE_EXPR && TREE_THIS_VOLATILE (decl);
  int used_p = (TREE_USED (decl)
	       || (TREE_CODE (decl) != SAVE_EXPR && DECL_INITIAL (decl) != 0));

  context = decl_function_context (decl);

  /* Get the current rtl used for this object and its original mode.  */
  reg = (TREE_CODE (decl) == SAVE_EXPR
	 ? SAVE_EXPR_RTL (decl)
	 : DECL_RTL_IF_SET (decl));

  /* No need to do anything if decl has no rtx yet
     since in that case caller is setting TREE_ADDRESSABLE
     and a stack slot will be assigned when the rtl is made.  */
  if (reg == 0)
    return;

  /* Get the declared mode for this object.  */
  decl_mode = (TREE_CODE (decl) == SAVE_EXPR ? TYPE_MODE (TREE_TYPE (decl))
	       : DECL_MODE (decl));
  /* Get the mode it's actually stored in.  */
  promoted_mode = GET_MODE (reg);

  /* If this variable comes from an outer function, find that
     function's saved context.  Don't use find_function_data here,
     because it might not be in any active function.
     FIXME: Is that really supposed to happen?
     It does in ObjC at least.  */
  if (context != current_function_decl && context != inline_function_decl)
    for (function = outer_function_chain; function; function = function->outer)
      if (function->decl == context)
	break;

  /* If this is a variable-sized object or a structure passed by invisible
     reference, with a pseudo to address it, put that pseudo into the stack
     if the var is non-local.  */
  if (TREE_CODE (decl) != SAVE_EXPR && DECL_NONLOCAL (decl)
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == REG
      && REGNO (XEXP (reg, 0)) > LAST_VIRTUAL_REGISTER)
    {
      /* From here on we operate on the address pseudo itself, so the
	 modes become the address register's mode.  */
      reg = XEXP (reg, 0);
      decl_mode = promoted_mode = GET_MODE (reg);
    }

  /* If this variable lives in the current function and we don't need to put it
     in the stack for the sake of setjmp or the non-locality, try to keep it in
     a register until we know we actually need the address.  */
  can_use_addressof_p
    = (function == 0
       && ! (TREE_CODE (decl) != SAVE_EXPR && DECL_NONLOCAL (decl))
       && optimize > 0
       /* FIXME make it work for promoted modes too */
       && decl_mode == promoted_mode
#ifdef NON_SAVING_SETJMP
       && ! (NON_SAVING_SETJMP && current_function_calls_setjmp)
#endif
       );

  /* If we can't use ADDRESSOF, make sure we see through one we already
     generated.  */
  if (! can_use_addressof_p
      && GET_CODE (reg) == MEM
      && GET_CODE (XEXP (reg, 0)) == ADDRESSOF)
    reg = XEXP (XEXP (reg, 0), 0);

  /* Now we should have a value that resides in one or more pseudo regs.  */

  if (GET_CODE (reg) == REG)
    {
      if (can_use_addressof_p)
	gen_mem_addressof (reg, decl, rescan);
      else
	put_reg_into_stack (function, reg, TREE_TYPE (decl), decl_mode,
			    0, volatile_p, used_p, 0, 0);
    }
  else if (GET_CODE (reg) == CONCAT)
    {
      /* A CONCAT contains two pseudos; put them both in the stack.
	 We do it so they end up consecutive.
	 We fixup references to the parts only after we fixup references
	 to the whole CONCAT, lest we do double fixups for the latter
	 references.  */
      enum machine_mode part_mode = GET_MODE (XEXP (reg, 0));
      tree part_type = (*lang_hooks.types.type_for_mode) (part_mode, 0);
      rtx lopart = XEXP (reg, 0);
      rtx hipart = XEXP (reg, 1);
#ifdef FRAME_GROWS_DOWNWARD
      /* Since part 0 should have a lower address, do it second.  */
      put_reg_into_stack (function, hipart, part_type, part_mode,
			  0, volatile_p, 0, 0, 0);
      put_reg_into_stack (function, lopart, part_type, part_mode,
			  0, volatile_p, 0, 1, 0);
#else
      put_reg_into_stack (function, lopart, part_type, part_mode,
			  0, volatile_p, 0, 0, 0);
      put_reg_into_stack (function, hipart, part_type, part_mode,
			  0, volatile_p, 0, 1, 0);
#endif

      /* Change the CONCAT into a combined MEM for both parts.
	 Note that REG is mutated in place, so existing references to
	 this rtx now see the MEM.  */
      PUT_CODE (reg, MEM);
      MEM_ATTRS (reg) = 0;

      /* set_mem_attributes uses DECL_RTL to avoid re-generating of
         already computed alias sets.  Here we want to re-generate.  */
      if (DECL_P (decl))
	SET_DECL_RTL (decl, NULL);
      set_mem_attributes (reg, decl, 1);
      if (DECL_P (decl))
	SET_DECL_RTL (decl, reg);

      /* The two parts are in memory order already.
	 Use the lower parts address as ours.  */
      XEXP (reg, 0) = XEXP (XEXP (reg, 0), 0);
      /* Prevent sharing of rtl that might lose.  */
      if (GET_CODE (XEXP (reg, 0)) == PLUS)
	XEXP (reg, 0) = copy_rtx (XEXP (reg, 0));
      if (used_p && rescan)
	{
	  /* Fix up references to the whole CONCAT first, then to each
	     part, matching the ordering rationale above.  */
	  schedule_fixup_var_refs (function, reg, TREE_TYPE (decl),
				   promoted_mode, 0);
	  schedule_fixup_var_refs (function, lopart, part_type, part_mode, 0);
	  schedule_fixup_var_refs (function, hipart, part_type, part_mode, 0);
	}
    }
  else
    /* Anything other than a REG or CONCAT needs no stack slot.  */
    return;
}
1492c87b03e5Sespie 
1493c87b03e5Sespie /* Subroutine of put_var_into_stack.  This puts a single pseudo reg REG
1494c87b03e5Sespie    into the stack frame of FUNCTION (0 means the current function).
1495e97b50d0Smiod    TYPE is the user-level data type of the value hold in the register.
1496c87b03e5Sespie    DECL_MODE is the machine mode of the user-level data type.
1497e97b50d0Smiod    ORIGINAL_REGNO must be set if the real regno is not visible in REG.
1498e97b50d0Smiod    VOLATILE_P is true if this is for a "volatile" decl.
1499e97b50d0Smiod    USED_P is true if this reg might have already been used in an insn.
1500e97b50d0Smiod    CONSECUTIVE_P is true if the stack slot assigned to reg must be
1501e97b50d0Smiod    consecutive with the previous stack slot.  */
1502c87b03e5Sespie 
1503c87b03e5Sespie static void
put_reg_into_stack(function,reg,type,decl_mode,original_regno,volatile_p,used_p,consecutive_p,ht)1504e97b50d0Smiod put_reg_into_stack (function, reg, type, decl_mode, original_regno,
1505e97b50d0Smiod 		    volatile_p, used_p, consecutive_p, ht)
1506c87b03e5Sespie      struct function *function;
1507c87b03e5Sespie      rtx reg;
1508c87b03e5Sespie      tree type;
1509e97b50d0Smiod      enum machine_mode decl_mode;
1510c87b03e5Sespie      unsigned int original_regno;
1511e97b50d0Smiod      int volatile_p, used_p, consecutive_p;
1512c87b03e5Sespie      htab_t ht;
1513c87b03e5Sespie {
1514c87b03e5Sespie   struct function *func = function ? function : cfun;
1515e97b50d0Smiod   enum machine_mode mode = GET_MODE (reg);
1516c87b03e5Sespie   unsigned int regno = original_regno;
1517e97b50d0Smiod   rtx new = 0;
1518c87b03e5Sespie 
1519c87b03e5Sespie   if (regno == 0)
1520c87b03e5Sespie     regno = REGNO (reg);
1521c87b03e5Sespie 
1522c87b03e5Sespie   if (regno < func->x_max_parm_reg)
152306dc6460Sespie     {
152406dc6460Sespie       if (!func->x_parm_reg_stack_loc)
152506dc6460Sespie 	abort ();
1526c87b03e5Sespie       new = func->x_parm_reg_stack_loc[regno];
152706dc6460Sespie     }
1528c87b03e5Sespie 
1529c87b03e5Sespie   if (new == 0)
15305982dd4aSetoh     new = function ?
1531e97b50d0Smiod 	assign_stack_local_1 (decl_mode, GET_MODE_SIZE (decl_mode),
1532e97b50d0Smiod 			      consecutive_p ? -2 : 0, func):
1533e97b50d0Smiod 	assign_stack_local_for_pseudo_reg (decl_mode, GET_MODE_SIZE (decl_mode),
1534e97b50d0Smiod 					   consecutive_p ? -2 : 0);
1535c87b03e5Sespie 
1536c87b03e5Sespie   PUT_CODE (reg, MEM);
1537c87b03e5Sespie   PUT_MODE (reg, decl_mode);
1538c87b03e5Sespie   XEXP (reg, 0) = XEXP (new, 0);
1539c87b03e5Sespie   MEM_ATTRS (reg) = 0;
1540c87b03e5Sespie   /* `volatil' bit means one thing for MEMs, another entirely for REGs.  */
1541c87b03e5Sespie   MEM_VOLATILE_P (reg) = volatile_p;
1542c87b03e5Sespie 
1543c87b03e5Sespie   /* If this is a memory ref that contains aggregate components,
1544c87b03e5Sespie      mark it as such for cse and loop optimize.  If we are reusing a
1545c87b03e5Sespie      previously generated stack slot, then we need to copy the bit in
1546c87b03e5Sespie      case it was set for other reasons.  For instance, it is set for
1547c87b03e5Sespie      __builtin_va_alist.  */
1548c87b03e5Sespie   if (type)
1549c87b03e5Sespie     {
1550c87b03e5Sespie       MEM_SET_IN_STRUCT_P (reg,
1551c87b03e5Sespie 			   AGGREGATE_TYPE_P (type) || MEM_IN_STRUCT_P (new));
1552c87b03e5Sespie       set_mem_alias_set (reg, get_alias_set (type));
1553c87b03e5Sespie     }
1554c87b03e5Sespie 
1555c87b03e5Sespie   if (used_p)
1556e97b50d0Smiod     schedule_fixup_var_refs (function, reg, type, mode, ht);
1557c87b03e5Sespie }
1558c87b03e5Sespie 
1559c87b03e5Sespie /* Make sure that all refs to the variable, previously made
1560c87b03e5Sespie    when it was a register, are fixed up to be valid again.
1561c87b03e5Sespie    See function above for meaning of arguments.  */
1562c87b03e5Sespie 
1563c87b03e5Sespie static void
schedule_fixup_var_refs(function,reg,type,promoted_mode,ht)1564c87b03e5Sespie schedule_fixup_var_refs (function, reg, type, promoted_mode, ht)
1565c87b03e5Sespie      struct function *function;
1566c87b03e5Sespie      rtx reg;
1567c87b03e5Sespie      tree type;
1568c87b03e5Sespie      enum machine_mode promoted_mode;
1569c87b03e5Sespie      htab_t ht;
1570c87b03e5Sespie {
1571c87b03e5Sespie   int unsigned_p = type ? TREE_UNSIGNED (type) : 0;
1572c87b03e5Sespie 
1573c87b03e5Sespie   if (function != 0)
1574c87b03e5Sespie     {
1575c87b03e5Sespie       struct var_refs_queue *temp;
1576c87b03e5Sespie 
1577c87b03e5Sespie       temp
1578c87b03e5Sespie 	= (struct var_refs_queue *) ggc_alloc (sizeof (struct var_refs_queue));
1579c87b03e5Sespie       temp->modified = reg;
1580c87b03e5Sespie       temp->promoted_mode = promoted_mode;
1581c87b03e5Sespie       temp->unsignedp = unsigned_p;
1582c87b03e5Sespie       temp->next = function->fixup_var_refs_queue;
1583c87b03e5Sespie       function->fixup_var_refs_queue = temp;
1584c87b03e5Sespie     }
1585c87b03e5Sespie   else
1586c87b03e5Sespie     /* Variable is local; fix it up now.  */
1587c87b03e5Sespie     fixup_var_refs (reg, promoted_mode, unsigned_p, reg, ht);
1588c87b03e5Sespie }
1589c87b03e5Sespie 
/* Fix up all references to VAR, which used to be a pseudo register
   with mode PROMOTED_MODE, in the current function's insns, in all
   pending sequences, and in all waiting RTL_EXPRs.  UNSIGNEDP
   describes VAR's signedness; MAY_SHARE is rtl that need not be
   unshared; HT, when nonzero, is a hash table recording every insn
   that mentions VAR.  */

static void
fixup_var_refs (var, promoted_mode, unsignedp, may_share, ht)
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     htab_t ht;
     rtx may_share;
{
  tree pending;
  rtx first_insn = get_insns ();
  struct sequence_stack *stack = seq_stack;
  tree rtl_exps = rtl_expr_chain;

  /* If there's a hash table, it must record all uses of VAR.  */
  if (ht)
    {
      /* The hash-table path assumes no sequences are in progress.  */
      if (stack != 0)
	abort ();
      fixup_var_refs_insns_with_hash (ht, var, promoted_mode, unsignedp,
				      may_share);
      return;
    }

  /* TOPLEVEL is true only when no sequence is pending, i.e. this is
     really the function's main insn chain.  */
  fixup_var_refs_insns (first_insn, var, promoted_mode, unsignedp,
			stack == 0, may_share);

  /* Scan all pending sequences too.  */
  for (; stack; stack = stack->next)
    {
      push_to_full_sequence (stack->first, stack->last);
      fixup_var_refs_insns (stack->first, var, promoted_mode, unsignedp,
			    stack->next != 0, may_share);
      /* Update remembered end of sequence
	 in case we added an insn at the end.  */
      stack->last = get_last_insn ();
      end_sequence ();
    }

  /* Scan all waiting RTL_EXPRs too.  */
  for (pending = rtl_exps; pending; pending = TREE_CHAIN (pending))
    {
      rtx seq = RTL_EXPR_SEQUENCE (TREE_VALUE (pending));
      /* const0_rtx marks an RTL_EXPR whose sequence was already
	 consumed; skip it along with empty sequences.  */
      if (seq != const0_rtx && seq != 0)
	{
	  push_to_sequence (seq);
	  fixup_var_refs_insns (seq, var, promoted_mode, unsignedp, 0,
				may_share);
	  end_sequence ();
	}
    }
}
1641c87b03e5Sespie 
1642c87b03e5Sespie /* REPLACEMENTS is a pointer to a list of the struct fixup_replacement and X is
1643c87b03e5Sespie    some part of an insn.  Return a struct fixup_replacement whose OLD
1644c87b03e5Sespie    value is equal to X.  Allocate a new structure if no such entry exists.  */
1645c87b03e5Sespie 
1646c87b03e5Sespie static struct fixup_replacement *
find_fixup_replacement(replacements,x)1647c87b03e5Sespie find_fixup_replacement (replacements, x)
1648c87b03e5Sespie      struct fixup_replacement **replacements;
1649c87b03e5Sespie      rtx x;
1650c87b03e5Sespie {
1651c87b03e5Sespie   struct fixup_replacement *p;
1652c87b03e5Sespie 
1653c87b03e5Sespie   /* See if we have already replaced this.  */
1654c87b03e5Sespie   for (p = *replacements; p != 0 && ! rtx_equal_p (p->old, x); p = p->next)
1655c87b03e5Sespie     ;
1656c87b03e5Sespie 
1657c87b03e5Sespie   if (p == 0)
1658c87b03e5Sespie     {
1659c87b03e5Sespie       p = (struct fixup_replacement *) xmalloc (sizeof (struct fixup_replacement));
1660c87b03e5Sespie       p->old = x;
1661c87b03e5Sespie       p->new = 0;
1662c87b03e5Sespie       p->next = *replacements;
1663c87b03e5Sespie       *replacements = p;
1664c87b03e5Sespie     }
1665c87b03e5Sespie 
1666c87b03e5Sespie   return p;
1667c87b03e5Sespie }
1668c87b03e5Sespie 
1669c87b03e5Sespie /* Scan the insn-chain starting with INSN for refs to VAR and fix them
1670c87b03e5Sespie    up.  TOPLEVEL is nonzero if this chain is the main chain of insns
1671c87b03e5Sespie    for the current function.  MAY_SHARE is either a MEM that is not
1672c87b03e5Sespie    to be unshared or a list of them.  */
1673c87b03e5Sespie 
1674c87b03e5Sespie static void
fixup_var_refs_insns(insn,var,promoted_mode,unsignedp,toplevel,may_share)1675c87b03e5Sespie fixup_var_refs_insns (insn, var, promoted_mode, unsignedp, toplevel, may_share)
1676c87b03e5Sespie      rtx insn;
1677c87b03e5Sespie      rtx var;
1678c87b03e5Sespie      enum machine_mode promoted_mode;
1679c87b03e5Sespie      int unsignedp;
1680c87b03e5Sespie      int toplevel;
1681c87b03e5Sespie      rtx may_share;
1682c87b03e5Sespie {
1683c87b03e5Sespie   while (insn)
1684c87b03e5Sespie     {
1685c87b03e5Sespie       /* fixup_var_refs_insn might modify insn, so save its next
1686c87b03e5Sespie          pointer now.  */
1687c87b03e5Sespie       rtx next = NEXT_INSN (insn);
1688c87b03e5Sespie 
1689c87b03e5Sespie       /* CALL_PLACEHOLDERs are special; we have to switch into each of
1690c87b03e5Sespie 	 the three sequences they (potentially) contain, and process
1691c87b03e5Sespie 	 them recursively.  The CALL_INSN itself is not interesting.  */
1692c87b03e5Sespie 
1693c87b03e5Sespie       if (GET_CODE (insn) == CALL_INSN
1694c87b03e5Sespie 	  && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
1695c87b03e5Sespie 	{
1696c87b03e5Sespie 	  int i;
1697c87b03e5Sespie 
1698c87b03e5Sespie 	  /* Look at the Normal call, sibling call and tail recursion
1699c87b03e5Sespie 	     sequences attached to the CALL_PLACEHOLDER.  */
1700c87b03e5Sespie 	  for (i = 0; i < 3; i++)
1701c87b03e5Sespie 	    {
1702c87b03e5Sespie 	      rtx seq = XEXP (PATTERN (insn), i);
1703c87b03e5Sespie 	      if (seq)
1704c87b03e5Sespie 		{
1705c87b03e5Sespie 		  push_to_sequence (seq);
1706c87b03e5Sespie 		  fixup_var_refs_insns (seq, var, promoted_mode, unsignedp, 0,
1707c87b03e5Sespie 					may_share);
1708c87b03e5Sespie 		  XEXP (PATTERN (insn), i) = get_insns ();
1709c87b03e5Sespie 		  end_sequence ();
1710c87b03e5Sespie 		}
1711c87b03e5Sespie 	    }
1712c87b03e5Sespie 	}
1713c87b03e5Sespie 
1714c87b03e5Sespie       else if (INSN_P (insn))
1715c87b03e5Sespie 	fixup_var_refs_insn (insn, var, promoted_mode, unsignedp, toplevel,
1716c87b03e5Sespie 			     may_share);
1717c87b03e5Sespie 
1718c87b03e5Sespie       insn = next;
1719c87b03e5Sespie     }
1720c87b03e5Sespie }
1721c87b03e5Sespie 
1722c87b03e5Sespie /* Look up the insns which reference VAR in HT and fix them up.  Other
1723c87b03e5Sespie    arguments are the same as fixup_var_refs_insns.
1724c87b03e5Sespie 
1725c87b03e5Sespie    N.B. No need for special processing of CALL_PLACEHOLDERs here,
1726c87b03e5Sespie    because the hash table will point straight to the interesting insn
1727c87b03e5Sespie    (inside the CALL_PLACEHOLDER).  */
1728c87b03e5Sespie 
1729c87b03e5Sespie static void
fixup_var_refs_insns_with_hash(ht,var,promoted_mode,unsignedp,may_share)1730c87b03e5Sespie fixup_var_refs_insns_with_hash (ht, var, promoted_mode, unsignedp, may_share)
1731c87b03e5Sespie      htab_t ht;
1732c87b03e5Sespie      rtx var;
1733c87b03e5Sespie      enum machine_mode promoted_mode;
1734c87b03e5Sespie      int unsignedp;
1735c87b03e5Sespie      rtx may_share;
1736c87b03e5Sespie {
1737c87b03e5Sespie   struct insns_for_mem_entry tmp;
1738c87b03e5Sespie   struct insns_for_mem_entry *ime;
1739c87b03e5Sespie   rtx insn_list;
1740c87b03e5Sespie 
1741c87b03e5Sespie   tmp.key = var;
1742c87b03e5Sespie   ime = (struct insns_for_mem_entry *) htab_find (ht, &tmp);
1743c87b03e5Sespie   for (insn_list = ime->insns; insn_list != 0; insn_list = XEXP (insn_list, 1))
1744e97b50d0Smiod     if (INSN_P (XEXP (insn_list, 0)) && !INSN_DELETED_P (XEXP (insn_list, 0)))
1745c87b03e5Sespie       fixup_var_refs_insn (XEXP (insn_list, 0), var, promoted_mode,
1746c87b03e5Sespie 			   unsignedp, 1, may_share);
1747c87b03e5Sespie }
1748c87b03e5Sespie 
1749c87b03e5Sespie 
/* Per-insn processing by fixup_var_refs_insns(_with_hash).  INSN is
   the insn under examination, VAR is the variable to fix up
   references to, PROMOTED_MODE and UNSIGNEDP describe VAR, and
   TOPLEVEL is nonzero if this is the main insn chain for this
   function.  NO_SHARE is a MEM (or list of MEMs) that must not be
   unshared.  */

static void
fixup_var_refs_insn (insn, var, promoted_mode, unsignedp, toplevel, no_share)
     rtx insn;
     rtx var;
     enum machine_mode promoted_mode;
     int unsignedp;
     int toplevel;
     rtx no_share;
{
  rtx call_dest = 0;
  rtx set, prev, prev_set;
  rtx note;

  /* Remember the notes in case we delete the insn.  */
  note = REG_NOTES (insn);

  /* If this is a CLOBBER of VAR, delete it.

     If it has a REG_LIBCALL note, delete the REG_LIBCALL
     and REG_RETVAL notes too.  */
  if (GET_CODE (PATTERN (insn)) == CLOBBER
      && (XEXP (PATTERN (insn), 0) == var
	  || (GET_CODE (XEXP (PATTERN (insn), 0)) == CONCAT
	      && (XEXP (XEXP (PATTERN (insn), 0), 0) == var
		  || XEXP (XEXP (PATTERN (insn), 0), 1) == var))))
    {
      if ((note = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0)
	/* The REG_LIBCALL note will go away since we are going to
	   turn INSN into a NOTE, so just delete the
	   corresponding REG_RETVAL note.  */
	remove_note (XEXP (note, 0),
		     find_reg_note (XEXP (note, 0), REG_RETVAL,
				    NULL_RTX));

      delete_insn (insn);
    }

  /* The insn to load VAR from a home in the arglist
     is now a no-op.  When we see it, just delete it.
     Similarly if this is storing VAR from a register from which
     it was loaded in the previous insn.  This will occur
     when an ADDRESSOF was made for an arglist slot.  */
  else if (toplevel
	   && (set = single_set (insn)) != 0
	   && SET_DEST (set) == var
	   /* If this represents the result of an insn group,
	      don't delete the insn.  */
	   && find_reg_note (insn, REG_RETVAL, NULL_RTX) == 0
	   && (rtx_equal_p (SET_SRC (set), var)
	       || (GET_CODE (SET_SRC (set)) == REG
		   && (prev = prev_nonnote_insn (insn)) != 0
		   && (prev_set = single_set (prev)) != 0
		   && SET_DEST (prev_set) == SET_SRC (set)
		   && rtx_equal_p (SET_SRC (prev_set), var))))
    {
      delete_insn (insn);
    }
  else
    {
      struct fixup_replacement *replacements = 0;
      /* Remembered so that last_parm_insn can be updated below even if
	 fixup emits insns after INSN.  */
      rtx next_insn = NEXT_INSN (insn);

      if (SMALL_REGISTER_CLASSES)
	{
	  /* If the insn that copies the results of a CALL_INSN
	     into a pseudo now references VAR, we have to use an
	     intermediate pseudo since we want the life of the
	     return value register to be only a single insn.

	     If we don't use an intermediate pseudo, such things as
	     address computations to make the address of VAR valid
	     if it is not can be placed between the CALL_INSN and INSN.

	     To make sure this doesn't happen, we record the destination
	     of the CALL_INSN and see if the next insn uses both that
	     and VAR.  */

	  if (call_dest != 0 && GET_CODE (insn) == INSN
	      && reg_mentioned_p (var, PATTERN (insn))
	      && reg_mentioned_p (call_dest, PATTERN (insn)))
	    {
	      rtx temp = gen_reg_rtx (GET_MODE (call_dest));

	      emit_insn_before (gen_move_insn (temp, call_dest), insn);

	      PATTERN (insn) = replace_rtx (PATTERN (insn),
					    call_dest, temp);
	    }

	  /* Record this call's destination for the check above on the
	     next insn.  NOTE(review): CALL_DEST is a local initialized
	     to 0 at entry, so with per-insn invocation the condition
	     above appears unreachable -- presumably a remnant of a
	     loop-structured version; confirm before relying on it.  */
	  if (GET_CODE (insn) == CALL_INSN
	      && GET_CODE (PATTERN (insn)) == SET)
	    call_dest = SET_DEST (PATTERN (insn));
	  else if (GET_CODE (insn) == CALL_INSN
		   && GET_CODE (PATTERN (insn)) == PARALLEL
		   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
	    call_dest = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
	  else
	    call_dest = 0;
	}

      /* See if we have to do anything to INSN now that VAR is in
	 memory.  If it needs to be loaded into a pseudo, use a single
	 pseudo for the entire insn in case there is a MATCH_DUP
	 between two operands.  We pass a pointer to the head of
	 a list of struct fixup_replacements.  If fixup_var_refs_1
	 needs to allocate pseudos or replacement MEMs (for SUBREGs),
	 it will record them in this list.

	 If it allocated a pseudo for any replacement, we copy into
	 it here.  */

      fixup_var_refs_1 (var, promoted_mode, &PATTERN (insn), insn,
			&replacements, no_share);

      /* If this is last_parm_insn, and any instructions were output
	 after it to fix it up, then we must set last_parm_insn to
	 the last such instruction emitted.  */
      if (insn == last_parm_insn)
	last_parm_insn = PREV_INSN (next_insn);

      /* Emit the loads into any replacement pseudos, then free the
	 replacement list.  */
      while (replacements)
	{
	  struct fixup_replacement *next;

	  if (GET_CODE (replacements->new) == REG)
	    {
	      rtx insert_before;
	      rtx seq;

	      /* OLD might be a (subreg (mem)).  */
	      if (GET_CODE (replacements->old) == SUBREG)
		replacements->old
		  = fixup_memory_subreg (replacements->old, insn,
					 promoted_mode, 0);
	      else
		replacements->old
		  = fixup_stack_1 (replacements->old, insn);

	      insert_before = insn;

	      /* If we are changing the mode, do a conversion.
		 This might be wasteful, but combine.c will
		 eliminate much of the waste.  */

	      if (GET_MODE (replacements->new)
		  != GET_MODE (replacements->old))
		{
		  start_sequence ();
		  convert_move (replacements->new,
				replacements->old, unsignedp);
		  seq = get_insns ();
		  end_sequence ();
		}
	      else
		seq = gen_move_insn (replacements->new,
				     replacements->old);

	      emit_insn_before (seq, insert_before);
	    }

	  next = replacements->next;
	  free (replacements);
	  replacements = next;
	}
    }

  /* Also fix up any invalid exprs in the REG_NOTES of this insn.
     But don't touch other insns referred to by reg-notes;
     we will get them elsewhere.  */
  while (note)
    {
      if (GET_CODE (note) != INSN_LIST)
	XEXP (note, 0)
	  = walk_fixup_memory_subreg (XEXP (note, 0), insn,
				      promoted_mode, 1);
      note = XEXP (note, 1);
    }
}
1934c87b03e5Sespie 
/* VAR is a MEM that used to be a pseudo register with mode PROMOTED_MODE.
   See if the rtx expression at *LOC in INSN needs to be changed.

   REPLACEMENTS is a pointer to a list head that starts out zero, but may
   contain a list of original rtx's and replacements. If we find that we need
   to modify this insn by replacing a memory reference with a pseudo or by
   making a new MEM to implement a SUBREG, we consult that list to see if
   we have already chosen a replacement. If none has already been allocated,
   we allocate it and update the list.  fixup_var_refs_insn will copy VAR
   or the SUBREG, as appropriate, to the pseudo.

   NO_SHARE is passed through unchanged to copy_most_rtx (and to our own
   recursive calls); presumably it names an rtx whose sharing must be
   preserved while unsharing the rest -- confirm against copy_most_rtx.  */

static void
fixup_var_refs_1 (var, promoted_mode, loc, insn, replacements, no_share)
     rtx var;
     enum machine_mode promoted_mode;
     rtx *loc;
     rtx insn;
     struct fixup_replacement **replacements;
     rtx no_share;
{
  int i;
  rtx x = *loc;				/* The expression being examined.  */
  RTX_CODE code = GET_CODE (x);
  const char *fmt;
  rtx tem, tem1;
  struct fixup_replacement *replacement;

  /* Handle the rtx codes that can contain VAR in a way needing special
     treatment; anything not handled (or handled with `break') falls
     through to the generic operand walk at the bottom.  */
  switch (code)
    {
    case ADDRESSOF:
      if (XEXP (x, 0) == var)
	{
	  /* Prevent sharing of rtl that might lose.  */
	  rtx sub = copy_rtx (XEXP (var, 0));

	  if (! validate_change (insn, loc, sub, 0))
	    {
	      /* The address expression is not directly valid in INSN;
		 compute it into a fresh pseudo Y and use Y instead.  */
	      rtx y = gen_reg_rtx (GET_MODE (sub));
	      rtx seq, new_insn;

	      /* We should be able to replace with a register or all is lost.
		 Note that we can't use validate_change to verify this, since
		 we're not caring for replacing all dups simultaneously.  */
	      if (! validate_replace_rtx (*loc, y, insn))
		abort ();

	      /* Careful!  First try to recognize a direct move of the
		 value, mimicking how things are done in gen_reload wrt
		 PLUS.  Consider what happens when insn is a conditional
		 move instruction and addsi3 clobbers flags.  */

	      start_sequence ();
	      new_insn = emit_insn (gen_rtx_SET (VOIDmode, y, sub));
	      seq = get_insns ();
	      end_sequence ();

	      if (recog_memoized (new_insn) < 0)
		{
		  /* That failed.  Fall back on force_operand and hope.  */

		  start_sequence ();
		  sub = force_operand (sub, y);
		  if (sub != y)
		    emit_insn (gen_move_insn (y, sub));
		  seq = get_insns ();
		  end_sequence ();
		}

#ifdef HAVE_cc0
	      /* Don't separate setter from user.  */
	      if (PREV_INSN (insn) && sets_cc0_p (PREV_INSN (insn)))
		insn = PREV_INSN (insn);
#endif

	      emit_insn_before (seq, insn);
	    }
	}
      return;

    case MEM:
      if (var == x)
	{
	  /* If we already have a replacement, use it.  Otherwise,
	     try to fix up this address in case it is invalid.  */

	  replacement = find_fixup_replacement (replacements, var);
	  if (replacement->new)
	    {
	      *loc = replacement->new;
	      return;
	    }

	  *loc = replacement->new = x = fixup_stack_1 (x, insn);

	  /* Unless we are forcing memory to register or we changed the mode,
	     we can leave things the way they are if the insn is valid.  */

	  INSN_CODE (insn) = -1;
	  if (! flag_force_mem && GET_MODE (x) == promoted_mode
	      && recog_memoized (insn) >= 0)
	    return;

	  /* The MEM is not acceptable here; schedule a pseudo to replace
	     it (fixup_var_refs_insn will emit the copy into it).  */
	  *loc = replacement->new = gen_reg_rtx (promoted_mode);
	  return;
	}

      /* If X contains VAR, we need to unshare it here so that we update
	 each occurrence separately.  But all identical MEMs in one insn
	 must be replaced with the same rtx because of the possibility of
	 MATCH_DUPs.  */

      if (reg_mentioned_p (var, x))
	{
	  replacement = find_fixup_replacement (replacements, x);
	  if (replacement->new == 0)
	    replacement->new = copy_most_rtx (x, no_share);

	  *loc = x = replacement->new;
	  code = GET_CODE (x);
	}
      break;

    case REG:
    case CC0:
    case PC:
    case CONST_INT:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
    case CONST_DOUBLE:
    case CONST_VECTOR:
      /* Leaf rtx codes cannot contain VAR; nothing to do.  */
      return;

    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      /* Note that in some cases those types of expressions are altered
	 by optimize_bit_field, and do not survive to get here.  */
      if (XEXP (x, 0) == var
	  || (GET_CODE (XEXP (x, 0)) == SUBREG
	      && SUBREG_REG (XEXP (x, 0)) == var))
	{
	  /* Get TEM as a valid MEM in the mode presently in the insn.

	     We don't worry about the possibility of MATCH_DUP here; it
	     is highly unlikely and would be tricky to handle.  */

	  tem = XEXP (x, 0);
	  if (GET_CODE (tem) == SUBREG)
	    {
	      if (GET_MODE_BITSIZE (GET_MODE (tem))
		  > GET_MODE_BITSIZE (GET_MODE (var)))
		{
		  /* Paradoxical subreg of VAR: route through a pseudo.  */
		  replacement = find_fixup_replacement (replacements, var);
		  if (replacement->new == 0)
		    replacement->new = gen_reg_rtx (GET_MODE (var));
		  SUBREG_REG (tem) = replacement->new;

		  /* The following code works only if we have a MEM, so we
		     need to handle the subreg here.  We directly substitute
		     it assuming that a subreg must be OK here.  We already
		     scheduled a replacement to copy the mem into the
		     subreg.  */
		  XEXP (x, 0) = tem;
		  return;
		}
	      else
		tem = fixup_memory_subreg (tem, insn, promoted_mode, 0);
	    }
	  else
	    tem = fixup_stack_1 (tem, insn);

	  /* Unless we want to load from memory, get TEM into the proper mode
	     for an extract from memory.  This can only be done if the
	     extract is at a constant position and length.  */

	  if (! flag_force_mem && GET_CODE (XEXP (x, 1)) == CONST_INT
	      && GET_CODE (XEXP (x, 2)) == CONST_INT
	      && ! mode_dependent_address_p (XEXP (tem, 0))
	      && ! MEM_VOLATILE_P (tem))
	    {
	      enum machine_mode wanted_mode = VOIDmode;
	      enum machine_mode is_mode = GET_MODE (tem);
	      HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));

	      /* Ask the extv/extzv patterns what operand mode they want.  */
	      if (GET_CODE (x) == ZERO_EXTRACT)
		{
		  enum machine_mode new_mode
		    = mode_for_extraction (EP_extzv, 1);
		  if (new_mode != MAX_MACHINE_MODE)
		    wanted_mode = new_mode;
		}
	      else if (GET_CODE (x) == SIGN_EXTRACT)
		{
		  enum machine_mode new_mode
		    = mode_for_extraction (EP_extv, 1);
		  if (new_mode != MAX_MACHINE_MODE)
		    wanted_mode = new_mode;
		}

	      /* If we have a narrower mode, we can do something.  */
	      if (wanted_mode != VOIDmode
		  && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
		{
		  HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
		  rtx old_pos = XEXP (x, 2);
		  rtx newmem;

		  /* If the bytes and bits are counted differently, we
		     must adjust the offset.  */
		  if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
		    offset = (GET_MODE_SIZE (is_mode)
			      - GET_MODE_SIZE (wanted_mode) - offset);

		  pos %= GET_MODE_BITSIZE (wanted_mode);

		  newmem = adjust_address_nv (tem, wanted_mode, offset);

		  /* Make the change and see if the insn remains valid.  */
		  INSN_CODE (insn) = -1;
		  XEXP (x, 0) = newmem;
		  XEXP (x, 2) = GEN_INT (pos);

		  if (recog_memoized (insn) >= 0)
		    return;

		  /* Otherwise, restore old position.  XEXP (x, 0) will be
		     restored later.  */
		  XEXP (x, 2) = old_pos;
		}
	    }

	  /* If we get here, the bitfield extract insn can't accept a memory
	     reference.  Copy the input into a register.  */

	  tem1 = gen_reg_rtx (GET_MODE (tem));
	  emit_insn_before (gen_move_insn (tem1, tem), insn);
	  XEXP (x, 0) = tem1;
	  return;
	}
      break;

    case SUBREG:
      if (SUBREG_REG (x) == var)
	{
	  /* If this is a special SUBREG made because VAR was promoted
	     from a wider mode, replace it with VAR and call ourself
	     recursively, this time saying that the object previously
	     had its current mode (by virtue of the SUBREG).  */

	  if (SUBREG_PROMOTED_VAR_P (x))
	    {
	      *loc = var;
	      fixup_var_refs_1 (var, GET_MODE (var), loc, insn, replacements,
				no_share);
	      return;
	    }

	  /* If this SUBREG makes VAR wider, it has become a paradoxical
	     SUBREG with VAR in memory, but these aren't allowed at this
	     stage of the compilation.  So load VAR into a pseudo and take
	     a SUBREG of that pseudo.  */
	  if (GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (var)))
	    {
	      replacement = find_fixup_replacement (replacements, var);
	      if (replacement->new == 0)
		replacement->new = gen_reg_rtx (promoted_mode);
	      SUBREG_REG (x) = replacement->new;
	      return;
	    }

	  /* See if we have already found a replacement for this SUBREG.
	     If so, use it.  Otherwise, make a MEM and see if the insn
	     is recognized.  If not, or if we should force MEM into a register,
	     make a pseudo for this SUBREG.  */
	  replacement = find_fixup_replacement (replacements, x);
	  if (replacement->new)
	    {
	      enum machine_mode mode = GET_MODE (x);
	      *loc = replacement->new;

	      /* Careful!  We may have just replaced a SUBREG by a MEM, which
		 means that the insn may have become invalid again.  We can't
		 in this case make a new replacement since we already have one
		 and we must deal with MATCH_DUPs.  */
	      if (GET_CODE (replacement->new) == MEM)
		{
		  INSN_CODE (insn) = -1;
		  if (recog_memoized (insn) >= 0)
		    return;

		  /* Still invalid: recurse on the substituted MEM so the
		     usual MEM handling can repair it.  */
		  fixup_var_refs_1 (replacement->new, mode, &PATTERN (insn),
				    insn, replacements, no_share);
		}

	      return;
	    }

	  replacement->new = *loc = fixup_memory_subreg (x, insn,
							 promoted_mode, 0);

	  INSN_CODE (insn) = -1;
	  if (! flag_force_mem && recog_memoized (insn) >= 0)
	    return;

	  *loc = replacement->new = gen_reg_rtx (GET_MODE (x));
	  return;
	}
      break;

    case SET:
      /* First do special simplification of bit-field references.  */
      if (GET_CODE (SET_DEST (x)) == SIGN_EXTRACT
	  || GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
	optimize_bit_field (x, insn, 0);
      if (GET_CODE (SET_SRC (x)) == SIGN_EXTRACT
	  || GET_CODE (SET_SRC (x)) == ZERO_EXTRACT)
	optimize_bit_field (x, insn, 0);

      /* For a paradoxical SUBREG inside a ZERO_EXTRACT, load the object
	 into a register and then store it back out.  */
      if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
	  && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG
	  && SUBREG_REG (XEXP (SET_DEST (x), 0)) == var
	  && (GET_MODE_SIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
	      > GET_MODE_SIZE (GET_MODE (var))))
	{
	  replacement = find_fixup_replacement (replacements, var);
	  if (replacement->new == 0)
	    replacement->new = gen_reg_rtx (GET_MODE (var));

	  SUBREG_REG (XEXP (SET_DEST (x), 0)) = replacement->new;
	  emit_insn_after (gen_move_insn (var, replacement->new), insn);
	}

      /* If SET_DEST is now a paradoxical SUBREG, put the result of this
	 insn into a pseudo and store the low part of the pseudo into VAR.  */
      if (GET_CODE (SET_DEST (x)) == SUBREG
	  && SUBREG_REG (SET_DEST (x)) == var
	  && (GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
	      > GET_MODE_SIZE (GET_MODE (var))))
	{
	  SET_DEST (x) = tem = gen_reg_rtx (GET_MODE (SET_DEST (x)));
	  emit_insn_after (gen_move_insn (var, gen_lowpart (GET_MODE (var),
							    tem)),
			   insn);
	  break;
	}

      {
	rtx dest = SET_DEST (x);
	rtx src = SET_SRC (x);
	rtx outerdest = dest;

	/* Strip wrappers to find the innermost destination object.  */
	while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
	       || GET_CODE (dest) == SIGN_EXTRACT
	       || GET_CODE (dest) == ZERO_EXTRACT)
	  dest = XEXP (dest, 0);

	if (GET_CODE (src) == SUBREG)
	  src = SUBREG_REG (src);

	/* If VAR does not appear at the top level of the SET
	   just scan the lower levels of the tree.  */

	if (src != var && dest != var)
	  break;

	/* We will need to rerecognize this insn.  */
	INSN_CODE (insn) = -1;

	if (GET_CODE (outerdest) == ZERO_EXTRACT && dest == var
	    && mode_for_extraction (EP_insv, -1) != MAX_MACHINE_MODE)
	  {
	    /* Since this case will return, ensure we fixup all the
	       operands here.  */
	    fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 1),
			      insn, replacements, no_share);
	    fixup_var_refs_1 (var, promoted_mode, &XEXP (outerdest, 2),
			      insn, replacements, no_share);
	    fixup_var_refs_1 (var, promoted_mode, &SET_SRC (x),
			      insn, replacements, no_share);

	    tem = XEXP (outerdest, 0);

	    /* Clean up (SUBREG:SI (MEM:mode ...) 0)
	       that may appear inside a ZERO_EXTRACT.
	       This was legitimate when the MEM was a REG.  */
	    if (GET_CODE (tem) == SUBREG
		&& SUBREG_REG (tem) == var)
	      tem = fixup_memory_subreg (tem, insn, promoted_mode, 0);
	    else
	      tem = fixup_stack_1 (tem, insn);

	    /* Mirror of the extract case above, but for the insv
	       (bit-field store) pattern's wanted operand mode.  */
	    if (GET_CODE (XEXP (outerdest, 1)) == CONST_INT
		&& GET_CODE (XEXP (outerdest, 2)) == CONST_INT
		&& ! mode_dependent_address_p (XEXP (tem, 0))
		&& ! MEM_VOLATILE_P (tem))
	      {
		enum machine_mode wanted_mode;
		enum machine_mode is_mode = GET_MODE (tem);
		HOST_WIDE_INT pos = INTVAL (XEXP (outerdest, 2));

		wanted_mode = mode_for_extraction (EP_insv, 0);

		/* If we have a narrower mode, we can do something.  */
		if (GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
		  {
		    HOST_WIDE_INT offset = pos / BITS_PER_UNIT;
		    rtx old_pos = XEXP (outerdest, 2);
		    rtx newmem;

		    if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
		      offset = (GET_MODE_SIZE (is_mode)
				- GET_MODE_SIZE (wanted_mode) - offset);

		    pos %= GET_MODE_BITSIZE (wanted_mode);

		    newmem = adjust_address_nv (tem, wanted_mode, offset);

		    /* Make the change and see if the insn remains valid.  */
		    INSN_CODE (insn) = -1;
		    XEXP (outerdest, 0) = newmem;
		    XEXP (outerdest, 2) = GEN_INT (pos);

		    if (recog_memoized (insn) >= 0)
		      return;

		    /* Otherwise, restore old position.  XEXP (x, 0) will be
		       restored later.  */
		    XEXP (outerdest, 2) = old_pos;
		  }
	      }

	    /* If we get here, the bit-field store doesn't allow memory
	       or isn't located at a constant position.  Load the value into
	       a register, do the store, and put it back into memory.  */

	    tem1 = gen_reg_rtx (GET_MODE (tem));
	    emit_insn_before (gen_move_insn (tem1, tem), insn);
	    emit_insn_after (gen_move_insn (tem, tem1), insn);
	    XEXP (outerdest, 0) = tem1;
	    return;
	  }

	/* STRICT_LOW_PART is a no-op on memory references
	   and it can cause combinations to be unrecognizable,
	   so eliminate it.  */

	if (dest == var && GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
	  SET_DEST (x) = XEXP (SET_DEST (x), 0);

	/* A valid insn to copy VAR into or out of a register
	   must be left alone, to avoid an infinite loop here.
	   If the reference to VAR is by a subreg, fix that up,
	   since SUBREG is not valid for a memref.
	   Also fix up the address of the stack slot.

	   Note that we must not try to recognize the insn until
	   after we know that we have valid addresses and no
	   (subreg (mem ...) ...) constructs, since these interfere
	   with determining the validity of the insn.  */

	if ((SET_SRC (x) == var
	     || (GET_CODE (SET_SRC (x)) == SUBREG
		 && SUBREG_REG (SET_SRC (x)) == var))
	    && (GET_CODE (SET_DEST (x)) == REG
		|| (GET_CODE (SET_DEST (x)) == SUBREG
		    && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG))
	    && GET_MODE (var) == promoted_mode
	    && x == single_set (insn))
	  {
	    /* Copy of VAR out to a register.  */
	    rtx pat, last;

	    if (GET_CODE (SET_SRC (x)) == SUBREG
		&& (GET_MODE_SIZE (GET_MODE (SET_SRC (x)))
		    > GET_MODE_SIZE (GET_MODE (var))))
	      {
		/* This (subreg VAR) is now a paradoxical subreg.  We need
		   to replace VAR instead of the subreg.  */
		replacement = find_fixup_replacement (replacements, var);
		if (replacement->new == NULL_RTX)
		  replacement->new = gen_reg_rtx (GET_MODE (var));
		SUBREG_REG (SET_SRC (x)) = replacement->new;
	      }
	    else
	      {
		replacement = find_fixup_replacement (replacements, SET_SRC (x));
		if (replacement->new)
		  SET_SRC (x) = replacement->new;
		else if (GET_CODE (SET_SRC (x)) == SUBREG)
		  SET_SRC (x) = replacement->new
		    = fixup_memory_subreg (SET_SRC (x), insn, promoted_mode,
					   0);
		else
		  SET_SRC (x) = replacement->new
		    = fixup_stack_1 (SET_SRC (x), insn);
	      }

	    if (recog_memoized (insn) >= 0)
	      return;

	    /* INSN is not valid, but we know that we want to
	       copy SET_SRC (x) to SET_DEST (x) in some way.  So
	       we generate the move and see whether it requires more
	       than one insn.  If it does, we emit those insns and
	       delete INSN.  Otherwise, we can just replace the pattern
	       of INSN; we have already verified above that INSN has
	       no other function that to do X.  */

	    pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
	    if (NEXT_INSN (pat) != NULL_RTX)
	      {
		last = emit_insn_before (pat, insn);

		/* INSN might have REG_RETVAL or other important notes, so
		   we need to store the pattern of the last insn in the
		   sequence into INSN similarly to the normal case.  LAST
		   should not have REG_NOTES, but we allow them if INSN has
		   no REG_NOTES.  */
		if (REG_NOTES (last) && REG_NOTES (insn))
		  abort ();
		if (REG_NOTES (last))
		  REG_NOTES (insn) = REG_NOTES (last);
		PATTERN (insn) = PATTERN (last);

		delete_insn (last);
	      }
	    else
	      PATTERN (insn) = PATTERN (pat);

	    return;
	  }

	if ((SET_DEST (x) == var
	     || (GET_CODE (SET_DEST (x)) == SUBREG
		 && SUBREG_REG (SET_DEST (x)) == var))
	    && (GET_CODE (SET_SRC (x)) == REG
		|| (GET_CODE (SET_SRC (x)) == SUBREG
		    && GET_CODE (SUBREG_REG (SET_SRC (x))) == REG))
	    && GET_MODE (var) == promoted_mode
	    && x == single_set (insn))
	  {
	    /* Copy of a register into VAR: the mirror of the case above.  */
	    rtx pat, last;

	    if (GET_CODE (SET_DEST (x)) == SUBREG)
	      SET_DEST (x) = fixup_memory_subreg (SET_DEST (x), insn,
						  promoted_mode, 0);
	    else
	      SET_DEST (x) = fixup_stack_1 (SET_DEST (x), insn);

	    if (recog_memoized (insn) >= 0)
	      return;

	    pat = gen_move_insn (SET_DEST (x), SET_SRC (x));
	    if (NEXT_INSN (pat) != NULL_RTX)
	      {
		last = emit_insn_before (pat, insn);

		/* INSN might have REG_RETVAL or other important notes, so
		   we need to store the pattern of the last insn in the
		   sequence into INSN similarly to the normal case.  LAST
		   should not have REG_NOTES, but we allow them if INSN has
		   no REG_NOTES.  */
		if (REG_NOTES (last) && REG_NOTES (insn))
		  abort ();
		if (REG_NOTES (last))
		  REG_NOTES (insn) = REG_NOTES (last);
		PATTERN (insn) = PATTERN (last);

		delete_insn (last);
	      }
	    else
	      PATTERN (insn) = PATTERN (pat);

	    return;
	  }

	/* Otherwise, storing into VAR must be handled specially
	   by storing into a temporary and copying that into VAR
	   with a new insn after this one.  Note that this case
	   will be used when storing into a promoted scalar since
	   the insn will now have different modes on the input
	   and output and hence will be invalid (except for the case
	   of setting it to a constant, which does not need any
	   change if it is valid).  We generate extra code in that case,
	   but combine.c will eliminate it.  */

	if (dest == var)
	  {
	    rtx temp;
	    rtx fixeddest = SET_DEST (x);
	    enum machine_mode temp_mode;

	    /* STRICT_LOW_PART can be discarded, around a MEM.  */
	    if (GET_CODE (fixeddest) == STRICT_LOW_PART)
	      fixeddest = XEXP (fixeddest, 0);
	    /* Convert (SUBREG (MEM)) to a MEM in a changed mode.  */
	    if (GET_CODE (fixeddest) == SUBREG)
	      {
		fixeddest = fixup_memory_subreg (fixeddest, insn,
						 promoted_mode, 0);
		temp_mode = GET_MODE (fixeddest);
	      }
	    else
	      {
		fixeddest = fixup_stack_1 (fixeddest, insn);
		temp_mode = promoted_mode;
	      }

	    temp = gen_reg_rtx (temp_mode);

	    emit_insn_after (gen_move_insn (fixeddest,
					    gen_lowpart (GET_MODE (fixeddest),
							 temp)),
			     insn);

	    SET_DEST (x) = temp;
	  }
      }
      /* Fall through: the operands of the SET still need to be walked
	 by the generic code below.  */

    default:
      break;
    }

  /* Nothing special about this RTX; fix its operands.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	fixup_var_refs_1 (var, promoted_mode, &XEXP (x, i), insn, replacements,
			  no_share);
      else if (fmt[i] == 'E')
	{
	  int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    fixup_var_refs_1 (var, promoted_mode, &XVECEXP (x, i, j),
			      insn, replacements, no_share);
	}
    }
}
2576c87b03e5Sespie 
2577c87b03e5Sespie /* Previously, X had the form (SUBREG:m1 (REG:PROMOTED_MODE ...)).
2578c87b03e5Sespie    The REG  was placed on the stack, so X now has the form (SUBREG:m1
2579c87b03e5Sespie    (MEM:m2 ...)).
2580c87b03e5Sespie 
2581c87b03e5Sespie    Return an rtx (MEM:m1 newaddr) which is equivalent.  If any insns
2582c87b03e5Sespie    must be emitted to compute NEWADDR, put them before INSN.
2583c87b03e5Sespie 
2584c87b03e5Sespie    UNCRITICAL nonzero means accept paradoxical subregs.
2585c87b03e5Sespie    This is used for subregs found inside REG_NOTES.  */
2586c87b03e5Sespie 
2587c87b03e5Sespie static rtx
fixup_memory_subreg(x,insn,promoted_mode,uncritical)2588c87b03e5Sespie fixup_memory_subreg (x, insn, promoted_mode, uncritical)
2589c87b03e5Sespie      rtx x;
2590c87b03e5Sespie      rtx insn;
2591c87b03e5Sespie      enum machine_mode promoted_mode;
2592c87b03e5Sespie      int uncritical;
2593c87b03e5Sespie {
2594c87b03e5Sespie   int offset;
2595c87b03e5Sespie   rtx mem = SUBREG_REG (x);
2596c87b03e5Sespie   rtx addr = XEXP (mem, 0);
2597c87b03e5Sespie   enum machine_mode mode = GET_MODE (x);
2598c87b03e5Sespie   rtx result, seq;
2599c87b03e5Sespie 
2600c87b03e5Sespie   /* Paradoxical SUBREGs are usually invalid during RTL generation.  */
2601c87b03e5Sespie   if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (mem)) && ! uncritical)
2602c87b03e5Sespie     abort ();
2603c87b03e5Sespie 
2604c87b03e5Sespie   offset = SUBREG_BYTE (x);
2605c87b03e5Sespie   if (BYTES_BIG_ENDIAN)
2606c87b03e5Sespie     /* If the PROMOTED_MODE is wider than the mode of the MEM, adjust
2607c87b03e5Sespie        the offset so that it points to the right location within the
2608c87b03e5Sespie        MEM.  */
2609c87b03e5Sespie     offset -= (GET_MODE_SIZE (promoted_mode) - GET_MODE_SIZE (GET_MODE (mem)));
2610c87b03e5Sespie 
2611c87b03e5Sespie   if (!flag_force_addr
2612c87b03e5Sespie       && memory_address_p (mode, plus_constant (addr, offset)))
2613c87b03e5Sespie     /* Shortcut if no insns need be emitted.  */
2614c87b03e5Sespie     return adjust_address (mem, mode, offset);
2615c87b03e5Sespie 
2616c87b03e5Sespie   start_sequence ();
2617c87b03e5Sespie   result = adjust_address (mem, mode, offset);
2618c87b03e5Sespie   seq = get_insns ();
2619c87b03e5Sespie   end_sequence ();
2620c87b03e5Sespie 
2621c87b03e5Sespie   emit_insn_before (seq, insn);
2622c87b03e5Sespie   return result;
2623c87b03e5Sespie }
2624c87b03e5Sespie 
2625c87b03e5Sespie /* Do fixup_memory_subreg on all (SUBREG (MEM ...) ...) contained in X.
2626c87b03e5Sespie    Replace subexpressions of X in place.
2627c87b03e5Sespie    If X itself is a (SUBREG (MEM ...) ...), return the replacement expression.
2628c87b03e5Sespie    Otherwise return X, with its contents possibly altered.
2629c87b03e5Sespie 
2630c87b03e5Sespie    INSN, PROMOTED_MODE and UNCRITICAL are as for
2631c87b03e5Sespie    fixup_memory_subreg.  */
2632c87b03e5Sespie 
2633c87b03e5Sespie static rtx
walk_fixup_memory_subreg(x,insn,promoted_mode,uncritical)2634c87b03e5Sespie walk_fixup_memory_subreg (x, insn, promoted_mode, uncritical)
2635c87b03e5Sespie      rtx x;
2636c87b03e5Sespie      rtx insn;
2637c87b03e5Sespie      enum machine_mode promoted_mode;
2638c87b03e5Sespie      int uncritical;
2639c87b03e5Sespie {
2640c87b03e5Sespie   enum rtx_code code;
2641c87b03e5Sespie   const char *fmt;
2642c87b03e5Sespie   int i;
2643c87b03e5Sespie 
2644c87b03e5Sespie   if (x == 0)
2645c87b03e5Sespie     return 0;
2646c87b03e5Sespie 
2647c87b03e5Sespie   code = GET_CODE (x);
2648c87b03e5Sespie 
2649c87b03e5Sespie   if (code == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
2650c87b03e5Sespie     return fixup_memory_subreg (x, insn, promoted_mode, uncritical);
2651c87b03e5Sespie 
2652c87b03e5Sespie   /* Nothing special about this RTX; fix its operands.  */
2653c87b03e5Sespie 
2654c87b03e5Sespie   fmt = GET_RTX_FORMAT (code);
2655c87b03e5Sespie   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2656c87b03e5Sespie     {
2657c87b03e5Sespie       if (fmt[i] == 'e')
2658c87b03e5Sespie 	XEXP (x, i) = walk_fixup_memory_subreg (XEXP (x, i), insn,
2659c87b03e5Sespie 						promoted_mode, uncritical);
2660c87b03e5Sespie       else if (fmt[i] == 'E')
2661c87b03e5Sespie 	{
2662c87b03e5Sespie 	  int j;
2663c87b03e5Sespie 	  for (j = 0; j < XVECLEN (x, i); j++)
2664c87b03e5Sespie 	    XVECEXP (x, i, j)
2665c87b03e5Sespie 	      = walk_fixup_memory_subreg (XVECEXP (x, i, j), insn,
2666c87b03e5Sespie 					  promoted_mode, uncritical);
2667c87b03e5Sespie 	}
2668c87b03e5Sespie     }
2669c87b03e5Sespie   return x;
2670c87b03e5Sespie }
2671c87b03e5Sespie 
2672c87b03e5Sespie /* For each memory ref within X, if it refers to a stack slot
2673c87b03e5Sespie    with an out of range displacement, put the address in a temp register
2674c87b03e5Sespie    (emitting new insns before INSN to load these registers)
2675c87b03e5Sespie    and alter the memory ref to use that register.
2676c87b03e5Sespie    Replace each such MEM rtx with a copy, to avoid clobberage.  */
2677c87b03e5Sespie 
2678c87b03e5Sespie static rtx
fixup_stack_1(x,insn)2679c87b03e5Sespie fixup_stack_1 (x, insn)
2680c87b03e5Sespie      rtx x;
2681c87b03e5Sespie      rtx insn;
2682c87b03e5Sespie {
2683c87b03e5Sespie   int i;
2684c87b03e5Sespie   RTX_CODE code = GET_CODE (x);
2685c87b03e5Sespie   const char *fmt;
2686c87b03e5Sespie 
2687c87b03e5Sespie   if (code == MEM)
2688c87b03e5Sespie     {
2689c87b03e5Sespie       rtx ad = XEXP (x, 0);
2690c87b03e5Sespie       /* If we have address of a stack slot but it's not valid
2691c87b03e5Sespie 	 (displacement is too large), compute the sum in a register.  */
2692c87b03e5Sespie       if (GET_CODE (ad) == PLUS
2693c87b03e5Sespie 	  && GET_CODE (XEXP (ad, 0)) == REG
2694c87b03e5Sespie 	  && ((REGNO (XEXP (ad, 0)) >= FIRST_VIRTUAL_REGISTER
2695c87b03e5Sespie 	       && REGNO (XEXP (ad, 0)) <= LAST_VIRTUAL_REGISTER)
2696c87b03e5Sespie 	      || REGNO (XEXP (ad, 0)) == FRAME_POINTER_REGNUM
2697c87b03e5Sespie #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2698c87b03e5Sespie 	      || REGNO (XEXP (ad, 0)) == HARD_FRAME_POINTER_REGNUM
2699c87b03e5Sespie #endif
2700c87b03e5Sespie 	      || REGNO (XEXP (ad, 0)) == STACK_POINTER_REGNUM
2701c87b03e5Sespie 	      || REGNO (XEXP (ad, 0)) == ARG_POINTER_REGNUM
2702c87b03e5Sespie 	      || XEXP (ad, 0) == current_function_internal_arg_pointer)
2703c87b03e5Sespie 	  && GET_CODE (XEXP (ad, 1)) == CONST_INT)
2704c87b03e5Sespie 	{
2705c87b03e5Sespie 	  rtx temp, seq;
2706c87b03e5Sespie 	  if (memory_address_p (GET_MODE (x), ad))
2707c87b03e5Sespie 	    return x;
2708c87b03e5Sespie 
2709c87b03e5Sespie 	  start_sequence ();
2710c87b03e5Sespie 	  temp = copy_to_reg (ad);
2711c87b03e5Sespie 	  seq = get_insns ();
2712c87b03e5Sespie 	  end_sequence ();
2713c87b03e5Sespie 	  emit_insn_before (seq, insn);
2714c87b03e5Sespie 	  return replace_equiv_address (x, temp);
2715c87b03e5Sespie 	}
2716c87b03e5Sespie       return x;
2717c87b03e5Sespie     }
2718c87b03e5Sespie 
2719c87b03e5Sespie   fmt = GET_RTX_FORMAT (code);
2720c87b03e5Sespie   for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2721c87b03e5Sespie     {
2722c87b03e5Sespie       if (fmt[i] == 'e')
2723c87b03e5Sespie 	XEXP (x, i) = fixup_stack_1 (XEXP (x, i), insn);
2724c87b03e5Sespie       else if (fmt[i] == 'E')
2725c87b03e5Sespie 	{
2726c87b03e5Sespie 	  int j;
2727c87b03e5Sespie 	  for (j = 0; j < XVECLEN (x, i); j++)
2728c87b03e5Sespie 	    XVECEXP (x, i, j) = fixup_stack_1 (XVECEXP (x, i, j), insn);
2729c87b03e5Sespie 	}
2730c87b03e5Sespie     }
2731c87b03e5Sespie   return x;
2732c87b03e5Sespie }
2733c87b03e5Sespie 
2734c87b03e5Sespie /* Optimization: a bit-field instruction whose field
2735c87b03e5Sespie    happens to be a byte or halfword in memory
2736c87b03e5Sespie    can be changed to a move instruction.
2737c87b03e5Sespie 
2738c87b03e5Sespie    We call here when INSN is an insn to examine or store into a bit-field.
2739c87b03e5Sespie    BODY is the SET-rtx to be altered.
2740c87b03e5Sespie 
2741c87b03e5Sespie    EQUIV_MEM is the table `reg_equiv_mem' if that is available; else 0.
2742c87b03e5Sespie    (Currently this is called only from function.c, and EQUIV_MEM
2743c87b03e5Sespie    is always 0.)  */
2744c87b03e5Sespie 
static void
optimize_bit_field (body, insn, equiv_mem)
     rtx body;
     rtx insn;
     rtx *equiv_mem;
{
  rtx bitfield;
  int destflag;
  rtx seq = 0;
  enum machine_mode mode;

  /* Locate the {SIGN,ZERO}_EXTRACT.  DESTFLAG is 1 for a bit-field
     store (extract on the SET_DEST side), 0 for a bit-field read.  */
  if (GET_CODE (SET_DEST (body)) == SIGN_EXTRACT
      || GET_CODE (SET_DEST (body)) == ZERO_EXTRACT)
    bitfield = SET_DEST (body), destflag = 1;
  else
    bitfield = SET_SRC (body), destflag = 0;

  /* First check that the field being stored has constant size and position
     and is in fact a byte or halfword suitably aligned.  */

  if (GET_CODE (XEXP (bitfield, 1)) == CONST_INT
      && GET_CODE (XEXP (bitfield, 2)) == CONST_INT
      && ((mode = mode_for_size (INTVAL (XEXP (bitfield, 1)), MODE_INT, 1))
	  != BLKmode)
      && INTVAL (XEXP (bitfield, 2)) % INTVAL (XEXP (bitfield, 1)) == 0)
    {
      rtx memref = 0;

      /* Now check that the containing word is memory, not a register,
	 and that it is safe to change the machine mode.  */

      if (GET_CODE (XEXP (bitfield, 0)) == MEM)
	memref = XEXP (bitfield, 0);
      else if (GET_CODE (XEXP (bitfield, 0)) == REG
	       && equiv_mem != 0)
	memref = equiv_mem[REGNO (XEXP (bitfield, 0))];
      else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
	       && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == MEM)
	memref = SUBREG_REG (XEXP (bitfield, 0));
      else if (GET_CODE (XEXP (bitfield, 0)) == SUBREG
	       && equiv_mem != 0
	       && GET_CODE (SUBREG_REG (XEXP (bitfield, 0))) == REG)
	memref = equiv_mem[REGNO (SUBREG_REG (XEXP (bitfield, 0)))];

      /* A mode-dependent or volatile address must be left alone:
	 changing its mode would change what it references.  */
      if (memref
	  && ! mode_dependent_address_p (XEXP (memref, 0))
	  && ! MEM_VOLATILE_P (memref))
	{
	  /* Now adjust the address, first for any subreg'ing
	     that we are now getting rid of,
	     and then for which byte of the word is wanted.  */

	  HOST_WIDE_INT offset = INTVAL (XEXP (bitfield, 2));
	  rtx insns;

	  /* Adjust OFFSET to count bits from low-address byte.  */
	  if (BITS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
	    offset = (GET_MODE_BITSIZE (GET_MODE (XEXP (bitfield, 0)))
		      - offset - INTVAL (XEXP (bitfield, 1)));

	  /* Adjust OFFSET to count bytes from low-address byte.  */
	  offset /= BITS_PER_UNIT;
	  if (GET_CODE (XEXP (bitfield, 0)) == SUBREG)
	    {
	      /* Account for the SUBREG's word offset into the
		 underlying MEM, with a big-endian correction when the
		 inner and outer sizes differ.  */
	      offset += (SUBREG_BYTE (XEXP (bitfield, 0))
			 / UNITS_PER_WORD) * UNITS_PER_WORD;
	      if (BYTES_BIG_ENDIAN)
		offset -= (MIN (UNITS_PER_WORD,
				GET_MODE_SIZE (GET_MODE (XEXP (bitfield, 0))))
			   - MIN (UNITS_PER_WORD,
				  GET_MODE_SIZE (GET_MODE (memref))));
	    }

	  /* adjust_address may itself need insns (e.g. to reload the
	     address); collect them and emit before INSN.  */
	  start_sequence ();
	  memref = adjust_address (memref, mode, offset);
	  insns = get_insns ();
	  end_sequence ();
	  emit_insn_before (insns, insn);

	  /* Store this memory reference where
	     we found the bit field reference.  */

	  if (destflag)
	    {
	      /* Bit-field store: make MEMREF the destination and
		 narrow the source to match its mode.  All changes are
		 queued (last arg 1) and committed atomically below.  */
	      validate_change (insn, &SET_DEST (body), memref, 1);
	      if (! CONSTANT_ADDRESS_P (SET_SRC (body)))
		{
		  rtx src = SET_SRC (body);
		  while (GET_CODE (src) == SUBREG
			 && SUBREG_BYTE (src) == 0)
		    src = SUBREG_REG (src);
		  if (GET_MODE (src) != GET_MODE (memref))
		    src = gen_lowpart (GET_MODE (memref), SET_SRC (body));
		  validate_change (insn, &SET_SRC (body), src, 1);
		}
	      else if (GET_MODE (SET_SRC (body)) != VOIDmode
		       && GET_MODE (SET_SRC (body)) != GET_MODE (memref))
		/* This shouldn't happen because anything that didn't have
		   one of these modes should have got converted explicitly
		   and then referenced through a subreg.
		   This is so because the original bit-field was
		   handled by agg_mode and so its tree structure had
		   the same mode that memref now has.  */
		abort ();
	    }
	  else
	    {
	      /* Bit-field read: strip lowpart SUBREGs from the
		 destination, then either move MEMREF directly or
		 extend it through a new pseudo.  */
	      rtx dest = SET_DEST (body);

	      while (GET_CODE (dest) == SUBREG
		     && SUBREG_BYTE (dest) == 0
		     && (GET_MODE_CLASS (GET_MODE (dest))
			 == GET_MODE_CLASS (GET_MODE (SUBREG_REG (dest))))
		     && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
			 <= UNITS_PER_WORD))
		dest = SUBREG_REG (dest);

	      validate_change (insn, &SET_DEST (body), dest, 1);

	      if (GET_MODE (dest) == GET_MODE (memref))
		validate_change (insn, &SET_SRC (body), memref, 1);
	      else
		{
		  /* Convert the mem ref to the destination mode.
		     Zero-extend for ZERO_EXTRACT, sign-extend
		     otherwise.  */
		  rtx newreg = gen_reg_rtx (GET_MODE (dest));

		  start_sequence ();
		  convert_move (newreg, memref,
				GET_CODE (SET_SRC (body)) == ZERO_EXTRACT);
		  seq = get_insns ();
		  end_sequence ();

		  validate_change (insn, &SET_SRC (body), newreg, 1);
		}
	    }

	  /* See if we can convert this extraction or insertion into
	     a simple move insn.  We might not be able to do so if this
	     was, for example, part of a PARALLEL.

	     If we succeed, write out any needed conversions.  If we fail,
	     it is hard to guess why we failed, so don't do anything
	     special; just let the optimization be suppressed.  */

	  if (apply_change_group () && seq)
	    emit_insn_before (seq, insn);
	}
    }
}
2894c87b03e5Sespie 
2895c87b03e5Sespie /* These routines are responsible for converting virtual register references
2896c87b03e5Sespie    to the actual hard register references once RTL generation is complete.
2897c87b03e5Sespie 
2898c87b03e5Sespie    The following four variables are used for communication between the
2899c87b03e5Sespie    routines.  They contain the offsets of the virtual registers from their
2900c87b03e5Sespie    respective hard registers.  */
2901c87b03e5Sespie 
static int in_arg_offset;	/* incoming-argument area */
static int var_offset;		/* local-variable area */
static int dynamic_offset;	/* dynamically allocated stack area */
static int out_arg_offset;	/* outgoing-argument area */
static int cfa_offset;		/* canonical frame address */
2907c87b03e5Sespie 
2908c87b03e5Sespie /* In most machines, the stack pointer register is equivalent to the bottom
2909c87b03e5Sespie    of the stack.  */
2910c87b03e5Sespie 
2911c87b03e5Sespie #ifndef STACK_POINTER_OFFSET
2912c87b03e5Sespie #define STACK_POINTER_OFFSET	0
2913c87b03e5Sespie #endif
2914c87b03e5Sespie 
2915c87b03e5Sespie /* If not defined, pick an appropriate default for the offset of dynamically
2916c87b03e5Sespie    allocated memory depending on the value of ACCUMULATE_OUTGOING_ARGS,
2917c87b03e5Sespie    REG_PARM_STACK_SPACE, and OUTGOING_REG_PARM_STACK_SPACE.  */
2918c87b03e5Sespie 
2919c87b03e5Sespie #ifndef STACK_DYNAMIC_OFFSET
2920c87b03e5Sespie 
2921c87b03e5Sespie /* The bottom of the stack points to the actual arguments.  If
2922c87b03e5Sespie    REG_PARM_STACK_SPACE is defined, this includes the space for the register
2923c87b03e5Sespie    parameters.  However, if OUTGOING_REG_PARM_STACK space is not defined,
2924c87b03e5Sespie    stack space for register parameters is not pushed by the caller, but
2925c87b03e5Sespie    rather part of the fixed stack areas and hence not included in
2926c87b03e5Sespie    `current_function_outgoing_args_size'.  Nevertheless, we must allow
2927c87b03e5Sespie    for it when allocating stack dynamic objects.  */
2928c87b03e5Sespie 
2929c87b03e5Sespie #if defined(REG_PARM_STACK_SPACE) && ! defined(OUTGOING_REG_PARM_STACK_SPACE)
2930c87b03e5Sespie #define STACK_DYNAMIC_OFFSET(FNDECL)	\
2931c87b03e5Sespie ((ACCUMULATE_OUTGOING_ARGS						      \
2932c87b03e5Sespie   ? (current_function_outgoing_args_size + REG_PARM_STACK_SPACE (FNDECL)) : 0)\
2933c87b03e5Sespie  + (STACK_POINTER_OFFSET))						      \
2934c87b03e5Sespie 
2935c87b03e5Sespie #else
2936c87b03e5Sespie #define STACK_DYNAMIC_OFFSET(FNDECL)	\
2937c87b03e5Sespie ((ACCUMULATE_OUTGOING_ARGS ? current_function_outgoing_args_size : 0)	      \
2938c87b03e5Sespie  + (STACK_POINTER_OFFSET))
2939c87b03e5Sespie #endif
2940c87b03e5Sespie #endif
2941c87b03e5Sespie 
2942c87b03e5Sespie /* On most machines, the CFA coincides with the first incoming parm.  */
2943c87b03e5Sespie 
2944c87b03e5Sespie #ifndef ARG_POINTER_CFA_OFFSET
2945c87b03e5Sespie #define ARG_POINTER_CFA_OFFSET(FNDECL) FIRST_PARM_OFFSET (FNDECL)
2946c87b03e5Sespie #endif
2947c87b03e5Sespie 
2948c87b03e5Sespie /* Build up a (MEM (ADDRESSOF (REG))) rtx for a register REG that just
2949c87b03e5Sespie    had its address taken.  DECL is the decl or SAVE_EXPR for the
2950c87b03e5Sespie    object stored in the register, for later use if we do need to force
2951c87b03e5Sespie    REG into the stack.  REG is overwritten by the MEM like in
2952c87b03e5Sespie    put_reg_into_stack.  RESCAN is true if previously emitted
2953c87b03e5Sespie    instructions must be rescanned and modified now that the REG has
2954c87b03e5Sespie    been transformed.  */
2955c87b03e5Sespie 
rtx
gen_mem_addressof (reg, decl, rescan)
     rtx reg;
     tree decl;
     int rescan;
{
  /* R is (ADDRESSOF:Pmode (REG new-pseudo) regno decl); the fresh
     pseudo stands in for REG, whose own rtx is rewritten in place
     into a MEM below so that all existing references to it see the
     MEM.  */
  rtx r = gen_rtx_ADDRESSOF (Pmode, gen_reg_rtx (GET_MODE (reg)),
			     REGNO (reg), decl);

  /* Calculate this before we start messing with decl's RTL.  */
  HOST_WIDE_INT set = decl ? get_alias_set (decl) : 0;

  /* If the original REG was a user-variable, then so is the REG whose
     address is being taken.  Likewise for unchanging.  */
  REG_USERVAR_P (XEXP (r, 0)) = REG_USERVAR_P (reg);
  RTX_UNCHANGING_P (XEXP (r, 0)) = RTX_UNCHANGING_P (reg);

  /* Overwrite the shared REG rtx in place: it becomes
     (MEM (ADDRESSOF ...)).  */
  PUT_CODE (reg, MEM);
  MEM_ATTRS (reg) = 0;
  XEXP (reg, 0) = r;

  if (decl)
    {
      tree type = TREE_TYPE (decl);
      /* For a non-DECL (e.g. a SAVE_EXPR) take the mode from its
	 type.  */
      enum machine_mode decl_mode
	= (DECL_P (decl) ? DECL_MODE (decl) : TYPE_MODE (TREE_TYPE (decl)));
      rtx decl_rtl = (TREE_CODE (decl) == SAVE_EXPR ? SAVE_EXPR_RTL (decl)
		      : DECL_RTL_IF_SET (decl));

      PUT_MODE (reg, decl_mode);

      /* Clear DECL_RTL momentarily so functions below will work
	 properly, then set it again.  */
      if (DECL_P (decl) && decl_rtl == reg)
	SET_DECL_RTL (decl, 0);

      set_mem_attributes (reg, decl, 1);
      set_mem_alias_set (reg, set);

      if (DECL_P (decl) && decl_rtl == reg)
	SET_DECL_RTL (decl, reg);

      /* Rewrite prior uses of the register if the caller asked and the
	 decl is actually used.  */
      if (rescan
	  && (TREE_USED (decl) || (DECL_P (decl) && DECL_INITIAL (decl) != 0)))
	fixup_var_refs (reg, GET_MODE (reg), TREE_UNSIGNED (type), reg, 0);
    }
  else if (rescan)
    fixup_var_refs (reg, GET_MODE (reg), 0, reg, 0);

  return reg;
}
3007c87b03e5Sespie 
3008c87b03e5Sespie /* If DECL has an RTL that is an ADDRESSOF rtx, put it into the stack.  */
3009c87b03e5Sespie 
3010c87b03e5Sespie void
flush_addressof(decl)3011c87b03e5Sespie flush_addressof (decl)
3012c87b03e5Sespie      tree decl;
3013c87b03e5Sespie {
3014c87b03e5Sespie   if ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == VAR_DECL)
3015c87b03e5Sespie       && DECL_RTL (decl) != 0
3016c87b03e5Sespie       && GET_CODE (DECL_RTL (decl)) == MEM
3017c87b03e5Sespie       && GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF
3018c87b03e5Sespie       && GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == REG)
3019c87b03e5Sespie     put_addressof_into_stack (XEXP (DECL_RTL (decl), 0), 0);
3020c87b03e5Sespie }
3021c87b03e5Sespie 
3022c87b03e5Sespie /* Force the register pointed to by R, an ADDRESSOF rtx, into the stack.  */
3023c87b03e5Sespie 
3024c87b03e5Sespie static void
put_addressof_into_stack(r,ht)3025c87b03e5Sespie put_addressof_into_stack (r, ht)
3026c87b03e5Sespie      rtx r;
3027c87b03e5Sespie      htab_t ht;
3028c87b03e5Sespie {
3029c87b03e5Sespie   tree decl, type;
3030c87b03e5Sespie   int volatile_p, used_p;
3031c87b03e5Sespie 
3032c87b03e5Sespie   rtx reg = XEXP (r, 0);
3033c87b03e5Sespie 
3034c87b03e5Sespie   if (GET_CODE (reg) != REG)
3035c87b03e5Sespie     abort ();
3036c87b03e5Sespie 
3037c87b03e5Sespie   decl = ADDRESSOF_DECL (r);
3038c87b03e5Sespie   if (decl)
3039c87b03e5Sespie     {
3040c87b03e5Sespie       type = TREE_TYPE (decl);
3041c87b03e5Sespie       volatile_p = (TREE_CODE (decl) != SAVE_EXPR
3042c87b03e5Sespie 		    && TREE_THIS_VOLATILE (decl));
3043c87b03e5Sespie       used_p = (TREE_USED (decl)
3044c87b03e5Sespie 		|| (DECL_P (decl) && DECL_INITIAL (decl) != 0));
3045c87b03e5Sespie     }
3046c87b03e5Sespie   else
3047c87b03e5Sespie     {
3048c87b03e5Sespie       type = NULL_TREE;
3049c87b03e5Sespie       volatile_p = 0;
3050c87b03e5Sespie       used_p = 1;
3051c87b03e5Sespie     }
3052c87b03e5Sespie 
3053e97b50d0Smiod   put_reg_into_stack (0, reg, type, GET_MODE (reg), ADDRESSOF_REGNO (r),
3054e97b50d0Smiod 		      volatile_p, used_p, 0, ht);
3055c87b03e5Sespie }
3056c87b03e5Sespie 
3057c87b03e5Sespie /* List of replacements made below in purge_addressof_1 when creating
3058c87b03e5Sespie    bitfield insertions.  */
3059c87b03e5Sespie static rtx purge_bitfield_addressof_replacements;
3060c87b03e5Sespie 
3061c87b03e5Sespie /* List of replacements made below in purge_addressof_1 for patterns
3062c87b03e5Sespie    (MEM (ADDRESSOF (REG ...))).  The key of the list entry is the
3063c87b03e5Sespie    corresponding (ADDRESSOF (REG ...)) and value is a substitution for
3064c87b03e5Sespie    the all pattern.  List PURGE_BITFIELD_ADDRESSOF_REPLACEMENTS is not
3065c87b03e5Sespie    enough in complex cases, e.g. when some field values can be
3066c87b03e5Sespie    extracted by usage MEM with narrower mode.  */
3067c87b03e5Sespie static rtx purge_addressof_replacements;
3068c87b03e5Sespie 
3069c87b03e5Sespie /* Helper function for purge_addressof.  See if the rtx expression at *LOC
3070c87b03e5Sespie    in INSN needs to be changed.  If FORCE, always put any ADDRESSOFs into
3071c87b03e5Sespie    the stack.  If the function returns FALSE then the replacement could not
3072c87b03e5Sespie    be made.  */
3073c87b03e5Sespie 
3074c87b03e5Sespie static bool
purge_addressof_1(loc,insn,force,store,ht)3075c87b03e5Sespie purge_addressof_1 (loc, insn, force, store, ht)
3076c87b03e5Sespie      rtx *loc;
3077c87b03e5Sespie      rtx insn;
3078c87b03e5Sespie      int force, store;
3079c87b03e5Sespie      htab_t ht;
3080c87b03e5Sespie {
3081c87b03e5Sespie   rtx x;
3082c87b03e5Sespie   RTX_CODE code;
3083c87b03e5Sespie   int i, j;
3084c87b03e5Sespie   const char *fmt;
3085c87b03e5Sespie   bool result = true;
3086c87b03e5Sespie 
3087c87b03e5Sespie   /* Re-start here to avoid recursion in common cases.  */
3088c87b03e5Sespie  restart:
3089c87b03e5Sespie 
3090c87b03e5Sespie   x = *loc;
3091c87b03e5Sespie   if (x == 0)
3092c87b03e5Sespie     return true;
3093c87b03e5Sespie 
3094c87b03e5Sespie   code = GET_CODE (x);
3095c87b03e5Sespie 
3096c87b03e5Sespie   /* If we don't return in any of the cases below, we will recurse inside
3097c87b03e5Sespie      the RTX, which will normally result in any ADDRESSOF being forced into
3098c87b03e5Sespie      memory.  */
3099c87b03e5Sespie   if (code == SET)
3100c87b03e5Sespie     {
3101c87b03e5Sespie       result = purge_addressof_1 (&SET_DEST (x), insn, force, 1, ht);
3102c87b03e5Sespie       result &= purge_addressof_1 (&SET_SRC (x), insn, force, 0, ht);
3103c87b03e5Sespie       return result;
3104c87b03e5Sespie     }
3105c87b03e5Sespie   else if (code == ADDRESSOF)
3106c87b03e5Sespie     {
3107c87b03e5Sespie       rtx sub, insns;
3108c87b03e5Sespie 
3109c87b03e5Sespie       if (GET_CODE (XEXP (x, 0)) != MEM)
3110c87b03e5Sespie 	put_addressof_into_stack (x, ht);
3111c87b03e5Sespie 
3112c87b03e5Sespie       /* We must create a copy of the rtx because it was created by
3113c87b03e5Sespie 	 overwriting a REG rtx which is always shared.  */
3114c87b03e5Sespie       sub = copy_rtx (XEXP (XEXP (x, 0), 0));
3115c87b03e5Sespie       if (validate_change (insn, loc, sub, 0)
3116c87b03e5Sespie 	  || validate_replace_rtx (x, sub, insn))
3117c87b03e5Sespie 	return true;
3118c87b03e5Sespie 
3119c87b03e5Sespie       start_sequence ();
3120c87b03e5Sespie       sub = force_operand (sub, NULL_RTX);
3121c87b03e5Sespie       if (! validate_change (insn, loc, sub, 0)
3122c87b03e5Sespie 	  && ! validate_replace_rtx (x, sub, insn))
3123c87b03e5Sespie 	abort ();
3124c87b03e5Sespie 
3125c87b03e5Sespie       insns = get_insns ();
3126c87b03e5Sespie       end_sequence ();
3127c87b03e5Sespie       emit_insn_before (insns, insn);
3128c87b03e5Sespie       return true;
3129c87b03e5Sespie     }
3130c87b03e5Sespie 
3131c87b03e5Sespie   else if (code == MEM && GET_CODE (XEXP (x, 0)) == ADDRESSOF && ! force)
3132c87b03e5Sespie     {
3133c87b03e5Sespie       rtx sub = XEXP (XEXP (x, 0), 0);
3134c87b03e5Sespie 
3135c87b03e5Sespie       if (GET_CODE (sub) == MEM)
3136c87b03e5Sespie 	sub = adjust_address_nv (sub, GET_MODE (x), 0);
3137c87b03e5Sespie       else if (GET_CODE (sub) == REG
3138c87b03e5Sespie 	       && (MEM_VOLATILE_P (x) || GET_MODE (x) == BLKmode))
3139c87b03e5Sespie 	;
3140c87b03e5Sespie       else if (GET_CODE (sub) == REG && GET_MODE (x) != GET_MODE (sub))
3141c87b03e5Sespie 	{
3142c87b03e5Sespie 	  int size_x, size_sub;
3143c87b03e5Sespie 
3144c87b03e5Sespie 	  if (!insn)
3145c87b03e5Sespie 	    {
3146c87b03e5Sespie 	      /* When processing REG_NOTES look at the list of
3147c87b03e5Sespie 		 replacements done on the insn to find the register that X
3148c87b03e5Sespie 		 was replaced by.  */
3149c87b03e5Sespie 	      rtx tem;
3150c87b03e5Sespie 
3151c87b03e5Sespie 	      for (tem = purge_bitfield_addressof_replacements;
3152c87b03e5Sespie 		   tem != NULL_RTX;
3153c87b03e5Sespie 		   tem = XEXP (XEXP (tem, 1), 1))
3154c87b03e5Sespie 		if (rtx_equal_p (x, XEXP (tem, 0)))
3155c87b03e5Sespie 		  {
3156c87b03e5Sespie 		    *loc = XEXP (XEXP (tem, 1), 0);
3157c87b03e5Sespie 		    return true;
3158c87b03e5Sespie 		  }
3159c87b03e5Sespie 
3160c87b03e5Sespie 	      /* See comment for purge_addressof_replacements.  */
3161c87b03e5Sespie 	      for (tem = purge_addressof_replacements;
3162c87b03e5Sespie 		   tem != NULL_RTX;
3163c87b03e5Sespie 		   tem = XEXP (XEXP (tem, 1), 1))
3164c87b03e5Sespie 		if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3165c87b03e5Sespie 		  {
3166c87b03e5Sespie 		    rtx z = XEXP (XEXP (tem, 1), 0);
3167c87b03e5Sespie 
3168c87b03e5Sespie 		    if (GET_MODE (x) == GET_MODE (z)
3169c87b03e5Sespie 			|| (GET_CODE (XEXP (XEXP (tem, 1), 0)) != REG
3170c87b03e5Sespie 			    && GET_CODE (XEXP (XEXP (tem, 1), 0)) != SUBREG))
3171c87b03e5Sespie 		      abort ();
3172c87b03e5Sespie 
3173c87b03e5Sespie 		    /* It can happen that the note may speak of things
3174c87b03e5Sespie 		       in a wider (or just different) mode than the
3175c87b03e5Sespie 		       code did.  This is especially true of
3176c87b03e5Sespie 		       REG_RETVAL.  */
3177c87b03e5Sespie 
3178c87b03e5Sespie 		    if (GET_CODE (z) == SUBREG && SUBREG_BYTE (z) == 0)
3179c87b03e5Sespie 		      z = SUBREG_REG (z);
3180c87b03e5Sespie 
3181c87b03e5Sespie 		    if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD
3182c87b03e5Sespie 			&& (GET_MODE_SIZE (GET_MODE (x))
3183c87b03e5Sespie 			    > GET_MODE_SIZE (GET_MODE (z))))
3184c87b03e5Sespie 		      {
3185c87b03e5Sespie 			/* This can occur as a result in invalid
3186c87b03e5Sespie 			   pointer casts, e.g. float f; ...
3187c87b03e5Sespie 			   *(long long int *)&f.
3188c87b03e5Sespie 			   ??? We could emit a warning here, but
3189c87b03e5Sespie 			   without a line number that wouldn't be
3190c87b03e5Sespie 			   very helpful.  */
3191c87b03e5Sespie 			z = gen_rtx_SUBREG (GET_MODE (x), z, 0);
3192c87b03e5Sespie 		      }
3193c87b03e5Sespie 		    else
3194c87b03e5Sespie 		      z = gen_lowpart (GET_MODE (x), z);
3195c87b03e5Sespie 
3196c87b03e5Sespie 		    *loc = z;
3197c87b03e5Sespie 		    return true;
3198c87b03e5Sespie 		  }
3199c87b03e5Sespie 
3200c87b03e5Sespie 	      /* Sometimes we may not be able to find the replacement.  For
3201c87b03e5Sespie 		 example when the original insn was a MEM in a wider mode,
3202c87b03e5Sespie 		 and the note is part of a sign extension of a narrowed
3203c87b03e5Sespie 		 version of that MEM.  Gcc testcase compile/990829-1.c can
3204c87b03e5Sespie 		 generate an example of this situation.  Rather than complain
3205c87b03e5Sespie 		 we return false, which will prompt our caller to remove the
3206c87b03e5Sespie 		 offending note.  */
3207c87b03e5Sespie 	      return false;
3208c87b03e5Sespie 	    }
3209c87b03e5Sespie 
3210c87b03e5Sespie 	  size_x = GET_MODE_BITSIZE (GET_MODE (x));
3211c87b03e5Sespie 	  size_sub = GET_MODE_BITSIZE (GET_MODE (sub));
3212c87b03e5Sespie 
3213c87b03e5Sespie 	  /* Do not frob unchanging MEMs.  If a later reference forces the
3214c87b03e5Sespie 	     pseudo to the stack, we can wind up with multiple writes to
3215c87b03e5Sespie 	     an unchanging memory, which is invalid.  */
3216c87b03e5Sespie 	  if (RTX_UNCHANGING_P (x) && size_x != size_sub)
3217c87b03e5Sespie 	    ;
3218c87b03e5Sespie 
3219c87b03e5Sespie 	  /* Don't even consider working with paradoxical subregs,
3220c87b03e5Sespie 	     or the moral equivalent seen here.  */
3221c87b03e5Sespie 	  else if (size_x <= size_sub
3222c87b03e5Sespie 	           && int_mode_for_mode (GET_MODE (sub)) != BLKmode)
3223c87b03e5Sespie 	    {
3224c87b03e5Sespie 	      /* Do a bitfield insertion to mirror what would happen
3225c87b03e5Sespie 		 in memory.  */
3226c87b03e5Sespie 
3227c87b03e5Sespie 	      rtx val, seq;
3228c87b03e5Sespie 
3229c87b03e5Sespie 	      if (store)
3230c87b03e5Sespie 		{
3231c87b03e5Sespie 		  rtx p = PREV_INSN (insn);
3232c87b03e5Sespie 
3233c87b03e5Sespie 		  start_sequence ();
3234c87b03e5Sespie 		  val = gen_reg_rtx (GET_MODE (x));
3235c87b03e5Sespie 		  if (! validate_change (insn, loc, val, 0))
3236c87b03e5Sespie 		    {
3237c87b03e5Sespie 		      /* Discard the current sequence and put the
3238c87b03e5Sespie 			 ADDRESSOF on stack.  */
3239c87b03e5Sespie 		      end_sequence ();
3240c87b03e5Sespie 		      goto give_up;
3241c87b03e5Sespie 		    }
3242c87b03e5Sespie 		  seq = get_insns ();
3243c87b03e5Sespie 		  end_sequence ();
3244c87b03e5Sespie 		  emit_insn_before (seq, insn);
3245c87b03e5Sespie 		  compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3246c87b03e5Sespie 					 insn, ht);
3247c87b03e5Sespie 
3248c87b03e5Sespie 		  start_sequence ();
3249c87b03e5Sespie 		  store_bit_field (sub, size_x, 0, GET_MODE (x),
3250c87b03e5Sespie 				   val, GET_MODE_SIZE (GET_MODE (sub)));
3251c87b03e5Sespie 
3252c87b03e5Sespie 		  /* Make sure to unshare any shared rtl that store_bit_field
3253c87b03e5Sespie 		     might have created.  */
3254c87b03e5Sespie 		  unshare_all_rtl_again (get_insns ());
3255c87b03e5Sespie 
3256c87b03e5Sespie 		  seq = get_insns ();
3257c87b03e5Sespie 		  end_sequence ();
3258c87b03e5Sespie 		  p = emit_insn_after (seq, insn);
3259c87b03e5Sespie 		  if (NEXT_INSN (insn))
3260c87b03e5Sespie 		    compute_insns_for_mem (NEXT_INSN (insn),
3261c87b03e5Sespie 					   p ? NEXT_INSN (p) : NULL_RTX,
3262c87b03e5Sespie 					   ht);
3263c87b03e5Sespie 		}
3264c87b03e5Sespie 	      else
3265c87b03e5Sespie 		{
3266c87b03e5Sespie 		  rtx p = PREV_INSN (insn);
3267c87b03e5Sespie 
3268c87b03e5Sespie 		  start_sequence ();
3269c87b03e5Sespie 		  val = extract_bit_field (sub, size_x, 0, 1, NULL_RTX,
3270c87b03e5Sespie 					   GET_MODE (x), GET_MODE (x),
3271c87b03e5Sespie 					   GET_MODE_SIZE (GET_MODE (sub)));
3272c87b03e5Sespie 
3273c87b03e5Sespie 		  if (! validate_change (insn, loc, val, 0))
3274c87b03e5Sespie 		    {
3275c87b03e5Sespie 		      /* Discard the current sequence and put the
3276c87b03e5Sespie 			 ADDRESSOF on stack.  */
3277c87b03e5Sespie 		      end_sequence ();
3278c87b03e5Sespie 		      goto give_up;
3279c87b03e5Sespie 		    }
3280c87b03e5Sespie 
3281c87b03e5Sespie 		  seq = get_insns ();
3282c87b03e5Sespie 		  end_sequence ();
3283c87b03e5Sespie 		  emit_insn_before (seq, insn);
3284c87b03e5Sespie 		  compute_insns_for_mem (p ? NEXT_INSN (p) : get_insns (),
3285c87b03e5Sespie 					 insn, ht);
3286c87b03e5Sespie 		}
3287c87b03e5Sespie 
3288c87b03e5Sespie 	      /* Remember the replacement so that the same one can be done
3289c87b03e5Sespie 		 on the REG_NOTES.  */
3290c87b03e5Sespie 	      purge_bitfield_addressof_replacements
3291c87b03e5Sespie 		= gen_rtx_EXPR_LIST (VOIDmode, x,
3292c87b03e5Sespie 				     gen_rtx_EXPR_LIST
3293c87b03e5Sespie 				     (VOIDmode, val,
3294c87b03e5Sespie 				      purge_bitfield_addressof_replacements));
3295c87b03e5Sespie 
3296c87b03e5Sespie 	      /* We replaced with a reg -- all done.  */
3297c87b03e5Sespie 	      return true;
3298c87b03e5Sespie 	    }
3299c87b03e5Sespie 	}
3300c87b03e5Sespie 
3301c87b03e5Sespie       else if (validate_change (insn, loc, sub, 0))
3302c87b03e5Sespie 	{
3303c87b03e5Sespie 	  /* Remember the replacement so that the same one can be done
3304c87b03e5Sespie 	     on the REG_NOTES.  */
3305c87b03e5Sespie 	  if (GET_CODE (sub) == REG || GET_CODE (sub) == SUBREG)
3306c87b03e5Sespie 	    {
3307c87b03e5Sespie 	      rtx tem;
3308c87b03e5Sespie 
3309c87b03e5Sespie 	      for (tem = purge_addressof_replacements;
3310c87b03e5Sespie 		   tem != NULL_RTX;
3311c87b03e5Sespie 		   tem = XEXP (XEXP (tem, 1), 1))
3312c87b03e5Sespie 		if (rtx_equal_p (XEXP (x, 0), XEXP (tem, 0)))
3313c87b03e5Sespie 		  {
3314c87b03e5Sespie 		    XEXP (XEXP (tem, 1), 0) = sub;
3315c87b03e5Sespie 		    return true;
3316c87b03e5Sespie 		  }
3317c87b03e5Sespie 	      purge_addressof_replacements
3318c87b03e5Sespie 		= gen_rtx (EXPR_LIST, VOIDmode, XEXP (x, 0),
3319c87b03e5Sespie 			   gen_rtx_EXPR_LIST (VOIDmode, sub,
3320c87b03e5Sespie 					      purge_addressof_replacements));
3321c87b03e5Sespie 	      return true;
3322c87b03e5Sespie 	    }
3323c87b03e5Sespie 	  goto restart;
3324c87b03e5Sespie 	}
3325c87b03e5Sespie     }
3326c87b03e5Sespie 
3327c87b03e5Sespie  give_up:
3328c87b03e5Sespie   /* Scan all subexpressions.  */
3329c87b03e5Sespie   fmt = GET_RTX_FORMAT (code);
3330c87b03e5Sespie   for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
3331c87b03e5Sespie     {
3332c87b03e5Sespie       if (*fmt == 'e')
3333c87b03e5Sespie 	result &= purge_addressof_1 (&XEXP (x, i), insn, force, 0, ht);
3334c87b03e5Sespie       else if (*fmt == 'E')
3335c87b03e5Sespie 	for (j = 0; j < XVECLEN (x, i); j++)
3336c87b03e5Sespie 	  result &= purge_addressof_1 (&XVECEXP (x, i, j), insn, force, 0, ht);
3337c87b03e5Sespie     }
3338c87b03e5Sespie 
3339c87b03e5Sespie   return result;
3340c87b03e5Sespie }
3341c87b03e5Sespie 
3342c87b03e5Sespie /* Return a hash value for K, a REG.  */
3343c87b03e5Sespie 
3344c87b03e5Sespie static hashval_t
insns_for_mem_hash(k)3345c87b03e5Sespie insns_for_mem_hash (k)
3346c87b03e5Sespie      const void * k;
3347c87b03e5Sespie {
3348c87b03e5Sespie   /* Use the address of the key for the hash value.  */
3349c87b03e5Sespie   struct insns_for_mem_entry *m = (struct insns_for_mem_entry *) k;
3350c87b03e5Sespie   return htab_hash_pointer (m->key);
3351c87b03e5Sespie }
3352c87b03e5Sespie 
3353c87b03e5Sespie /* Return nonzero if K1 and K2 (two REGs) are the same.  */
3354c87b03e5Sespie 
3355c87b03e5Sespie static int
insns_for_mem_comp(k1,k2)3356c87b03e5Sespie insns_for_mem_comp (k1, k2)
3357c87b03e5Sespie      const void * k1;
3358c87b03e5Sespie      const void * k2;
3359c87b03e5Sespie {
3360c87b03e5Sespie   struct insns_for_mem_entry *m1 = (struct insns_for_mem_entry *) k1;
3361c87b03e5Sespie   struct insns_for_mem_entry *m2 = (struct insns_for_mem_entry *) k2;
3362c87b03e5Sespie   return m1->key == m2->key;
3363c87b03e5Sespie }
3364c87b03e5Sespie 
3365c87b03e5Sespie struct insns_for_mem_walk_info
3366c87b03e5Sespie {
3367c87b03e5Sespie   /* The hash table that we are using to record which INSNs use which
3368c87b03e5Sespie      MEMs.  */
3369c87b03e5Sespie   htab_t ht;
3370c87b03e5Sespie 
3371c87b03e5Sespie   /* The INSN we are currently processing.  */
3372c87b03e5Sespie   rtx insn;
3373c87b03e5Sespie 
3374c87b03e5Sespie   /* Zero if we are walking to find ADDRESSOFs, one if we are walking
3375c87b03e5Sespie      to find the insns that use the REGs in the ADDRESSOFs.  */
3376c87b03e5Sespie   int pass;
3377c87b03e5Sespie };
3378c87b03e5Sespie 
3379c87b03e5Sespie /* Called from compute_insns_for_mem via for_each_rtx.  If R is a REG
3380c87b03e5Sespie    that might be used in an ADDRESSOF expression, record this INSN in
3381c87b03e5Sespie    the hash table given by DATA (which is really a pointer to an
3382c87b03e5Sespie    insns_for_mem_walk_info structure).  */
3383c87b03e5Sespie 
3384c87b03e5Sespie static int
insns_for_mem_walk(r,data)3385c87b03e5Sespie insns_for_mem_walk (r, data)
3386c87b03e5Sespie      rtx *r;
3387c87b03e5Sespie      void *data;
3388c87b03e5Sespie {
3389c87b03e5Sespie   struct insns_for_mem_walk_info *ifmwi
3390c87b03e5Sespie     = (struct insns_for_mem_walk_info *) data;
3391c87b03e5Sespie   struct insns_for_mem_entry tmp;
3392c87b03e5Sespie   tmp.insns = NULL_RTX;
3393c87b03e5Sespie 
3394c87b03e5Sespie   if (ifmwi->pass == 0 && *r && GET_CODE (*r) == ADDRESSOF
3395c87b03e5Sespie       && GET_CODE (XEXP (*r, 0)) == REG)
3396c87b03e5Sespie     {
3397c87b03e5Sespie       PTR *e;
3398c87b03e5Sespie       tmp.key = XEXP (*r, 0);
3399c87b03e5Sespie       e = htab_find_slot (ifmwi->ht, &tmp, INSERT);
3400c87b03e5Sespie       if (*e == NULL)
3401c87b03e5Sespie 	{
3402c87b03e5Sespie 	  *e = ggc_alloc (sizeof (tmp));
3403c87b03e5Sespie 	  memcpy (*e, &tmp, sizeof (tmp));
3404c87b03e5Sespie 	}
3405c87b03e5Sespie     }
3406c87b03e5Sespie   else if (ifmwi->pass == 1 && *r && GET_CODE (*r) == REG)
3407c87b03e5Sespie     {
3408c87b03e5Sespie       struct insns_for_mem_entry *ifme;
3409c87b03e5Sespie       tmp.key = *r;
3410c87b03e5Sespie       ifme = (struct insns_for_mem_entry *) htab_find (ifmwi->ht, &tmp);
3411c87b03e5Sespie 
3412c87b03e5Sespie       /* If we have not already recorded this INSN, do so now.  Since
3413c87b03e5Sespie 	 we process the INSNs in order, we know that if we have
3414c87b03e5Sespie 	 recorded it it must be at the front of the list.  */
3415c87b03e5Sespie       if (ifme && (!ifme->insns || XEXP (ifme->insns, 0) != ifmwi->insn))
3416c87b03e5Sespie 	ifme->insns = gen_rtx_EXPR_LIST (VOIDmode, ifmwi->insn,
3417c87b03e5Sespie 					 ifme->insns);
3418c87b03e5Sespie     }
3419c87b03e5Sespie 
3420c87b03e5Sespie   return 0;
3421c87b03e5Sespie }
3422c87b03e5Sespie 
3423c87b03e5Sespie /* Walk the INSNS, until we reach LAST_INSN, recording which INSNs use
3424c87b03e5Sespie    which REGs in HT.  */
3425c87b03e5Sespie 
3426c87b03e5Sespie static void
compute_insns_for_mem(insns,last_insn,ht)3427c87b03e5Sespie compute_insns_for_mem (insns, last_insn, ht)
3428c87b03e5Sespie      rtx insns;
3429c87b03e5Sespie      rtx last_insn;
3430c87b03e5Sespie      htab_t ht;
3431c87b03e5Sespie {
3432c87b03e5Sespie   rtx insn;
3433c87b03e5Sespie   struct insns_for_mem_walk_info ifmwi;
3434c87b03e5Sespie   ifmwi.ht = ht;
3435c87b03e5Sespie 
3436c87b03e5Sespie   for (ifmwi.pass = 0; ifmwi.pass < 2; ++ifmwi.pass)
3437c87b03e5Sespie     for (insn = insns; insn != last_insn; insn = NEXT_INSN (insn))
3438c87b03e5Sespie       if (INSN_P (insn))
3439c87b03e5Sespie 	{
3440c87b03e5Sespie 	  ifmwi.insn = insn;
3441c87b03e5Sespie 	  for_each_rtx (&insn, insns_for_mem_walk, &ifmwi);
3442c87b03e5Sespie 	}
3443c87b03e5Sespie }
3444c87b03e5Sespie 
3445c87b03e5Sespie /* Helper function for purge_addressof called through for_each_rtx.
3446c87b03e5Sespie    Returns true iff the rtl is an ADDRESSOF.  */
3447c87b03e5Sespie 
3448c87b03e5Sespie static int
is_addressof(rtl,data)3449c87b03e5Sespie is_addressof (rtl, data)
3450c87b03e5Sespie      rtx *rtl;
3451c87b03e5Sespie      void *data ATTRIBUTE_UNUSED;
3452c87b03e5Sespie {
3453c87b03e5Sespie   return GET_CODE (*rtl) == ADDRESSOF;
3454c87b03e5Sespie }
3455c87b03e5Sespie 
3456c87b03e5Sespie /* Eliminate all occurrences of ADDRESSOF from INSNS.  Elide any remaining
3457c87b03e5Sespie    (MEM (ADDRESSOF)) patterns, and force any needed registers into the
3458c87b03e5Sespie    stack.  */
3459c87b03e5Sespie 
3460c87b03e5Sespie void
purge_addressof(insns)3461c87b03e5Sespie purge_addressof (insns)
3462c87b03e5Sespie      rtx insns;
3463c87b03e5Sespie {
3464c87b03e5Sespie   rtx insn;
3465c87b03e5Sespie   htab_t ht;
3466c87b03e5Sespie 
3467c87b03e5Sespie   /* When we actually purge ADDRESSOFs, we turn REGs into MEMs.  That
3468c87b03e5Sespie      requires a fixup pass over the instruction stream to correct
3469c87b03e5Sespie      INSNs that depended on the REG being a REG, and not a MEM.  But,
3470c87b03e5Sespie      these fixup passes are slow.  Furthermore, most MEMs are not
3471c87b03e5Sespie      mentioned in very many instructions.  So, we speed up the process
3472c87b03e5Sespie      by pre-calculating which REGs occur in which INSNs; that allows
3473c87b03e5Sespie      us to perform the fixup passes much more quickly.  */
3474c87b03e5Sespie   ht = htab_create_ggc (1000, insns_for_mem_hash, insns_for_mem_comp, NULL);
3475c87b03e5Sespie   compute_insns_for_mem (insns, NULL_RTX, ht);
3476c87b03e5Sespie 
3477c87b03e5Sespie   for (insn = insns; insn; insn = NEXT_INSN (insn))
3478c87b03e5Sespie     if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3479c87b03e5Sespie 	|| GET_CODE (insn) == CALL_INSN)
3480c87b03e5Sespie       {
3481c87b03e5Sespie 	if (! purge_addressof_1 (&PATTERN (insn), insn,
3482c87b03e5Sespie 				 asm_noperands (PATTERN (insn)) > 0, 0, ht))
3483c87b03e5Sespie 	  /* If we could not replace the ADDRESSOFs in the insn,
3484c87b03e5Sespie 	     something is wrong.  */
3485c87b03e5Sespie 	  abort ();
3486c87b03e5Sespie 
3487c87b03e5Sespie 	if (! purge_addressof_1 (&REG_NOTES (insn), NULL_RTX, 0, 0, ht))
3488c87b03e5Sespie 	  {
3489c87b03e5Sespie 	    /* If we could not replace the ADDRESSOFs in the insn's notes,
3490c87b03e5Sespie 	       we can just remove the offending notes instead.  */
3491c87b03e5Sespie 	    rtx note;
3492c87b03e5Sespie 
3493c87b03e5Sespie 	    for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
3494c87b03e5Sespie 	      {
3495c87b03e5Sespie 		/* If we find a REG_RETVAL note then the insn is a libcall.
3496c87b03e5Sespie 		   Such insns must have REG_EQUAL notes as well, in order
3497c87b03e5Sespie 		   for later passes of the compiler to work.  So it is not
3498c87b03e5Sespie 		   safe to delete the notes here, and instead we abort.  */
3499c87b03e5Sespie 		if (REG_NOTE_KIND (note) == REG_RETVAL)
3500c87b03e5Sespie 		  abort ();
3501c87b03e5Sespie 		if (for_each_rtx (&note, is_addressof, NULL))
3502c87b03e5Sespie 		  remove_note (insn, note);
3503c87b03e5Sespie 	      }
3504c87b03e5Sespie 	  }
3505c87b03e5Sespie       }
3506c87b03e5Sespie 
3507c87b03e5Sespie   /* Clean up.  */
3508c87b03e5Sespie   purge_bitfield_addressof_replacements = 0;
3509c87b03e5Sespie   purge_addressof_replacements = 0;
3510c87b03e5Sespie 
3511c87b03e5Sespie   /* REGs are shared.  purge_addressof will destructively replace a REG
3512c87b03e5Sespie      with a MEM, which creates shared MEMs.
3513c87b03e5Sespie 
3514c87b03e5Sespie      Unfortunately, the children of put_reg_into_stack assume that MEMs
3515c87b03e5Sespie      referring to the same stack slot are shared (fixup_var_refs and
3516c87b03e5Sespie      the associated hash table code).
3517c87b03e5Sespie 
3518c87b03e5Sespie      So, we have to do another unsharing pass after we have flushed any
3519c87b03e5Sespie      REGs that had their address taken into the stack.
3520c87b03e5Sespie 
3521c87b03e5Sespie      It may be worth tracking whether or not we converted any REGs into
3522c87b03e5Sespie      MEMs to avoid this overhead when it is not needed.  */
3523c87b03e5Sespie   unshare_all_rtl_again (get_insns ());
3524c87b03e5Sespie }
3525c87b03e5Sespie 
3526c87b03e5Sespie /* Convert a SET of a hard subreg to a set of the appropriate hard
3527c87b03e5Sespie    register.  A subroutine of purge_hard_subreg_sets.  */
3528c87b03e5Sespie 
3529c87b03e5Sespie static void
purge_single_hard_subreg_set(pattern)3530c87b03e5Sespie purge_single_hard_subreg_set (pattern)
3531c87b03e5Sespie      rtx pattern;
3532c87b03e5Sespie {
3533c87b03e5Sespie   rtx reg = SET_DEST (pattern);
3534c87b03e5Sespie   enum machine_mode mode = GET_MODE (SET_DEST (pattern));
3535c87b03e5Sespie   int offset = 0;
3536c87b03e5Sespie 
3537c87b03e5Sespie   if (GET_CODE (reg) == SUBREG && GET_CODE (SUBREG_REG (reg)) == REG
3538c87b03e5Sespie       && REGNO (SUBREG_REG (reg)) < FIRST_PSEUDO_REGISTER)
3539c87b03e5Sespie     {
3540c87b03e5Sespie       offset = subreg_regno_offset (REGNO (SUBREG_REG (reg)),
3541c87b03e5Sespie 				    GET_MODE (SUBREG_REG (reg)),
3542c87b03e5Sespie 				    SUBREG_BYTE (reg),
3543c87b03e5Sespie 				    GET_MODE (reg));
3544c87b03e5Sespie       reg = SUBREG_REG (reg);
3545c87b03e5Sespie     }
3546c87b03e5Sespie 
3547c87b03e5Sespie 
3548c87b03e5Sespie   if (GET_CODE (reg) == REG && REGNO (reg) < FIRST_PSEUDO_REGISTER)
3549c87b03e5Sespie     {
3550c87b03e5Sespie       reg = gen_rtx_REG (mode, REGNO (reg) + offset);
3551c87b03e5Sespie       SET_DEST (pattern) = reg;
3552c87b03e5Sespie     }
3553c87b03e5Sespie }
3554c87b03e5Sespie 
3555c87b03e5Sespie /* Eliminate all occurrences of SETs of hard subregs from INSNS.  The
3556c87b03e5Sespie    only such SETs that we expect to see are those left in because
3557c87b03e5Sespie    integrate can't handle sets of parts of a return value register.
3558c87b03e5Sespie 
3559c87b03e5Sespie    We don't use alter_subreg because we only want to eliminate subregs
3560c87b03e5Sespie    of hard registers.  */
3561c87b03e5Sespie 
3562c87b03e5Sespie void
purge_hard_subreg_sets(insn)3563c87b03e5Sespie purge_hard_subreg_sets (insn)
3564c87b03e5Sespie      rtx insn;
3565c87b03e5Sespie {
3566c87b03e5Sespie   for (; insn; insn = NEXT_INSN (insn))
3567c87b03e5Sespie     {
3568c87b03e5Sespie       if (INSN_P (insn))
3569c87b03e5Sespie 	{
3570c87b03e5Sespie 	  rtx pattern = PATTERN (insn);
3571c87b03e5Sespie 	  switch (GET_CODE (pattern))
3572c87b03e5Sespie 	    {
3573c87b03e5Sespie 	    case SET:
3574c87b03e5Sespie 	      if (GET_CODE (SET_DEST (pattern)) == SUBREG)
3575c87b03e5Sespie 		purge_single_hard_subreg_set (pattern);
3576c87b03e5Sespie 	      break;
3577c87b03e5Sespie 	    case PARALLEL:
3578c87b03e5Sespie 	      {
3579c87b03e5Sespie 		int j;
3580c87b03e5Sespie 		for (j = XVECLEN (pattern, 0) - 1; j >= 0; j--)
3581c87b03e5Sespie 		  {
3582c87b03e5Sespie 		    rtx inner_pattern = XVECEXP (pattern, 0, j);
3583c87b03e5Sespie 		    if (GET_CODE (inner_pattern) == SET
3584c87b03e5Sespie 			&& GET_CODE (SET_DEST (inner_pattern)) == SUBREG)
3585c87b03e5Sespie 		      purge_single_hard_subreg_set (inner_pattern);
3586c87b03e5Sespie 		  }
3587c87b03e5Sespie 	      }
3588c87b03e5Sespie 	      break;
3589c87b03e5Sespie 	    default:
3590c87b03e5Sespie 	      break;
3591c87b03e5Sespie 	    }
3592c87b03e5Sespie 	}
3593c87b03e5Sespie     }
3594c87b03e5Sespie }
3595c87b03e5Sespie 
3596c87b03e5Sespie /* Pass through the INSNS of function FNDECL and convert virtual register
3597c87b03e5Sespie    references to hard register references.  */
3598c87b03e5Sespie 
3599c87b03e5Sespie void
instantiate_virtual_regs(fndecl,insns)3600c87b03e5Sespie instantiate_virtual_regs (fndecl, insns)
3601c87b03e5Sespie      tree fndecl;
3602c87b03e5Sespie      rtx insns;
3603c87b03e5Sespie {
3604c87b03e5Sespie   rtx insn;
3605c87b03e5Sespie   unsigned int i;
3606c87b03e5Sespie 
3607c87b03e5Sespie   /* Compute the offsets to use for this function.  */
3608c87b03e5Sespie   in_arg_offset = FIRST_PARM_OFFSET (fndecl);
3609c87b03e5Sespie   var_offset = STARTING_FRAME_OFFSET;
3610c87b03e5Sespie   dynamic_offset = STACK_DYNAMIC_OFFSET (fndecl);
3611c87b03e5Sespie   out_arg_offset = STACK_POINTER_OFFSET;
3612c87b03e5Sespie   cfa_offset = ARG_POINTER_CFA_OFFSET (fndecl);
3613c87b03e5Sespie 
3614c87b03e5Sespie   /* Scan all variables and parameters of this function.  For each that is
3615c87b03e5Sespie      in memory, instantiate all virtual registers if the result is a valid
3616c87b03e5Sespie      address.  If not, we do it later.  That will handle most uses of virtual
3617c87b03e5Sespie      regs on many machines.  */
3618c87b03e5Sespie   instantiate_decls (fndecl, 1);
3619c87b03e5Sespie 
3620c87b03e5Sespie   /* Initialize recognition, indicating that volatile is OK.  */
3621c87b03e5Sespie   init_recog ();
3622c87b03e5Sespie 
3623c87b03e5Sespie   /* Scan through all the insns, instantiating every virtual register still
3624c87b03e5Sespie      present.  */
3625c87b03e5Sespie   for (insn = insns; insn; insn = NEXT_INSN (insn))
3626c87b03e5Sespie     if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
3627c87b03e5Sespie 	|| GET_CODE (insn) == CALL_INSN)
3628c87b03e5Sespie       {
3629c87b03e5Sespie 	instantiate_virtual_regs_1 (&PATTERN (insn), insn, 1);
3630c87b03e5Sespie 	if (INSN_DELETED_P (insn))
3631c87b03e5Sespie 	  continue;
3632c87b03e5Sespie 	instantiate_virtual_regs_1 (&REG_NOTES (insn), NULL_RTX, 0);
3633c87b03e5Sespie 	/* Instantiate any virtual registers in CALL_INSN_FUNCTION_USAGE.  */
3634c87b03e5Sespie 	if (GET_CODE (insn) == CALL_INSN)
3635c87b03e5Sespie 	  instantiate_virtual_regs_1 (&CALL_INSN_FUNCTION_USAGE (insn),
3636c87b03e5Sespie 				      NULL_RTX, 0);
3637c87b03e5Sespie 
3638c87b03e5Sespie 	/* Past this point all ASM statements should match.  Verify that
3639c87b03e5Sespie 	   to avoid failures later in the compilation process.  */
3640c87b03e5Sespie         if (asm_noperands (PATTERN (insn)) >= 0
3641c87b03e5Sespie 	    && ! check_asm_operands (PATTERN (insn)))
3642c87b03e5Sespie           instantiate_virtual_regs_lossage (insn);
3643c87b03e5Sespie       }
3644c87b03e5Sespie 
3645c87b03e5Sespie   /* Instantiate the stack slots for the parm registers, for later use in
3646c87b03e5Sespie      addressof elimination.  */
3647c87b03e5Sespie   for (i = 0; i < max_parm_reg; ++i)
3648c87b03e5Sespie     if (parm_reg_stack_loc[i])
3649c87b03e5Sespie       instantiate_virtual_regs_1 (&parm_reg_stack_loc[i], NULL_RTX, 0);
3650c87b03e5Sespie 
3651c87b03e5Sespie   /* Now instantiate the remaining register equivalences for debugging info.
3652c87b03e5Sespie      These will not be valid addresses.  */
3653c87b03e5Sespie   instantiate_decls (fndecl, 0);
3654c87b03e5Sespie 
3655c87b03e5Sespie   /* Indicate that, from now on, assign_stack_local should use
3656c87b03e5Sespie      frame_pointer_rtx.  */
3657c87b03e5Sespie   virtuals_instantiated = 1;
3658c87b03e5Sespie }
3659c87b03e5Sespie 
3660c87b03e5Sespie /* Scan all decls in FNDECL (both variables and parameters) and instantiate
3661c87b03e5Sespie    all virtual registers in their DECL_RTL's.
3662c87b03e5Sespie 
3663c87b03e5Sespie    If VALID_ONLY, do this only if the resulting address is still valid.
3664c87b03e5Sespie    Otherwise, always do it.  */
3665c87b03e5Sespie 
3666c87b03e5Sespie static void
instantiate_decls(fndecl,valid_only)3667c87b03e5Sespie instantiate_decls (fndecl, valid_only)
3668c87b03e5Sespie      tree fndecl;
3669c87b03e5Sespie      int valid_only;
3670c87b03e5Sespie {
3671c87b03e5Sespie   tree decl;
3672c87b03e5Sespie 
3673c87b03e5Sespie   /* Process all parameters of the function.  */
3674c87b03e5Sespie   for (decl = DECL_ARGUMENTS (fndecl); decl; decl = TREE_CHAIN (decl))
3675c87b03e5Sespie     {
3676c87b03e5Sespie       HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
3677c87b03e5Sespie       HOST_WIDE_INT size_rtl;
3678c87b03e5Sespie 
3679c87b03e5Sespie       instantiate_decl (DECL_RTL (decl), size, valid_only);
3680c87b03e5Sespie 
3681c87b03e5Sespie       /* If the parameter was promoted, then the incoming RTL mode may be
3682c87b03e5Sespie 	 larger than the declared type size.  We must use the larger of
3683c87b03e5Sespie 	 the two sizes.  */
3684c87b03e5Sespie       size_rtl = GET_MODE_SIZE (GET_MODE (DECL_INCOMING_RTL (decl)));
3685c87b03e5Sespie       size = MAX (size_rtl, size);
3686c87b03e5Sespie       instantiate_decl (DECL_INCOMING_RTL (decl), size, valid_only);
3687c87b03e5Sespie     }
3688c87b03e5Sespie 
3689c87b03e5Sespie   /* Now process all variables defined in the function or its subblocks.  */
3690c87b03e5Sespie   instantiate_decls_1 (DECL_INITIAL (fndecl), valid_only);
3691c87b03e5Sespie }
3692c87b03e5Sespie 
3693c87b03e5Sespie /* Subroutine of instantiate_decls: Process all decls in the given
3694c87b03e5Sespie    BLOCK node and all its subblocks.  */
3695c87b03e5Sespie 
3696c87b03e5Sespie static void
instantiate_decls_1(let,valid_only)3697c87b03e5Sespie instantiate_decls_1 (let, valid_only)
3698c87b03e5Sespie      tree let;
3699c87b03e5Sespie      int valid_only;
3700c87b03e5Sespie {
3701c87b03e5Sespie   tree t;
3702c87b03e5Sespie 
3703c87b03e5Sespie   for (t = BLOCK_VARS (let); t; t = TREE_CHAIN (t))
3704c87b03e5Sespie     if (DECL_RTL_SET_P (t))
3705c87b03e5Sespie       instantiate_decl (DECL_RTL (t),
3706c87b03e5Sespie 			int_size_in_bytes (TREE_TYPE (t)),
3707c87b03e5Sespie 			valid_only);
3708c87b03e5Sespie 
3709c87b03e5Sespie   /* Process all subblocks.  */
3710c87b03e5Sespie   for (t = BLOCK_SUBBLOCKS (let); t; t = TREE_CHAIN (t))
3711c87b03e5Sespie     instantiate_decls_1 (t, valid_only);
3712c87b03e5Sespie }
3713c87b03e5Sespie 
3714c87b03e5Sespie /* Subroutine of the preceding procedures: Given RTL representing a
3715c87b03e5Sespie    decl and the size of the object, do any instantiation required.
3716c87b03e5Sespie 
3717c87b03e5Sespie    If VALID_ONLY is nonzero, it means that the RTL should only be
3718c87b03e5Sespie    changed if the new address is valid.  */
3719c87b03e5Sespie 
3720c87b03e5Sespie static void
instantiate_decl(x,size,valid_only)3721c87b03e5Sespie instantiate_decl (x, size, valid_only)
3722c87b03e5Sespie      rtx x;
3723c87b03e5Sespie      HOST_WIDE_INT size;
3724c87b03e5Sespie      int valid_only;
3725c87b03e5Sespie {
3726c87b03e5Sespie   enum machine_mode mode;
3727c87b03e5Sespie   rtx addr;
3728c87b03e5Sespie 
3729c87b03e5Sespie   /* If this is not a MEM, no need to do anything.  Similarly if the
3730c87b03e5Sespie      address is a constant or a register that is not a virtual register.  */
3731c87b03e5Sespie 
3732c87b03e5Sespie   if (x == 0 || GET_CODE (x) != MEM)
3733c87b03e5Sespie     return;
3734c87b03e5Sespie 
3735c87b03e5Sespie   addr = XEXP (x, 0);
3736c87b03e5Sespie   if (CONSTANT_P (addr)
3737c87b03e5Sespie       || (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == REG)
3738c87b03e5Sespie       || (GET_CODE (addr) == REG
3739c87b03e5Sespie 	  && (REGNO (addr) < FIRST_VIRTUAL_REGISTER
3740c87b03e5Sespie 	      || REGNO (addr) > LAST_VIRTUAL_REGISTER)))
3741c87b03e5Sespie     return;
3742c87b03e5Sespie 
3743c87b03e5Sespie   /* If we should only do this if the address is valid, copy the address.
3744c87b03e5Sespie      We need to do this so we can undo any changes that might make the
3745c87b03e5Sespie      address invalid.  This copy is unfortunate, but probably can't be
3746c87b03e5Sespie      avoided.  */
3747c87b03e5Sespie 
3748c87b03e5Sespie   if (valid_only)
3749c87b03e5Sespie     addr = copy_rtx (addr);
3750c87b03e5Sespie 
3751c87b03e5Sespie   instantiate_virtual_regs_1 (&addr, NULL_RTX, 0);
3752c87b03e5Sespie 
3753c87b03e5Sespie   if (valid_only && size >= 0)
3754c87b03e5Sespie     {
3755c87b03e5Sespie       unsigned HOST_WIDE_INT decl_size = size;
3756c87b03e5Sespie 
3757c87b03e5Sespie       /* Now verify that the resulting address is valid for every integer or
3758c87b03e5Sespie 	 floating-point mode up to and including SIZE bytes long.  We do this
3759c87b03e5Sespie 	 since the object might be accessed in any mode and frame addresses
3760c87b03e5Sespie 	 are shared.  */
3761c87b03e5Sespie 
3762c87b03e5Sespie       for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3763c87b03e5Sespie 	   mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
3764c87b03e5Sespie 	   mode = GET_MODE_WIDER_MODE (mode))
3765c87b03e5Sespie 	if (! memory_address_p (mode, addr))
3766c87b03e5Sespie 	  return;
3767c87b03e5Sespie 
3768c87b03e5Sespie       for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
3769c87b03e5Sespie 	   mode != VOIDmode && GET_MODE_SIZE (mode) <= decl_size;
3770c87b03e5Sespie 	   mode = GET_MODE_WIDER_MODE (mode))
3771c87b03e5Sespie 	if (! memory_address_p (mode, addr))
3772c87b03e5Sespie 	  return;
3773c87b03e5Sespie     }
3774c87b03e5Sespie 
3775c87b03e5Sespie   /* Put back the address now that we have updated it and we either know
3776c87b03e5Sespie      it is valid or we don't care whether it is valid.  */
3777c87b03e5Sespie 
3778c87b03e5Sespie   XEXP (x, 0) = addr;
3779c87b03e5Sespie }
3780c87b03e5Sespie 
3781c87b03e5Sespie /* Given a piece of RTX and a pointer to a HOST_WIDE_INT, if the RTX
3782c87b03e5Sespie    is a virtual register, return the equivalent hard register and set the
3783c87b03e5Sespie    offset indirectly through the pointer.  Otherwise, return 0.  */
3784c87b03e5Sespie 
3785c87b03e5Sespie static rtx
instantiate_new_reg(x,poffset)3786c87b03e5Sespie instantiate_new_reg (x, poffset)
3787c87b03e5Sespie      rtx x;
3788c87b03e5Sespie      HOST_WIDE_INT *poffset;
3789c87b03e5Sespie {
3790c87b03e5Sespie   rtx new;
3791c87b03e5Sespie   HOST_WIDE_INT offset;
3792c87b03e5Sespie 
3793c87b03e5Sespie   if (x == virtual_incoming_args_rtx)
3794c87b03e5Sespie     new = arg_pointer_rtx, offset = in_arg_offset;
3795c87b03e5Sespie   else if (x == virtual_stack_vars_rtx)
3796c87b03e5Sespie     new = frame_pointer_rtx, offset = var_offset;
3797c87b03e5Sespie   else if (x == virtual_stack_dynamic_rtx)
3798c87b03e5Sespie     new = stack_pointer_rtx, offset = dynamic_offset;
3799c87b03e5Sespie   else if (x == virtual_outgoing_args_rtx)
3800c87b03e5Sespie     new = stack_pointer_rtx, offset = out_arg_offset;
3801c87b03e5Sespie   else if (x == virtual_cfa_rtx)
3802c87b03e5Sespie     new = arg_pointer_rtx, offset = cfa_offset;
3803c87b03e5Sespie   else
3804c87b03e5Sespie     return 0;
3805c87b03e5Sespie 
3806c87b03e5Sespie   *poffset = offset;
3807c87b03e5Sespie   return new;
3808c87b03e5Sespie }
3809c87b03e5Sespie 
3810c87b03e5Sespie 
3811c87b03e5Sespie /* Called when instantiate_virtual_regs has failed to update the instruction.
3812c87b03e5Sespie    Usually this means that non-matching instruction has been emit, however for
3813c87b03e5Sespie    asm statements it may be the problem in the constraints.  */
3814c87b03e5Sespie static void
instantiate_virtual_regs_lossage(insn)3815c87b03e5Sespie instantiate_virtual_regs_lossage (insn)
3816c87b03e5Sespie      rtx insn;
3817c87b03e5Sespie {
3818c87b03e5Sespie   if (asm_noperands (PATTERN (insn)) >= 0)
3819c87b03e5Sespie     {
3820c87b03e5Sespie       error_for_asm (insn, "impossible constraint in `asm'");
3821c87b03e5Sespie       delete_insn (insn);
3822c87b03e5Sespie     }
3823c87b03e5Sespie   else
3824c87b03e5Sespie     abort ();
3825c87b03e5Sespie }
3826c87b03e5Sespie /* Given a pointer to a piece of rtx and an optional pointer to the
3827c87b03e5Sespie    containing object, instantiate any virtual registers present in it.
3828c87b03e5Sespie 
3829c87b03e5Sespie    If EXTRA_INSNS, we always do the replacement and generate
3830c87b03e5Sespie    any extra insns before OBJECT.  If it zero, we do nothing if replacement
3831c87b03e5Sespie    is not valid.
3832c87b03e5Sespie 
3833c87b03e5Sespie    Return 1 if we either had nothing to do or if we were able to do the
3834c87b03e5Sespie    needed replacement.  Return 0 otherwise; we only return zero if
3835c87b03e5Sespie    EXTRA_INSNS is zero.
3836c87b03e5Sespie 
3837c87b03e5Sespie    We first try some simple transformations to avoid the creation of extra
3838c87b03e5Sespie    pseudos.  */
3839c87b03e5Sespie 
3840c87b03e5Sespie static int
instantiate_virtual_regs_1(loc,object,extra_insns)3841c87b03e5Sespie instantiate_virtual_regs_1 (loc, object, extra_insns)
3842c87b03e5Sespie      rtx *loc;
3843c87b03e5Sespie      rtx object;
3844c87b03e5Sespie      int extra_insns;
3845c87b03e5Sespie {
3846c87b03e5Sespie   rtx x;
3847c87b03e5Sespie   RTX_CODE code;
3848c87b03e5Sespie   rtx new = 0;
3849c87b03e5Sespie   HOST_WIDE_INT offset = 0;
3850c87b03e5Sespie   rtx temp;
3851c87b03e5Sespie   rtx seq;
3852c87b03e5Sespie   int i, j;
3853c87b03e5Sespie   const char *fmt;
3854c87b03e5Sespie 
3855c87b03e5Sespie   /* Re-start here to avoid recursion in common cases.  */
3856c87b03e5Sespie  restart:
3857c87b03e5Sespie 
3858c87b03e5Sespie   x = *loc;
3859c87b03e5Sespie   if (x == 0)
3860c87b03e5Sespie     return 1;
3861c87b03e5Sespie 
3862c87b03e5Sespie   /* We may have detected and deleted invalid asm statements.  */
3863c87b03e5Sespie   if (object && INSN_P (object) && INSN_DELETED_P (object))
3864c87b03e5Sespie     return 1;
3865c87b03e5Sespie 
3866c87b03e5Sespie   code = GET_CODE (x);
3867c87b03e5Sespie 
3868c87b03e5Sespie   /* Check for some special cases.  */
3869c87b03e5Sespie   switch (code)
3870c87b03e5Sespie     {
3871c87b03e5Sespie     case CONST_INT:
3872c87b03e5Sespie     case CONST_DOUBLE:
3873c87b03e5Sespie     case CONST_VECTOR:
3874c87b03e5Sespie     case CONST:
3875c87b03e5Sespie     case SYMBOL_REF:
3876c87b03e5Sespie     case CODE_LABEL:
3877c87b03e5Sespie     case PC:
3878c87b03e5Sespie     case CC0:
3879c87b03e5Sespie     case ASM_INPUT:
3880c87b03e5Sespie     case ADDR_VEC:
3881c87b03e5Sespie     case ADDR_DIFF_VEC:
3882c87b03e5Sespie     case RETURN:
3883c87b03e5Sespie       return 1;
3884c87b03e5Sespie 
3885c87b03e5Sespie     case SET:
3886c87b03e5Sespie       /* We are allowed to set the virtual registers.  This means that
3887c87b03e5Sespie 	 the actual register should receive the source minus the
3888c87b03e5Sespie 	 appropriate offset.  This is used, for example, in the handling
3889c87b03e5Sespie 	 of non-local gotos.  */
3890c87b03e5Sespie       if ((new = instantiate_new_reg (SET_DEST (x), &offset)) != 0)
3891c87b03e5Sespie 	{
3892c87b03e5Sespie 	  rtx src = SET_SRC (x);
3893c87b03e5Sespie 
3894c87b03e5Sespie 	  /* We are setting the register, not using it, so the relevant
3895c87b03e5Sespie 	     offset is the negative of the offset to use were we using
3896c87b03e5Sespie 	     the register.  */
3897c87b03e5Sespie 	  offset = - offset;
3898c87b03e5Sespie 	  instantiate_virtual_regs_1 (&src, NULL_RTX, 0);
3899c87b03e5Sespie 
3900c87b03e5Sespie 	  /* The only valid sources here are PLUS or REG.  Just do
3901c87b03e5Sespie 	     the simplest possible thing to handle them.  */
3902c87b03e5Sespie 	  if (GET_CODE (src) != REG && GET_CODE (src) != PLUS)
3903c87b03e5Sespie 	    {
3904c87b03e5Sespie 	      instantiate_virtual_regs_lossage (object);
3905c87b03e5Sespie 	      return 1;
3906c87b03e5Sespie 	    }
3907c87b03e5Sespie 
3908c87b03e5Sespie 	  start_sequence ();
3909c87b03e5Sespie 	  if (GET_CODE (src) != REG)
3910c87b03e5Sespie 	    temp = force_operand (src, NULL_RTX);
3911c87b03e5Sespie 	  else
3912c87b03e5Sespie 	    temp = src;
3913c87b03e5Sespie 	  temp = force_operand (plus_constant (temp, offset), NULL_RTX);
3914c87b03e5Sespie 	  seq = get_insns ();
3915c87b03e5Sespie 	  end_sequence ();
3916c87b03e5Sespie 
3917c87b03e5Sespie 	  emit_insn_before (seq, object);
3918c87b03e5Sespie 	  SET_DEST (x) = new;
3919c87b03e5Sespie 
3920c87b03e5Sespie 	  if (! validate_change (object, &SET_SRC (x), temp, 0)
3921c87b03e5Sespie 	      || ! extra_insns)
3922c87b03e5Sespie 	    instantiate_virtual_regs_lossage (object);
3923c87b03e5Sespie 
3924c87b03e5Sespie 	  return 1;
3925c87b03e5Sespie 	}
3926c87b03e5Sespie 
3927c87b03e5Sespie       instantiate_virtual_regs_1 (&SET_DEST (x), object, extra_insns);
3928c87b03e5Sespie       loc = &SET_SRC (x);
3929c87b03e5Sespie       goto restart;
3930c87b03e5Sespie 
3931c87b03e5Sespie     case PLUS:
3932c87b03e5Sespie       /* Handle special case of virtual register plus constant.  */
3933c87b03e5Sespie       if (CONSTANT_P (XEXP (x, 1)))
3934c87b03e5Sespie 	{
3935c87b03e5Sespie 	  rtx old, new_offset;
3936c87b03e5Sespie 
3937c87b03e5Sespie 	  /* Check for (plus (plus VIRT foo) (const_int)) first.  */
3938c87b03e5Sespie 	  if (GET_CODE (XEXP (x, 0)) == PLUS)
3939c87b03e5Sespie 	    {
3940c87b03e5Sespie 	      if ((new = instantiate_new_reg (XEXP (XEXP (x, 0), 0), &offset)))
3941c87b03e5Sespie 		{
3942c87b03e5Sespie 		  instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 1), object,
3943c87b03e5Sespie 					      extra_insns);
3944c87b03e5Sespie 		  new = gen_rtx_PLUS (Pmode, new, XEXP (XEXP (x, 0), 1));
3945c87b03e5Sespie 		}
3946c87b03e5Sespie 	      else
3947c87b03e5Sespie 		{
3948c87b03e5Sespie 		  loc = &XEXP (x, 0);
3949c87b03e5Sespie 		  goto restart;
3950c87b03e5Sespie 		}
3951c87b03e5Sespie 	    }
3952c87b03e5Sespie 
3953c87b03e5Sespie #ifdef POINTERS_EXTEND_UNSIGNED
3954c87b03e5Sespie 	  /* If we have (plus (subreg (virtual-reg)) (const_int)), we know
3955c87b03e5Sespie 	     we can commute the PLUS and SUBREG because pointers into the
3956c87b03e5Sespie 	     frame are well-behaved.  */
3957c87b03e5Sespie 	  else if (GET_CODE (XEXP (x, 0)) == SUBREG && GET_MODE (x) == ptr_mode
3958c87b03e5Sespie 		   && GET_CODE (XEXP (x, 1)) == CONST_INT
3959c87b03e5Sespie 		   && 0 != (new
3960c87b03e5Sespie 			    = instantiate_new_reg (SUBREG_REG (XEXP (x, 0)),
3961c87b03e5Sespie 						   &offset))
3962c87b03e5Sespie 		   && validate_change (object, loc,
3963c87b03e5Sespie 				       plus_constant (gen_lowpart (ptr_mode,
3964c87b03e5Sespie 								   new),
3965c87b03e5Sespie 						      offset
3966c87b03e5Sespie 						      + INTVAL (XEXP (x, 1))),
3967c87b03e5Sespie 				       0))
3968c87b03e5Sespie 		return 1;
3969c87b03e5Sespie #endif
3970c87b03e5Sespie 	  else if ((new = instantiate_new_reg (XEXP (x, 0), &offset)) == 0)
3971c87b03e5Sespie 	    {
3972c87b03e5Sespie 	      /* We know the second operand is a constant.  Unless the
3973c87b03e5Sespie 		 first operand is a REG (which has been already checked),
3974c87b03e5Sespie 		 it needs to be checked.  */
3975c87b03e5Sespie 	      if (GET_CODE (XEXP (x, 0)) != REG)
3976c87b03e5Sespie 		{
3977c87b03e5Sespie 		  loc = &XEXP (x, 0);
3978c87b03e5Sespie 		  goto restart;
3979c87b03e5Sespie 		}
3980c87b03e5Sespie 	      return 1;
3981c87b03e5Sespie 	    }
3982c87b03e5Sespie 
3983c87b03e5Sespie 	  new_offset = plus_constant (XEXP (x, 1), offset);
3984c87b03e5Sespie 
3985c87b03e5Sespie 	  /* If the new constant is zero, try to replace the sum with just
3986c87b03e5Sespie 	     the register.  */
3987c87b03e5Sespie 	  if (new_offset == const0_rtx
3988c87b03e5Sespie 	      && validate_change (object, loc, new, 0))
3989c87b03e5Sespie 	    return 1;
3990c87b03e5Sespie 
3991c87b03e5Sespie 	  /* Next try to replace the register and new offset.
3992c87b03e5Sespie 	     There are two changes to validate here and we can't assume that
3993c87b03e5Sespie 	     in the case of old offset equals new just changing the register
3994c87b03e5Sespie 	     will yield a valid insn.  In the interests of a little efficiency,
3995c87b03e5Sespie 	     however, we only call validate change once (we don't queue up the
3996c87b03e5Sespie 	     changes and then call apply_change_group).  */
3997c87b03e5Sespie 
3998c87b03e5Sespie 	  old = XEXP (x, 0);
3999c87b03e5Sespie 	  if (offset == 0
4000c87b03e5Sespie 	      ? ! validate_change (object, &XEXP (x, 0), new, 0)
4001c87b03e5Sespie 	      : (XEXP (x, 0) = new,
4002c87b03e5Sespie 		 ! validate_change (object, &XEXP (x, 1), new_offset, 0)))
4003c87b03e5Sespie 	    {
4004c87b03e5Sespie 	      if (! extra_insns)
4005c87b03e5Sespie 		{
4006c87b03e5Sespie 		  XEXP (x, 0) = old;
4007c87b03e5Sespie 		  return 0;
4008c87b03e5Sespie 		}
4009c87b03e5Sespie 
4010c87b03e5Sespie 	      /* Otherwise copy the new constant into a register and replace
4011c87b03e5Sespie 		 constant with that register.  */
4012c87b03e5Sespie 	      temp = gen_reg_rtx (Pmode);
4013c87b03e5Sespie 	      XEXP (x, 0) = new;
40145982dd4aSetoh 	      if (validate_change (object, &XEXP (x, 1), temp, 0)
40155982dd4aSetoh 		  && ! flag_propolice_protection)
4016c87b03e5Sespie 		emit_insn_before (gen_move_insn (temp, new_offset), object);
4017c87b03e5Sespie 	      else
4018c87b03e5Sespie 		{
4019c87b03e5Sespie 		  /* If that didn't work, replace this expression with a
4020c87b03e5Sespie 		     register containing the sum.  */
4021c87b03e5Sespie 
4022c87b03e5Sespie 		  XEXP (x, 0) = old;
4023c87b03e5Sespie 		  new = gen_rtx_PLUS (Pmode, new, new_offset);
4024c87b03e5Sespie 
4025c87b03e5Sespie 		  start_sequence ();
4026c87b03e5Sespie 		  temp = force_operand (new, NULL_RTX);
4027c87b03e5Sespie 		  seq = get_insns ();
4028c87b03e5Sespie 		  end_sequence ();
4029c87b03e5Sespie 
4030c87b03e5Sespie 		  emit_insn_before (seq, object);
4031c87b03e5Sespie 		  if (! validate_change (object, loc, temp, 0)
4032c87b03e5Sespie 		      && ! validate_replace_rtx (x, temp, object))
4033c87b03e5Sespie 		    {
4034c87b03e5Sespie 		      instantiate_virtual_regs_lossage (object);
4035c87b03e5Sespie 		      return 1;
4036c87b03e5Sespie 		    }
4037c87b03e5Sespie 		}
4038c87b03e5Sespie 	    }
4039c87b03e5Sespie 
4040c87b03e5Sespie 	  return 1;
4041c87b03e5Sespie 	}
4042c87b03e5Sespie 
4043c87b03e5Sespie       /* Fall through to generic two-operand expression case.  */
4044c87b03e5Sespie     case EXPR_LIST:
4045c87b03e5Sespie     case CALL:
4046c87b03e5Sespie     case COMPARE:
4047c87b03e5Sespie     case MINUS:
4048c87b03e5Sespie     case MULT:
4049c87b03e5Sespie     case DIV:      case UDIV:
4050c87b03e5Sespie     case MOD:      case UMOD:
4051c87b03e5Sespie     case AND:      case IOR:      case XOR:
4052c87b03e5Sespie     case ROTATERT: case ROTATE:
4053c87b03e5Sespie     case ASHIFTRT: case LSHIFTRT: case ASHIFT:
4054c87b03e5Sespie     case NE:       case EQ:
4055c87b03e5Sespie     case GE:       case GT:       case GEU:    case GTU:
4056c87b03e5Sespie     case LE:       case LT:       case LEU:    case LTU:
4057c87b03e5Sespie       if (XEXP (x, 1) && ! CONSTANT_P (XEXP (x, 1)))
4058c87b03e5Sespie 	instantiate_virtual_regs_1 (&XEXP (x, 1), object, extra_insns);
4059c87b03e5Sespie       loc = &XEXP (x, 0);
4060c87b03e5Sespie       goto restart;
4061c87b03e5Sespie 
4062c87b03e5Sespie     case MEM:
4063c87b03e5Sespie       /* Most cases of MEM that convert to valid addresses have already been
4064c87b03e5Sespie 	 handled by our scan of decls.  The only special handling we
4065c87b03e5Sespie 	 need here is to make a copy of the rtx to ensure it isn't being
4066c87b03e5Sespie 	 shared if we have to change it to a pseudo.
4067c87b03e5Sespie 
4068c87b03e5Sespie 	 If the rtx is a simple reference to an address via a virtual register,
4069c87b03e5Sespie 	 it can potentially be shared.  In such cases, first try to make it
4070c87b03e5Sespie 	 a valid address, which can also be shared.  Otherwise, copy it and
4071c87b03e5Sespie 	 proceed normally.
4072c87b03e5Sespie 
4073c87b03e5Sespie 	 First check for common cases that need no processing.  These are
4074c87b03e5Sespie 	 usually due to instantiation already being done on a previous instance
4075c87b03e5Sespie 	 of a shared rtx.  */
4076c87b03e5Sespie 
4077c87b03e5Sespie       temp = XEXP (x, 0);
4078c87b03e5Sespie       if (CONSTANT_ADDRESS_P (temp)
4079c87b03e5Sespie #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4080c87b03e5Sespie 	  || temp == arg_pointer_rtx
4081c87b03e5Sespie #endif
4082c87b03e5Sespie #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
4083c87b03e5Sespie 	  || temp == hard_frame_pointer_rtx
4084c87b03e5Sespie #endif
4085c87b03e5Sespie 	  || temp == frame_pointer_rtx)
4086c87b03e5Sespie 	return 1;
4087c87b03e5Sespie 
4088c87b03e5Sespie       if (GET_CODE (temp) == PLUS
4089c87b03e5Sespie 	  && CONSTANT_ADDRESS_P (XEXP (temp, 1))
4090c87b03e5Sespie 	  && (XEXP (temp, 0) == frame_pointer_rtx
4091c87b03e5Sespie #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
4092c87b03e5Sespie 	      || XEXP (temp, 0) == hard_frame_pointer_rtx
4093c87b03e5Sespie #endif
4094c87b03e5Sespie #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
4095c87b03e5Sespie 	      || XEXP (temp, 0) == arg_pointer_rtx
4096c87b03e5Sespie #endif
4097c87b03e5Sespie 	      ))
4098c87b03e5Sespie 	return 1;
4099c87b03e5Sespie 
4100c87b03e5Sespie       if (temp == virtual_stack_vars_rtx
4101c87b03e5Sespie 	  || temp == virtual_incoming_args_rtx
4102c87b03e5Sespie 	  || (GET_CODE (temp) == PLUS
4103c87b03e5Sespie 	      && CONSTANT_ADDRESS_P (XEXP (temp, 1))
4104c87b03e5Sespie 	      && (XEXP (temp, 0) == virtual_stack_vars_rtx
4105c87b03e5Sespie 		  || XEXP (temp, 0) == virtual_incoming_args_rtx)))
4106c87b03e5Sespie 	{
4107c87b03e5Sespie 	  /* This MEM may be shared.  If the substitution can be done without
4108c87b03e5Sespie 	     the need to generate new pseudos, we want to do it in place
4109c87b03e5Sespie 	     so all copies of the shared rtx benefit.  The call below will
4110c87b03e5Sespie 	     only make substitutions if the resulting address is still
4111c87b03e5Sespie 	     valid.
4112c87b03e5Sespie 
4113c87b03e5Sespie 	     Note that we cannot pass X as the object in the recursive call
4114c87b03e5Sespie 	     since the insn being processed may not allow all valid
4115c87b03e5Sespie 	     addresses.  However, if we were not passed on object, we can
4116c87b03e5Sespie 	     only modify X without copying it if X will have a valid
4117c87b03e5Sespie 	     address.
4118c87b03e5Sespie 
4119c87b03e5Sespie 	     ??? Also note that this can still lose if OBJECT is an insn that
4120c87b03e5Sespie 	     has less restrictions on an address that some other insn.
4121c87b03e5Sespie 	     In that case, we will modify the shared address.  This case
4122c87b03e5Sespie 	     doesn't seem very likely, though.  One case where this could
4123c87b03e5Sespie 	     happen is in the case of a USE or CLOBBER reference, but we
4124c87b03e5Sespie 	     take care of that below.  */
4125c87b03e5Sespie 
4126c87b03e5Sespie 	  if (instantiate_virtual_regs_1 (&XEXP (x, 0),
4127c87b03e5Sespie 					  object ? object : x, 0))
4128c87b03e5Sespie 	    return 1;
4129c87b03e5Sespie 
4130c87b03e5Sespie 	  /* Otherwise make a copy and process that copy.  We copy the entire
4131c87b03e5Sespie 	     RTL expression since it might be a PLUS which could also be
4132c87b03e5Sespie 	     shared.  */
4133c87b03e5Sespie 	  *loc = x = copy_rtx (x);
4134c87b03e5Sespie 	}
4135c87b03e5Sespie 
4136c87b03e5Sespie       /* Fall through to generic unary operation case.  */
4137c87b03e5Sespie     case PREFETCH:
4138c87b03e5Sespie     case SUBREG:
4139c87b03e5Sespie     case STRICT_LOW_PART:
4140c87b03e5Sespie     case NEG:          case NOT:
4141c87b03e5Sespie     case PRE_DEC:      case PRE_INC:      case POST_DEC:    case POST_INC:
4142c87b03e5Sespie     case SIGN_EXTEND:  case ZERO_EXTEND:
4143c87b03e5Sespie     case TRUNCATE:     case FLOAT_EXTEND: case FLOAT_TRUNCATE:
4144c87b03e5Sespie     case FLOAT:        case FIX:
4145c87b03e5Sespie     case UNSIGNED_FIX: case UNSIGNED_FLOAT:
4146c87b03e5Sespie     case ABS:
4147c87b03e5Sespie     case SQRT:
4148c87b03e5Sespie     case FFS:
4149c87b03e5Sespie       /* These case either have just one operand or we know that we need not
4150c87b03e5Sespie 	 check the rest of the operands.  */
4151c87b03e5Sespie       loc = &XEXP (x, 0);
4152c87b03e5Sespie       goto restart;
4153c87b03e5Sespie 
4154c87b03e5Sespie     case USE:
4155c87b03e5Sespie     case CLOBBER:
4156c87b03e5Sespie       /* If the operand is a MEM, see if the change is a valid MEM.  If not,
4157c87b03e5Sespie 	 go ahead and make the invalid one, but do it to a copy.  For a REG,
4158c87b03e5Sespie 	 just make the recursive call, since there's no chance of a problem.  */
4159c87b03e5Sespie 
4160c87b03e5Sespie       if ((GET_CODE (XEXP (x, 0)) == MEM
4161c87b03e5Sespie 	   && instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), XEXP (x, 0),
4162c87b03e5Sespie 					  0))
4163c87b03e5Sespie 	  || (GET_CODE (XEXP (x, 0)) == REG
4164c87b03e5Sespie 	      && instantiate_virtual_regs_1 (&XEXP (x, 0), object, 0)))
4165c87b03e5Sespie 	return 1;
4166c87b03e5Sespie 
4167c87b03e5Sespie       XEXP (x, 0) = copy_rtx (XEXP (x, 0));
4168c87b03e5Sespie       loc = &XEXP (x, 0);
4169c87b03e5Sespie       goto restart;
4170c87b03e5Sespie 
4171c87b03e5Sespie     case REG:
4172c87b03e5Sespie       /* Try to replace with a PLUS.  If that doesn't work, compute the sum
4173c87b03e5Sespie 	 in front of this insn and substitute the temporary.  */
4174c87b03e5Sespie       if ((new = instantiate_new_reg (x, &offset)) != 0)
4175c87b03e5Sespie 	{
4176c87b03e5Sespie 	  temp = plus_constant (new, offset);
4177c87b03e5Sespie 	  if (!validate_change (object, loc, temp, 0))
4178c87b03e5Sespie 	    {
4179c87b03e5Sespie 	      if (! extra_insns)
4180c87b03e5Sespie 		return 0;
4181c87b03e5Sespie 
4182c87b03e5Sespie 	      start_sequence ();
4183c87b03e5Sespie 	      temp = force_operand (temp, NULL_RTX);
4184c87b03e5Sespie 	      seq = get_insns ();
4185c87b03e5Sespie 	      end_sequence ();
4186c87b03e5Sespie 
4187c87b03e5Sespie 	      emit_insn_before (seq, object);
4188c87b03e5Sespie 	      if (! validate_change (object, loc, temp, 0)
4189c87b03e5Sespie 		  && ! validate_replace_rtx (x, temp, object))
4190c87b03e5Sespie 	        instantiate_virtual_regs_lossage (object);
4191c87b03e5Sespie 	    }
4192c87b03e5Sespie 	}
4193c87b03e5Sespie 
4194c87b03e5Sespie       return 1;
4195c87b03e5Sespie 
4196c87b03e5Sespie     case ADDRESSOF:
4197c87b03e5Sespie       if (GET_CODE (XEXP (x, 0)) == REG)
4198c87b03e5Sespie 	return 1;
4199c87b03e5Sespie 
4200c87b03e5Sespie       else if (GET_CODE (XEXP (x, 0)) == MEM)
4201c87b03e5Sespie 	{
4202c87b03e5Sespie 	  /* If we have a (addressof (mem ..)), do any instantiation inside
4203c87b03e5Sespie 	     since we know we'll be making the inside valid when we finally
4204c87b03e5Sespie 	     remove the ADDRESSOF.  */
4205c87b03e5Sespie 	  instantiate_virtual_regs_1 (&XEXP (XEXP (x, 0), 0), NULL_RTX, 0);
4206c87b03e5Sespie 	  return 1;
4207c87b03e5Sespie 	}
4208c87b03e5Sespie       break;
4209c87b03e5Sespie 
4210c87b03e5Sespie     default:
4211c87b03e5Sespie       break;
4212c87b03e5Sespie     }
4213c87b03e5Sespie 
4214c87b03e5Sespie   /* Scan all subexpressions.  */
4215c87b03e5Sespie   fmt = GET_RTX_FORMAT (code);
4216c87b03e5Sespie   for (i = 0; i < GET_RTX_LENGTH (code); i++, fmt++)
4217c87b03e5Sespie     if (*fmt == 'e')
4218c87b03e5Sespie       {
4219c87b03e5Sespie 	if (!instantiate_virtual_regs_1 (&XEXP (x, i), object, extra_insns))
4220c87b03e5Sespie 	  return 0;
4221c87b03e5Sespie       }
4222c87b03e5Sespie     else if (*fmt == 'E')
4223c87b03e5Sespie       for (j = 0; j < XVECLEN (x, i); j++)
4224c87b03e5Sespie 	if (! instantiate_virtual_regs_1 (&XVECEXP (x, i, j), object,
4225c87b03e5Sespie 					  extra_insns))
4226c87b03e5Sespie 	  return 0;
4227c87b03e5Sespie 
4228c87b03e5Sespie   return 1;
4229c87b03e5Sespie }
4230c87b03e5Sespie 
4231c87b03e5Sespie /* Optimization: assuming this function does not receive nonlocal gotos,
4232c87b03e5Sespie    delete the handlers for such, as well as the insns to establish
4233c87b03e5Sespie    and disestablish them.  */
4234c87b03e5Sespie 
static void
delete_handlers ()
{
  rtx insn;
  /* Scan every insn of the current function.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      /* Delete the handler by turning off the flag that would
	 prevent jump_optimize from deleting it.
	 Also permit deletion of the nonlocal labels themselves
	 if nothing local refers to them.  */
      if (GET_CODE (insn) == CODE_LABEL)
	{
	  tree t, last_t;

	  LABEL_PRESERVE_P (insn) = 0;

	  /* Remove it from the nonlocal_label list, to avoid confusing
	     flow.  */
	  for (t = nonlocal_labels, last_t = 0; t;
	       last_t = t, t = TREE_CHAIN (t))
	    if (DECL_RTL (TREE_VALUE (t)) == insn)
	      break;
	  if (t)
	    {
	      /* Unlink T: either it is the list head or it follows LAST_T.  */
	      if (! last_t)
		nonlocal_labels = TREE_CHAIN (nonlocal_labels);
	      else
		TREE_CHAIN (last_t) = TREE_CHAIN (t);
	    }
	}
      if (GET_CODE (insn) == INSN)
	{
	  int can_delete = 0;
	  rtx t;
	  /* An insn that mentions any nonlocal-goto handler slot exists
	     only to maintain the handler; mark it deletable.  */
	  for (t = nonlocal_goto_handler_slots; t != 0; t = XEXP (t, 1))
	    if (reg_mentioned_p (t, PATTERN (insn)))
	      {
		can_delete = 1;
		break;
	      }
	  /* Likewise for insns referencing the saved stack level used by
	     nonlocal gotos.  */
	  if (can_delete
	      || (nonlocal_goto_stack_level != 0
		  && reg_mentioned_p (nonlocal_goto_stack_level,
				      PATTERN (insn))))
	    delete_related_insns (insn);
	}
    }
}
4283c87b03e5Sespie 
/* Accessor for the file-static max_parm_reg counter (presumably one more
   than the highest pseudo register number used for a parameter -- see the
   code that maintains it elsewhere in this file).  */

int
max_parm_reg_num ()
{
  return max_parm_reg;
}
4289c87b03e5Sespie 
4290c87b03e5Sespie /* Return the first insn following those generated by `assign_parms'.  */
4291c87b03e5Sespie 
4292c87b03e5Sespie rtx
get_first_nonparm_insn()4293c87b03e5Sespie get_first_nonparm_insn ()
4294c87b03e5Sespie {
4295c87b03e5Sespie   if (last_parm_insn)
4296c87b03e5Sespie     return NEXT_INSN (last_parm_insn);
4297c87b03e5Sespie   return get_insns ();
4298c87b03e5Sespie }
4299c87b03e5Sespie 
4300c87b03e5Sespie /* Return the first NOTE_INSN_BLOCK_BEG note in the function.
4301c87b03e5Sespie    Crash if there is none.  */
4302c87b03e5Sespie 
4303c87b03e5Sespie rtx
get_first_block_beg()4304c87b03e5Sespie get_first_block_beg ()
4305c87b03e5Sespie {
4306c87b03e5Sespie   rtx searcher;
4307c87b03e5Sespie   rtx insn = get_first_nonparm_insn ();
4308c87b03e5Sespie 
4309c87b03e5Sespie   for (searcher = insn; searcher; searcher = NEXT_INSN (searcher))
4310c87b03e5Sespie     if (GET_CODE (searcher) == NOTE
4311c87b03e5Sespie 	&& NOTE_LINE_NUMBER (searcher) == NOTE_INSN_BLOCK_BEG)
4312c87b03e5Sespie       return searcher;
4313c87b03e5Sespie 
4314c87b03e5Sespie   abort ();	/* Invalid call to this function.  (See comments above.)  */
4315c87b03e5Sespie   return NULL_RTX;
4316c87b03e5Sespie }
4317c87b03e5Sespie 
4318c87b03e5Sespie /* Return 1 if EXP is an aggregate type (or a value with aggregate type).
4319c87b03e5Sespie    This means a type for which function calls must pass an address to the
4320c87b03e5Sespie    function or get an address back from the function.
4321c87b03e5Sespie    EXP may be a type node or an expression (whose type is tested).  */
4322c87b03e5Sespie 
4323c87b03e5Sespie int
aggregate_value_p(exp)4324c87b03e5Sespie aggregate_value_p (exp)
4325c87b03e5Sespie      tree exp;
4326c87b03e5Sespie {
4327c87b03e5Sespie   int i, regno, nregs;
4328c87b03e5Sespie   rtx reg;
4329c87b03e5Sespie 
4330c87b03e5Sespie   tree type = (TYPE_P (exp)) ? exp : TREE_TYPE (exp);
4331c87b03e5Sespie 
4332c87b03e5Sespie   if (TREE_CODE (type) == VOID_TYPE)
4333c87b03e5Sespie     return 0;
4334c87b03e5Sespie   if (RETURN_IN_MEMORY (type))
4335c87b03e5Sespie     return 1;
4336c87b03e5Sespie   /* Types that are TREE_ADDRESSABLE must be constructed in memory,
4337c87b03e5Sespie      and thus can't be returned in registers.  */
4338c87b03e5Sespie   if (TREE_ADDRESSABLE (type))
4339c87b03e5Sespie     return 1;
4340c87b03e5Sespie   if (flag_pcc_struct_return && AGGREGATE_TYPE_P (type))
4341c87b03e5Sespie     return 1;
4342c87b03e5Sespie   /* Make sure we have suitable call-clobbered regs to return
4343c87b03e5Sespie      the value in; if not, we must return it in memory.  */
4344c87b03e5Sespie   reg = hard_function_value (type, 0, 0);
4345c87b03e5Sespie 
4346c87b03e5Sespie   /* If we have something other than a REG (e.g. a PARALLEL), then assume
4347c87b03e5Sespie      it is OK.  */
4348c87b03e5Sespie   if (GET_CODE (reg) != REG)
4349c87b03e5Sespie     return 0;
4350c87b03e5Sespie 
4351c87b03e5Sespie   regno = REGNO (reg);
4352c87b03e5Sespie   nregs = HARD_REGNO_NREGS (regno, TYPE_MODE (type));
4353c87b03e5Sespie   for (i = 0; i < nregs; i++)
4354c87b03e5Sespie     if (! call_used_regs[regno + i])
4355c87b03e5Sespie       return 1;
4356c87b03e5Sespie   return 0;
4357c87b03e5Sespie }
4358c87b03e5Sespie 
4359c87b03e5Sespie /* Assign RTL expressions to the function's parameters.
4360c87b03e5Sespie    This may involve copying them into registers and using
4361c87b03e5Sespie    those registers as the RTL for them.  */
4362c87b03e5Sespie 
4363c87b03e5Sespie void
assign_parms(fndecl)4364c87b03e5Sespie assign_parms (fndecl)
4365c87b03e5Sespie      tree fndecl;
4366c87b03e5Sespie {
4367c87b03e5Sespie   tree parm;
4368c87b03e5Sespie   rtx entry_parm = 0;
4369c87b03e5Sespie   rtx stack_parm = 0;
4370c87b03e5Sespie   CUMULATIVE_ARGS args_so_far;
4371c87b03e5Sespie   enum machine_mode promoted_mode, passed_mode;
4372c87b03e5Sespie   enum machine_mode nominal_mode, promoted_nominal_mode;
4373c87b03e5Sespie   int unsignedp;
4374c87b03e5Sespie   /* Total space needed so far for args on the stack,
4375c87b03e5Sespie      given as a constant and a tree-expression.  */
4376c87b03e5Sespie   struct args_size stack_args_size;
4377c87b03e5Sespie   tree fntype = TREE_TYPE (fndecl);
4378c87b03e5Sespie   tree fnargs = DECL_ARGUMENTS (fndecl);
4379c87b03e5Sespie   /* This is used for the arg pointer when referring to stack args.  */
4380c87b03e5Sespie   rtx internal_arg_pointer;
4381c87b03e5Sespie   /* This is a dummy PARM_DECL that we used for the function result if
4382c87b03e5Sespie      the function returns a structure.  */
4383c87b03e5Sespie   tree function_result_decl = 0;
4384c87b03e5Sespie #ifdef SETUP_INCOMING_VARARGS
4385c87b03e5Sespie   int varargs_setup = 0;
4386c87b03e5Sespie #endif
4387c87b03e5Sespie   rtx conversion_insns = 0;
4388c87b03e5Sespie   struct args_size alignment_pad;
4389c87b03e5Sespie 
4390c87b03e5Sespie   /* Nonzero if function takes extra anonymous args.
4391c87b03e5Sespie      This means the last named arg must be on the stack
4392c87b03e5Sespie      right before the anonymous ones.  */
4393c87b03e5Sespie   int stdarg
4394c87b03e5Sespie     = (TYPE_ARG_TYPES (fntype) != 0
4395c87b03e5Sespie        && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
4396c87b03e5Sespie 	   != void_type_node));
4397c87b03e5Sespie 
4398c87b03e5Sespie   current_function_stdarg = stdarg;
4399c87b03e5Sespie 
4400c87b03e5Sespie   /* If the reg that the virtual arg pointer will be translated into is
4401c87b03e5Sespie      not a fixed reg or is the stack pointer, make a copy of the virtual
4402c87b03e5Sespie      arg pointer, and address parms via the copy.  The frame pointer is
4403c87b03e5Sespie      considered fixed even though it is not marked as such.
4404c87b03e5Sespie 
4405c87b03e5Sespie      The second time through, simply use ap to avoid generating rtx.  */
4406c87b03e5Sespie 
4407c87b03e5Sespie   if ((ARG_POINTER_REGNUM == STACK_POINTER_REGNUM
4408c87b03e5Sespie        || ! (fixed_regs[ARG_POINTER_REGNUM]
4409c87b03e5Sespie 	     || ARG_POINTER_REGNUM == FRAME_POINTER_REGNUM)))
4410c87b03e5Sespie     internal_arg_pointer = copy_to_reg (virtual_incoming_args_rtx);
4411c87b03e5Sespie   else
4412c87b03e5Sespie     internal_arg_pointer = virtual_incoming_args_rtx;
4413c87b03e5Sespie   current_function_internal_arg_pointer = internal_arg_pointer;
4414c87b03e5Sespie 
4415c87b03e5Sespie   stack_args_size.constant = 0;
4416c87b03e5Sespie   stack_args_size.var = 0;
4417c87b03e5Sespie 
4418c87b03e5Sespie   /* If struct value address is treated as the first argument, make it so.  */
4419c87b03e5Sespie   if (aggregate_value_p (DECL_RESULT (fndecl))
4420c87b03e5Sespie       && ! current_function_returns_pcc_struct
4421c87b03e5Sespie       && struct_value_incoming_rtx == 0)
4422c87b03e5Sespie     {
4423c87b03e5Sespie       tree type = build_pointer_type (TREE_TYPE (fntype));
4424c87b03e5Sespie 
4425c87b03e5Sespie       function_result_decl = build_decl (PARM_DECL, NULL_TREE, type);
4426c87b03e5Sespie 
4427c87b03e5Sespie       DECL_ARG_TYPE (function_result_decl) = type;
4428c87b03e5Sespie       TREE_CHAIN (function_result_decl) = fnargs;
4429c87b03e5Sespie       fnargs = function_result_decl;
4430c87b03e5Sespie     }
4431c87b03e5Sespie 
4432c87b03e5Sespie   max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
4433c87b03e5Sespie   parm_reg_stack_loc = (rtx *) ggc_alloc_cleared (max_parm_reg * sizeof (rtx));
4434c87b03e5Sespie 
4435c87b03e5Sespie #ifdef INIT_CUMULATIVE_INCOMING_ARGS
4436c87b03e5Sespie   INIT_CUMULATIVE_INCOMING_ARGS (args_so_far, fntype, NULL_RTX);
4437c87b03e5Sespie #else
4438c87b03e5Sespie   INIT_CUMULATIVE_ARGS (args_so_far, fntype, NULL_RTX, 0);
4439c87b03e5Sespie #endif
4440c87b03e5Sespie 
4441c87b03e5Sespie   /* We haven't yet found an argument that we must push and pretend the
4442c87b03e5Sespie      caller did.  */
4443c87b03e5Sespie   current_function_pretend_args_size = 0;
4444c87b03e5Sespie 
4445c87b03e5Sespie   for (parm = fnargs; parm; parm = TREE_CHAIN (parm))
4446c87b03e5Sespie     {
4447c87b03e5Sespie       struct args_size stack_offset;
4448c87b03e5Sespie       struct args_size arg_size;
4449c87b03e5Sespie       int passed_pointer = 0;
4450c87b03e5Sespie       int did_conversion = 0;
4451c87b03e5Sespie       tree passed_type = DECL_ARG_TYPE (parm);
4452c87b03e5Sespie       tree nominal_type = TREE_TYPE (parm);
4453c87b03e5Sespie       int pretend_named;
4454c87b03e5Sespie       int last_named = 0, named_arg;
4455c87b03e5Sespie 
4456c87b03e5Sespie       /* Set LAST_NAMED if this is last named arg before last
4457c87b03e5Sespie 	 anonymous args.  */
4458c87b03e5Sespie       if (stdarg)
4459c87b03e5Sespie 	{
4460c87b03e5Sespie 	  tree tem;
4461c87b03e5Sespie 
4462c87b03e5Sespie 	  for (tem = TREE_CHAIN (parm); tem; tem = TREE_CHAIN (tem))
4463c87b03e5Sespie 	    if (DECL_NAME (tem))
4464c87b03e5Sespie 	      break;
4465c87b03e5Sespie 
4466c87b03e5Sespie 	  if (tem == 0)
4467c87b03e5Sespie 	    last_named = 1;
4468c87b03e5Sespie 	}
4469c87b03e5Sespie       /* Set NAMED_ARG if this arg should be treated as a named arg.  For
4470c87b03e5Sespie 	 most machines, if this is a varargs/stdarg function, then we treat
4471c87b03e5Sespie 	 the last named arg as if it were anonymous too.  */
4472c87b03e5Sespie       named_arg = STRICT_ARGUMENT_NAMING ? 1 : ! last_named;
4473c87b03e5Sespie 
4474c87b03e5Sespie       if (TREE_TYPE (parm) == error_mark_node
4475c87b03e5Sespie 	  /* This can happen after weird syntax errors
4476c87b03e5Sespie 	     or if an enum type is defined among the parms.  */
4477c87b03e5Sespie 	  || TREE_CODE (parm) != PARM_DECL
4478c87b03e5Sespie 	  || passed_type == NULL)
4479c87b03e5Sespie 	{
4480c87b03e5Sespie 	  SET_DECL_RTL (parm, gen_rtx_MEM (BLKmode, const0_rtx));
4481c87b03e5Sespie 	  DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
4482c87b03e5Sespie 	  TREE_USED (parm) = 1;
4483c87b03e5Sespie 	  continue;
4484c87b03e5Sespie 	}
4485c87b03e5Sespie 
4486c87b03e5Sespie       /* Find mode of arg as it is passed, and mode of arg
4487c87b03e5Sespie 	 as it should be during execution of this function.  */
4488c87b03e5Sespie       passed_mode = TYPE_MODE (passed_type);
4489c87b03e5Sespie       nominal_mode = TYPE_MODE (nominal_type);
4490c87b03e5Sespie 
4491c87b03e5Sespie       /* If the parm's mode is VOID, its value doesn't matter,
4492c87b03e5Sespie 	 and avoid the usual things like emit_move_insn that could crash.  */
4493c87b03e5Sespie       if (nominal_mode == VOIDmode)
4494c87b03e5Sespie 	{
4495c87b03e5Sespie 	  SET_DECL_RTL (parm, const0_rtx);
4496c87b03e5Sespie 	  DECL_INCOMING_RTL (parm) = DECL_RTL (parm);
4497c87b03e5Sespie 	  continue;
4498c87b03e5Sespie 	}
4499c87b03e5Sespie 
4500c87b03e5Sespie       /* If the parm is to be passed as a transparent union, use the
4501c87b03e5Sespie 	 type of the first field for the tests below.  We have already
4502c87b03e5Sespie 	 verified that the modes are the same.  */
4503c87b03e5Sespie       if (DECL_TRANSPARENT_UNION (parm)
4504c87b03e5Sespie 	  || (TREE_CODE (passed_type) == UNION_TYPE
4505c87b03e5Sespie 	      && TYPE_TRANSPARENT_UNION (passed_type)))
4506c87b03e5Sespie 	passed_type = TREE_TYPE (TYPE_FIELDS (passed_type));
4507c87b03e5Sespie 
4508c87b03e5Sespie       /* See if this arg was passed by invisible reference.  It is if
4509c87b03e5Sespie 	 it is an object whose size depends on the contents of the
4510c87b03e5Sespie 	 object itself or if the machine requires these objects be passed
4511c87b03e5Sespie 	 that way.  */
4512c87b03e5Sespie 
4513c87b03e5Sespie       if ((TREE_CODE (TYPE_SIZE (passed_type)) != INTEGER_CST
4514c87b03e5Sespie 	   && contains_placeholder_p (TYPE_SIZE (passed_type)))
4515c87b03e5Sespie 	  || TREE_ADDRESSABLE (passed_type)
4516c87b03e5Sespie #ifdef FUNCTION_ARG_PASS_BY_REFERENCE
4517c87b03e5Sespie 	  || FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, passed_mode,
4518c87b03e5Sespie 					      passed_type, named_arg)
4519c87b03e5Sespie #endif
4520c87b03e5Sespie 	  )
4521c87b03e5Sespie 	{
4522c87b03e5Sespie 	  passed_type = nominal_type = build_pointer_type (passed_type);
4523c87b03e5Sespie 	  passed_pointer = 1;
4524c87b03e5Sespie 	  passed_mode = nominal_mode = Pmode;
4525c87b03e5Sespie 	}
4526c87b03e5Sespie       /* See if the frontend wants to pass this by invisible reference.  */
4527c87b03e5Sespie       else if (passed_type != nominal_type
4528c87b03e5Sespie 	       && POINTER_TYPE_P (passed_type)
4529c87b03e5Sespie 	       && TREE_TYPE (passed_type) == nominal_type)
4530c87b03e5Sespie 	{
4531c87b03e5Sespie 	  nominal_type = passed_type;
4532c87b03e5Sespie 	  passed_pointer = 1;
4533c87b03e5Sespie 	  passed_mode = nominal_mode = Pmode;
4534c87b03e5Sespie 	}
4535c87b03e5Sespie 
4536c87b03e5Sespie       promoted_mode = passed_mode;
4537c87b03e5Sespie 
4538c87b03e5Sespie #ifdef PROMOTE_FUNCTION_ARGS
4539c87b03e5Sespie       /* Compute the mode in which the arg is actually extended to.  */
4540c87b03e5Sespie       unsignedp = TREE_UNSIGNED (passed_type);
4541c87b03e5Sespie       promoted_mode = promote_mode (passed_type, promoted_mode, &unsignedp, 1);
4542c87b03e5Sespie #endif
4543c87b03e5Sespie 
4544c87b03e5Sespie       /* Let machine desc say which reg (if any) the parm arrives in.
4545c87b03e5Sespie 	 0 means it arrives on the stack.  */
4546c87b03e5Sespie #ifdef FUNCTION_INCOMING_ARG
4547c87b03e5Sespie       entry_parm = FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4548c87b03e5Sespie 					  passed_type, named_arg);
4549c87b03e5Sespie #else
4550c87b03e5Sespie       entry_parm = FUNCTION_ARG (args_so_far, promoted_mode,
4551c87b03e5Sespie 				 passed_type, named_arg);
4552c87b03e5Sespie #endif
4553c87b03e5Sespie 
4554c87b03e5Sespie       if (entry_parm == 0)
4555c87b03e5Sespie 	promoted_mode = passed_mode;
4556c87b03e5Sespie 
4557c87b03e5Sespie #ifdef SETUP_INCOMING_VARARGS
4558c87b03e5Sespie       /* If this is the last named parameter, do any required setup for
4559c87b03e5Sespie 	 varargs or stdargs.  We need to know about the case of this being an
4560c87b03e5Sespie 	 addressable type, in which case we skip the registers it
4561c87b03e5Sespie 	 would have arrived in.
4562c87b03e5Sespie 
4563c87b03e5Sespie 	 For stdargs, LAST_NAMED will be set for two parameters, the one that
4564c87b03e5Sespie 	 is actually the last named, and the dummy parameter.  We only
4565c87b03e5Sespie 	 want to do this action once.
4566c87b03e5Sespie 
4567c87b03e5Sespie 	 Also, indicate when RTL generation is to be suppressed.  */
4568c87b03e5Sespie       if (last_named && !varargs_setup)
4569c87b03e5Sespie 	{
4570c87b03e5Sespie 	  SETUP_INCOMING_VARARGS (args_so_far, promoted_mode, passed_type,
4571c87b03e5Sespie 				  current_function_pretend_args_size, 0);
4572c87b03e5Sespie 	  varargs_setup = 1;
4573c87b03e5Sespie 	}
4574c87b03e5Sespie #endif
4575c87b03e5Sespie 
4576c87b03e5Sespie       /* Determine parm's home in the stack,
4577c87b03e5Sespie 	 in case it arrives in the stack or we should pretend it did.
4578c87b03e5Sespie 
4579c87b03e5Sespie 	 Compute the stack position and rtx where the argument arrives
4580c87b03e5Sespie 	 and its size.
4581c87b03e5Sespie 
4582c87b03e5Sespie 	 There is one complexity here:  If this was a parameter that would
4583c87b03e5Sespie 	 have been passed in registers, but wasn't only because it is
4584c87b03e5Sespie 	 __builtin_va_alist, we want locate_and_pad_parm to treat it as if
4585c87b03e5Sespie 	 it came in a register so that REG_PARM_STACK_SPACE isn't skipped.
4586c87b03e5Sespie 	 In this case, we call FUNCTION_ARG with NAMED set to 1 instead of
4587c87b03e5Sespie 	 0 as it was the previous time.  */
4588c87b03e5Sespie 
4589c87b03e5Sespie       pretend_named = named_arg || PRETEND_OUTGOING_VARARGS_NAMED;
4590c87b03e5Sespie       locate_and_pad_parm (promoted_mode, passed_type,
4591c87b03e5Sespie #ifdef STACK_PARMS_IN_REG_PARM_AREA
4592c87b03e5Sespie 			   1,
4593c87b03e5Sespie #else
4594c87b03e5Sespie #ifdef FUNCTION_INCOMING_ARG
4595c87b03e5Sespie 			   FUNCTION_INCOMING_ARG (args_so_far, promoted_mode,
4596c87b03e5Sespie 						  passed_type,
4597c87b03e5Sespie 						  pretend_named) != 0,
4598c87b03e5Sespie #else
4599c87b03e5Sespie 			   FUNCTION_ARG (args_so_far, promoted_mode,
4600c87b03e5Sespie 					 passed_type,
4601c87b03e5Sespie 					 pretend_named) != 0,
4602c87b03e5Sespie #endif
4603c87b03e5Sespie #endif
4604c87b03e5Sespie 			   fndecl, &stack_args_size, &stack_offset, &arg_size,
4605c87b03e5Sespie 			   &alignment_pad);
4606c87b03e5Sespie 
4607c87b03e5Sespie       {
4608c87b03e5Sespie 	rtx offset_rtx = ARGS_SIZE_RTX (stack_offset);
4609c87b03e5Sespie 
4610c87b03e5Sespie 	if (offset_rtx == const0_rtx)
4611c87b03e5Sespie 	  stack_parm = gen_rtx_MEM (promoted_mode, internal_arg_pointer);
4612c87b03e5Sespie 	else
4613c87b03e5Sespie 	  stack_parm = gen_rtx_MEM (promoted_mode,
4614c87b03e5Sespie 				    gen_rtx_PLUS (Pmode,
4615c87b03e5Sespie 						  internal_arg_pointer,
4616c87b03e5Sespie 						  offset_rtx));
4617c87b03e5Sespie 
4618c87b03e5Sespie 	set_mem_attributes (stack_parm, parm, 1);
4619c87b03e5Sespie       }
4620c87b03e5Sespie 
4621c87b03e5Sespie       /* If this parameter was passed both in registers and in the stack,
4622c87b03e5Sespie 	 use the copy on the stack.  */
4623c87b03e5Sespie       if (MUST_PASS_IN_STACK (promoted_mode, passed_type))
4624c87b03e5Sespie 	entry_parm = 0;
4625c87b03e5Sespie 
4626c87b03e5Sespie #ifdef FUNCTION_ARG_PARTIAL_NREGS
4627c87b03e5Sespie       /* If this parm was passed part in regs and part in memory,
4628c87b03e5Sespie 	 pretend it arrived entirely in memory
4629c87b03e5Sespie 	 by pushing the register-part onto the stack.
4630c87b03e5Sespie 
4631c87b03e5Sespie 	 In the special case of a DImode or DFmode that is split,
4632c87b03e5Sespie 	 we could put it together in a pseudoreg directly,
4633c87b03e5Sespie 	 but for now that's not worth bothering with.  */
4634c87b03e5Sespie 
4635c87b03e5Sespie       if (entry_parm)
4636c87b03e5Sespie 	{
4637c87b03e5Sespie 	  int nregs = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, promoted_mode,
4638c87b03e5Sespie 						  passed_type, named_arg);
4639c87b03e5Sespie 
4640c87b03e5Sespie 	  if (nregs > 0)
4641c87b03e5Sespie 	    {
4642c87b03e5Sespie #if defined (REG_PARM_STACK_SPACE) && !defined (MAYBE_REG_PARM_STACK_SPACE)
4643c87b03e5Sespie 	      /* When REG_PARM_STACK_SPACE is nonzero, stack space for
4644c87b03e5Sespie 		 split parameters was allocated by our caller, so we
4645c87b03e5Sespie 		 won't be pushing it in the prolog.  */
4646c87b03e5Sespie 	      if (REG_PARM_STACK_SPACE (fndecl) == 0)
4647c87b03e5Sespie #endif
4648c87b03e5Sespie 	      current_function_pretend_args_size
4649c87b03e5Sespie 		= (((nregs * UNITS_PER_WORD) + (PARM_BOUNDARY / BITS_PER_UNIT) - 1)
4650c87b03e5Sespie 		   / (PARM_BOUNDARY / BITS_PER_UNIT)
4651c87b03e5Sespie 		   * (PARM_BOUNDARY / BITS_PER_UNIT));
4652c87b03e5Sespie 
4653c87b03e5Sespie 	      /* Handle calls that pass values in multiple non-contiguous
4654c87b03e5Sespie 		 locations.  The Irix 6 ABI has examples of this.  */
4655c87b03e5Sespie 	      if (GET_CODE (entry_parm) == PARALLEL)
4656c87b03e5Sespie 		emit_group_store (validize_mem (stack_parm), entry_parm,
4657c87b03e5Sespie 				  int_size_in_bytes (TREE_TYPE (parm)));
4658c87b03e5Sespie 
4659c87b03e5Sespie 	      else
4660c87b03e5Sespie 		move_block_from_reg (REGNO (entry_parm),
4661c87b03e5Sespie 				     validize_mem (stack_parm), nregs,
4662c87b03e5Sespie 				     int_size_in_bytes (TREE_TYPE (parm)));
4663c87b03e5Sespie 
4664c87b03e5Sespie 	      entry_parm = stack_parm;
4665c87b03e5Sespie 	    }
4666c87b03e5Sespie 	}
4667c87b03e5Sespie #endif
4668c87b03e5Sespie 
4669c87b03e5Sespie       /* If we didn't decide this parm came in a register,
4670c87b03e5Sespie 	 by default it came on the stack.  */
4671c87b03e5Sespie       if (entry_parm == 0)
4672c87b03e5Sespie 	entry_parm = stack_parm;
4673c87b03e5Sespie 
4674c87b03e5Sespie       /* Record permanently how this parm was passed.  */
4675c87b03e5Sespie       DECL_INCOMING_RTL (parm) = entry_parm;
4676c87b03e5Sespie 
4677c87b03e5Sespie       /* If there is actually space on the stack for this parm,
4678c87b03e5Sespie 	 count it in stack_args_size; otherwise set stack_parm to 0
4679c87b03e5Sespie 	 to indicate there is no preallocated stack slot for the parm.  */
4680c87b03e5Sespie 
4681c87b03e5Sespie       if (entry_parm == stack_parm
4682c87b03e5Sespie 	  || (GET_CODE (entry_parm) == PARALLEL
4683c87b03e5Sespie 	      && XEXP (XVECEXP (entry_parm, 0, 0), 0) == NULL_RTX)
4684c87b03e5Sespie #if defined (REG_PARM_STACK_SPACE) && ! defined (MAYBE_REG_PARM_STACK_SPACE)
4685c87b03e5Sespie 	  /* On some machines, even if a parm value arrives in a register
4686c87b03e5Sespie 	     there is still an (uninitialized) stack slot allocated for it.
4687c87b03e5Sespie 
4688c87b03e5Sespie 	     ??? When MAYBE_REG_PARM_STACK_SPACE is defined, we can't tell
4689c87b03e5Sespie 	     whether this parameter already has a stack slot allocated,
4690c87b03e5Sespie 	     because an arg block exists only if current_function_args_size
4691c87b03e5Sespie 	     is larger than some threshold, and we haven't calculated that
4692c87b03e5Sespie 	     yet.  So, for now, we just assume that stack slots never exist
4693c87b03e5Sespie 	     in this case.  */
4694c87b03e5Sespie 	  || REG_PARM_STACK_SPACE (fndecl) > 0
4695c87b03e5Sespie #endif
4696c87b03e5Sespie 	  )
4697c87b03e5Sespie 	{
4698c87b03e5Sespie 	  stack_args_size.constant += arg_size.constant;
4699c87b03e5Sespie 	  if (arg_size.var)
4700c87b03e5Sespie 	    ADD_PARM_SIZE (stack_args_size, arg_size.var);
4701c87b03e5Sespie 	}
4702c87b03e5Sespie       else
4703c87b03e5Sespie 	/* No stack slot was pushed for this parm.  */
4704c87b03e5Sespie 	stack_parm = 0;
4705c87b03e5Sespie 
4706c87b03e5Sespie       /* Update info on where next arg arrives in registers.  */
4707c87b03e5Sespie 
4708c87b03e5Sespie       FUNCTION_ARG_ADVANCE (args_so_far, promoted_mode,
4709c87b03e5Sespie 			    passed_type, named_arg);
4710c87b03e5Sespie 
4711c87b03e5Sespie       /* If we can't trust the parm stack slot to be aligned enough
4712c87b03e5Sespie 	 for its ultimate type, don't use that slot after entry.
4713c87b03e5Sespie 	 We'll make another stack slot, if we need one.  */
4714c87b03e5Sespie       {
4715c87b03e5Sespie 	unsigned int thisparm_boundary
4716c87b03e5Sespie 	  = FUNCTION_ARG_BOUNDARY (promoted_mode, passed_type);
4717c87b03e5Sespie 
4718c87b03e5Sespie 	if (GET_MODE_ALIGNMENT (nominal_mode) > thisparm_boundary)
4719c87b03e5Sespie 	  stack_parm = 0;
4720c87b03e5Sespie       }
4721c87b03e5Sespie 
4722c87b03e5Sespie       /* If parm was passed in memory, and we need to convert it on entry,
4723c87b03e5Sespie 	 don't store it back in that same slot.  */
4724c87b03e5Sespie       if (entry_parm != 0
4725c87b03e5Sespie 	  && nominal_mode != BLKmode && nominal_mode != passed_mode)
4726c87b03e5Sespie 	stack_parm = 0;
4727c87b03e5Sespie 
4728c87b03e5Sespie       /* When an argument is passed in multiple locations, we can't
4729c87b03e5Sespie 	 make use of this information, but we can save some copying if
4730c87b03e5Sespie 	 the whole argument is passed in a single register.  */
4731c87b03e5Sespie       if (GET_CODE (entry_parm) == PARALLEL
4732c87b03e5Sespie 	  && nominal_mode != BLKmode && passed_mode != BLKmode)
4733c87b03e5Sespie 	{
4734c87b03e5Sespie 	  int i, len = XVECLEN (entry_parm, 0);
4735c87b03e5Sespie 
4736c87b03e5Sespie 	  for (i = 0; i < len; i++)
4737c87b03e5Sespie 	    if (XEXP (XVECEXP (entry_parm, 0, i), 0) != NULL_RTX
4738c87b03e5Sespie 		&& GET_CODE (XEXP (XVECEXP (entry_parm, 0, i), 0)) == REG
4739c87b03e5Sespie 		&& (GET_MODE (XEXP (XVECEXP (entry_parm, 0, i), 0))
4740c87b03e5Sespie 		    == passed_mode)
4741c87b03e5Sespie 		&& INTVAL (XEXP (XVECEXP (entry_parm, 0, i), 1)) == 0)
4742c87b03e5Sespie 	      {
4743c87b03e5Sespie 		entry_parm = XEXP (XVECEXP (entry_parm, 0, i), 0);
4744c87b03e5Sespie 		DECL_INCOMING_RTL (parm) = entry_parm;
4745c87b03e5Sespie 		break;
4746c87b03e5Sespie 	      }
4747c87b03e5Sespie 	}
4748c87b03e5Sespie 
4749c87b03e5Sespie       /* ENTRY_PARM is an RTX for the parameter as it arrives,
4750c87b03e5Sespie 	 in the mode in which it arrives.
4751c87b03e5Sespie 	 STACK_PARM is an RTX for a stack slot where the parameter can live
4752c87b03e5Sespie 	 during the function (in case we want to put it there).
4753c87b03e5Sespie 	 STACK_PARM is 0 if no stack slot was pushed for it.
4754c87b03e5Sespie 
4755c87b03e5Sespie 	 Now output code if necessary to convert ENTRY_PARM to
4756c87b03e5Sespie 	 the type in which this function declares it,
4757c87b03e5Sespie 	 and store that result in an appropriate place,
4758c87b03e5Sespie 	 which may be a pseudo reg, may be STACK_PARM,
4759c87b03e5Sespie 	 or may be a local stack slot if STACK_PARM is 0.
4760c87b03e5Sespie 
4761c87b03e5Sespie 	 Set DECL_RTL to that place.  */
4762c87b03e5Sespie 
4763c87b03e5Sespie       if (nominal_mode == BLKmode || GET_CODE (entry_parm) == PARALLEL)
4764c87b03e5Sespie 	{
4765c87b03e5Sespie 	  /* If a BLKmode arrives in registers, copy it to a stack slot.
4766c87b03e5Sespie 	     Handle calls that pass values in multiple non-contiguous
4767c87b03e5Sespie 	     locations.  The Irix 6 ABI has examples of this.  */
4768c87b03e5Sespie 	  if (GET_CODE (entry_parm) == REG
4769c87b03e5Sespie 	      || GET_CODE (entry_parm) == PARALLEL)
4770c87b03e5Sespie 	    {
4771c87b03e5Sespie 	      int size_stored
4772c87b03e5Sespie 		= CEIL_ROUND (int_size_in_bytes (TREE_TYPE (parm)),
4773c87b03e5Sespie 			      UNITS_PER_WORD);
4774c87b03e5Sespie 
4775c87b03e5Sespie 	      /* Note that we will be storing an integral number of words.
4776c87b03e5Sespie 		 So we have to be careful to ensure that we allocate an
4777c87b03e5Sespie 		 integral number of words.  We do this below in the
4778c87b03e5Sespie 		 assign_stack_local if space was not allocated in the argument
4779c87b03e5Sespie 		 list.  If it was, this will not work if PARM_BOUNDARY is not
4780c87b03e5Sespie 		 a multiple of BITS_PER_WORD.  It isn't clear how to fix this
4781c87b03e5Sespie 		 if it becomes a problem.  */
4782c87b03e5Sespie 
4783c87b03e5Sespie 	      if (stack_parm == 0)
4784c87b03e5Sespie 		{
4785c87b03e5Sespie 		  stack_parm
4786c87b03e5Sespie 		    = assign_stack_local (GET_MODE (entry_parm),
4787c87b03e5Sespie 					  size_stored, 0);
4788c87b03e5Sespie 		  set_mem_attributes (stack_parm, parm, 1);
4789c87b03e5Sespie 		}
4790c87b03e5Sespie 
4791c87b03e5Sespie 	      else if (PARM_BOUNDARY % BITS_PER_WORD != 0)
4792c87b03e5Sespie 		abort ();
4793c87b03e5Sespie 
4794c87b03e5Sespie 	      /* Handle calls that pass values in multiple non-contiguous
4795c87b03e5Sespie 		 locations.  The Irix 6 ABI has examples of this.  */
4796c87b03e5Sespie 	      if (GET_CODE (entry_parm) == PARALLEL)
4797c87b03e5Sespie 		emit_group_store (validize_mem (stack_parm), entry_parm,
4798c87b03e5Sespie 				  int_size_in_bytes (TREE_TYPE (parm)));
4799c87b03e5Sespie 	      else
4800c87b03e5Sespie 		move_block_from_reg (REGNO (entry_parm),
4801c87b03e5Sespie 				     validize_mem (stack_parm),
4802c87b03e5Sespie 				     size_stored / UNITS_PER_WORD,
4803c87b03e5Sespie 				     int_size_in_bytes (TREE_TYPE (parm)));
4804c87b03e5Sespie 	    }
4805c87b03e5Sespie 	  SET_DECL_RTL (parm, stack_parm);
4806c87b03e5Sespie 	}
4807c87b03e5Sespie       else if (! ((! optimize
4808c87b03e5Sespie 		   && ! DECL_REGISTER (parm))
4809c87b03e5Sespie 		  || TREE_SIDE_EFFECTS (parm)
4810c87b03e5Sespie 		  /* If -ffloat-store specified, don't put explicit
4811c87b03e5Sespie 		     float variables into registers.  */
4812c87b03e5Sespie 		  || (flag_float_store
4813c87b03e5Sespie 		      && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))
4814c87b03e5Sespie 	       /* Always assign pseudo to structure return or item passed
4815c87b03e5Sespie 		  by invisible reference.  */
4816c87b03e5Sespie 	       || passed_pointer || parm == function_result_decl)
4817c87b03e5Sespie 	{
4818c87b03e5Sespie 	  /* Store the parm in a pseudoregister during the function, but we
4819c87b03e5Sespie 	     may need to do it in a wider mode.  */
4820c87b03e5Sespie 
4821c87b03e5Sespie 	  rtx parmreg;
4822c87b03e5Sespie 	  unsigned int regno, regnoi = 0, regnor = 0;
4823c87b03e5Sespie 
4824c87b03e5Sespie 	  unsignedp = TREE_UNSIGNED (TREE_TYPE (parm));
4825c87b03e5Sespie 
4826c87b03e5Sespie 	  promoted_nominal_mode
4827c87b03e5Sespie 	    = promote_mode (TREE_TYPE (parm), nominal_mode, &unsignedp, 0);
4828c87b03e5Sespie 
4829c87b03e5Sespie 	  parmreg = gen_reg_rtx (promoted_nominal_mode);
4830c87b03e5Sespie 	  mark_user_reg (parmreg);
4831c87b03e5Sespie 
4832c87b03e5Sespie 	  /* If this was an item that we received a pointer to, set DECL_RTL
4833c87b03e5Sespie 	     appropriately.  */
4834c87b03e5Sespie 	  if (passed_pointer)
4835c87b03e5Sespie 	    {
4836c87b03e5Sespie 	      rtx x = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (passed_type)),
4837c87b03e5Sespie 				   parmreg);
4838c87b03e5Sespie 	      set_mem_attributes (x, parm, 1);
4839c87b03e5Sespie 	      SET_DECL_RTL (parm, x);
4840c87b03e5Sespie 	    }
4841c87b03e5Sespie 	  else
4842c87b03e5Sespie 	    {
4843c87b03e5Sespie 	      SET_DECL_RTL (parm, parmreg);
4844c87b03e5Sespie 	      maybe_set_unchanging (DECL_RTL (parm), parm);
4845c87b03e5Sespie 	    }
4846c87b03e5Sespie 
4847c87b03e5Sespie 	  /* Copy the value into the register.  */
4848c87b03e5Sespie 	  if (nominal_mode != passed_mode
4849c87b03e5Sespie 	      || promoted_nominal_mode != promoted_mode)
4850c87b03e5Sespie 	    {
4851c87b03e5Sespie 	      int save_tree_used;
4852c87b03e5Sespie 	      /* ENTRY_PARM has been converted to PROMOTED_MODE, its
4853c87b03e5Sespie 		 mode, by the caller.  We now have to convert it to
4854c87b03e5Sespie 		 NOMINAL_MODE, if different.  However, PARMREG may be in
4855c87b03e5Sespie 		 a different mode than NOMINAL_MODE if it is being stored
4856c87b03e5Sespie 		 promoted.
4857c87b03e5Sespie 
4858c87b03e5Sespie 		 If ENTRY_PARM is a hard register, it might be in a register
4859c87b03e5Sespie 		 not valid for operating in its mode (e.g., an odd-numbered
4860c87b03e5Sespie 		 register for a DFmode).  In that case, moves are the only
4861c87b03e5Sespie 		 thing valid, so we can't do a convert from there.  This
4862c87b03e5Sespie 		 occurs when the calling sequence allow such misaligned
4863c87b03e5Sespie 		 usages.
4864c87b03e5Sespie 
4865c87b03e5Sespie 		 In addition, the conversion may involve a call, which could
4866c87b03e5Sespie 		 clobber parameters which haven't been copied to pseudo
4867c87b03e5Sespie 		 registers yet.  Therefore, we must first copy the parm to
4868c87b03e5Sespie 		 a pseudo reg here, and save the conversion until after all
4869c87b03e5Sespie 		 parameters have been moved.  */
4870c87b03e5Sespie 
4871c87b03e5Sespie 	      rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
4872c87b03e5Sespie 
4873c87b03e5Sespie 	      emit_move_insn (tempreg, validize_mem (entry_parm));
4874c87b03e5Sespie 
4875c87b03e5Sespie 	      push_to_sequence (conversion_insns);
4876c87b03e5Sespie 	      tempreg = convert_to_mode (nominal_mode, tempreg, unsignedp);
4877c87b03e5Sespie 
4878c87b03e5Sespie 	      if (GET_CODE (tempreg) == SUBREG
4879c87b03e5Sespie 		  && GET_MODE (tempreg) == nominal_mode
4880c87b03e5Sespie 		  && GET_CODE (SUBREG_REG (tempreg)) == REG
4881c87b03e5Sespie 		  && nominal_mode == passed_mode
4882c87b03e5Sespie 		  && GET_MODE (SUBREG_REG (tempreg)) == GET_MODE (entry_parm)
4883c87b03e5Sespie 		  && GET_MODE_SIZE (GET_MODE (tempreg))
4884c87b03e5Sespie 		     < GET_MODE_SIZE (GET_MODE (entry_parm)))
4885c87b03e5Sespie 		{
4886c87b03e5Sespie 		  /* The argument is already sign/zero extended, so note it
4887c87b03e5Sespie 		     into the subreg.  */
4888c87b03e5Sespie 		  SUBREG_PROMOTED_VAR_P (tempreg) = 1;
4889c87b03e5Sespie 		  SUBREG_PROMOTED_UNSIGNED_SET (tempreg, unsignedp);
4890c87b03e5Sespie 		}
4891c87b03e5Sespie 
4892c87b03e5Sespie 	      /* TREE_USED gets set erroneously during expand_assignment.  */
4893c87b03e5Sespie 	      save_tree_used = TREE_USED (parm);
4894c87b03e5Sespie 	      expand_assignment (parm,
4895c87b03e5Sespie 				 make_tree (nominal_type, tempreg), 0, 0);
4896c87b03e5Sespie 	      TREE_USED (parm) = save_tree_used;
4897c87b03e5Sespie 	      conversion_insns = get_insns ();
4898c87b03e5Sespie 	      did_conversion = 1;
4899c87b03e5Sespie 	      end_sequence ();
4900c87b03e5Sespie 	    }
4901c87b03e5Sespie 	  else
4902c87b03e5Sespie 	    emit_move_insn (parmreg, validize_mem (entry_parm));
4903c87b03e5Sespie 
4904c87b03e5Sespie 	  /* If we were passed a pointer but the actual value
4905c87b03e5Sespie 	     can safely live in a register, put it in one.  */
4906c87b03e5Sespie 	  if (passed_pointer && TYPE_MODE (TREE_TYPE (parm)) != BLKmode
4907c87b03e5Sespie 	      /* If by-reference argument was promoted, demote it.  */
4908c87b03e5Sespie 	      && (TYPE_MODE (TREE_TYPE (parm)) != GET_MODE (DECL_RTL (parm))
4909c87b03e5Sespie 		  || ! ((! optimize
4910c87b03e5Sespie 			 && ! DECL_REGISTER (parm))
4911c87b03e5Sespie 			|| TREE_SIDE_EFFECTS (parm)
4912c87b03e5Sespie 			/* If -ffloat-store specified, don't put explicit
4913c87b03e5Sespie 			   float variables into registers.  */
4914c87b03e5Sespie 			|| (flag_float_store
4915c87b03e5Sespie 			    && TREE_CODE (TREE_TYPE (parm)) == REAL_TYPE))))
4916c87b03e5Sespie 	    {
4917c87b03e5Sespie 	      /* We can't use nominal_mode, because it will have been set to
4918c87b03e5Sespie 		 Pmode above.  We must use the actual mode of the parm.  */
4919c87b03e5Sespie 	      parmreg = gen_reg_rtx (TYPE_MODE (TREE_TYPE (parm)));
4920c87b03e5Sespie 	      mark_user_reg (parmreg);
4921c87b03e5Sespie 	      if (GET_MODE (parmreg) != GET_MODE (DECL_RTL (parm)))
4922c87b03e5Sespie 		{
4923c87b03e5Sespie 		  rtx tempreg = gen_reg_rtx (GET_MODE (DECL_RTL (parm)));
4924c87b03e5Sespie 		  int unsigned_p = TREE_UNSIGNED (TREE_TYPE (parm));
4925c87b03e5Sespie 		  push_to_sequence (conversion_insns);
4926c87b03e5Sespie 		  emit_move_insn (tempreg, DECL_RTL (parm));
4927c87b03e5Sespie 		  SET_DECL_RTL (parm,
4928c87b03e5Sespie 				convert_to_mode (GET_MODE (parmreg),
4929c87b03e5Sespie 						 tempreg,
4930c87b03e5Sespie 						 unsigned_p));
4931c87b03e5Sespie 		  emit_move_insn (parmreg, DECL_RTL (parm));
4932c87b03e5Sespie 		  conversion_insns = get_insns();
4933c87b03e5Sespie 		  did_conversion = 1;
4934c87b03e5Sespie 		  end_sequence ();
4935c87b03e5Sespie 		}
4936c87b03e5Sespie 	      else
4937c87b03e5Sespie 		emit_move_insn (parmreg, DECL_RTL (parm));
4938c87b03e5Sespie 	      SET_DECL_RTL (parm, parmreg);
4939c87b03e5Sespie 	      /* STACK_PARM is the pointer, not the parm, and PARMREG is
4940c87b03e5Sespie 		 now the parm.  */
4941c87b03e5Sespie 	      stack_parm = 0;
4942c87b03e5Sespie 	    }
4943c87b03e5Sespie #ifdef FUNCTION_ARG_CALLEE_COPIES
4944c87b03e5Sespie 	  /* If we are passed an arg by reference and it is our responsibility
4945c87b03e5Sespie 	     to make a copy, do it now.
4946c87b03e5Sespie 	     PASSED_TYPE and PASSED mode now refer to the pointer, not the
4947c87b03e5Sespie 	     original argument, so we must recreate them in the call to
4948c87b03e5Sespie 	     FUNCTION_ARG_CALLEE_COPIES.  */
4949c87b03e5Sespie 	  /* ??? Later add code to handle the case that if the argument isn't
4950c87b03e5Sespie 	     modified, don't do the copy.  */
4951c87b03e5Sespie 
4952c87b03e5Sespie 	  else if (passed_pointer
4953c87b03e5Sespie 		   && FUNCTION_ARG_CALLEE_COPIES (args_so_far,
4954c87b03e5Sespie 						  TYPE_MODE (DECL_ARG_TYPE (parm)),
4955c87b03e5Sespie 						  DECL_ARG_TYPE (parm),
4956c87b03e5Sespie 						  named_arg)
4957c87b03e5Sespie 		   && ! TREE_ADDRESSABLE (DECL_ARG_TYPE (parm)))
4958c87b03e5Sespie 	    {
4959c87b03e5Sespie 	      rtx copy;
4960c87b03e5Sespie 	      tree type = DECL_ARG_TYPE (parm);
4961c87b03e5Sespie 
4962c87b03e5Sespie 	      /* This sequence may involve a library call perhaps clobbering
4963c87b03e5Sespie 		 registers that haven't been copied to pseudos yet.  */
4964c87b03e5Sespie 
4965c87b03e5Sespie 	      push_to_sequence (conversion_insns);
4966c87b03e5Sespie 
4967c87b03e5Sespie 	      if (!COMPLETE_TYPE_P (type)
4968c87b03e5Sespie 		  || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4969c87b03e5Sespie 		/* This is a variable sized object.  */
4970c87b03e5Sespie 		copy = gen_rtx_MEM (BLKmode,
4971c87b03e5Sespie 				    allocate_dynamic_stack_space
4972c87b03e5Sespie 				    (expr_size (parm), NULL_RTX,
4973c87b03e5Sespie 				     TYPE_ALIGN (type)));
4974c87b03e5Sespie 	      else
4975c87b03e5Sespie 		copy = assign_stack_temp (TYPE_MODE (type),
4976c87b03e5Sespie 					  int_size_in_bytes (type), 1);
4977c87b03e5Sespie 	      set_mem_attributes (copy, parm, 1);
4978c87b03e5Sespie 
4979c87b03e5Sespie 	      store_expr (parm, copy, 0);
4980c87b03e5Sespie 	      emit_move_insn (parmreg, XEXP (copy, 0));
4981c87b03e5Sespie 	      conversion_insns = get_insns ();
4982c87b03e5Sespie 	      did_conversion = 1;
4983c87b03e5Sespie 	      end_sequence ();
4984c87b03e5Sespie 	    }
4985c87b03e5Sespie #endif /* FUNCTION_ARG_CALLEE_COPIES */
4986c87b03e5Sespie 
4987c87b03e5Sespie 	  /* In any case, record the parm's desired stack location
4988c87b03e5Sespie 	     in case we later discover it must live in the stack.
4989c87b03e5Sespie 
4990c87b03e5Sespie 	     If it is a COMPLEX value, store the stack location for both
4991c87b03e5Sespie 	     halves.  */
4992c87b03e5Sespie 
4993c87b03e5Sespie 	  if (GET_CODE (parmreg) == CONCAT)
4994c87b03e5Sespie 	    regno = MAX (REGNO (XEXP (parmreg, 0)), REGNO (XEXP (parmreg, 1)));
4995c87b03e5Sespie 	  else
4996c87b03e5Sespie 	    regno = REGNO (parmreg);
4997c87b03e5Sespie 
4998c87b03e5Sespie 	  if (regno >= max_parm_reg)
4999c87b03e5Sespie 	    {
5000c87b03e5Sespie 	      rtx *new;
5001c87b03e5Sespie 	      int old_max_parm_reg = max_parm_reg;
5002c87b03e5Sespie 
5003c87b03e5Sespie 	      /* It's slow to expand this one register at a time,
5004c87b03e5Sespie 		 but it's also rare and we need max_parm_reg to be
5005c87b03e5Sespie 		 precisely correct.  */
5006c87b03e5Sespie 	      max_parm_reg = regno + 1;
5007c87b03e5Sespie 	      new = (rtx *) ggc_realloc (parm_reg_stack_loc,
5008c87b03e5Sespie 				      max_parm_reg * sizeof (rtx));
5009c87b03e5Sespie 	      memset ((char *) (new + old_max_parm_reg), 0,
5010c87b03e5Sespie 		     (max_parm_reg - old_max_parm_reg) * sizeof (rtx));
5011c87b03e5Sespie 	      parm_reg_stack_loc = new;
5012c87b03e5Sespie 	    }
5013c87b03e5Sespie 
5014c87b03e5Sespie 	  if (GET_CODE (parmreg) == CONCAT)
5015c87b03e5Sespie 	    {
5016c87b03e5Sespie 	      enum machine_mode submode = GET_MODE (XEXP (parmreg, 0));
5017c87b03e5Sespie 
5018c87b03e5Sespie 	      regnor = REGNO (gen_realpart (submode, parmreg));
5019c87b03e5Sespie 	      regnoi = REGNO (gen_imagpart (submode, parmreg));
5020c87b03e5Sespie 
5021c87b03e5Sespie 	      if (stack_parm != 0)
5022c87b03e5Sespie 		{
5023c87b03e5Sespie 		  parm_reg_stack_loc[regnor]
5024c87b03e5Sespie 		    = gen_realpart (submode, stack_parm);
5025c87b03e5Sespie 		  parm_reg_stack_loc[regnoi]
5026c87b03e5Sespie 		    = gen_imagpart (submode, stack_parm);
5027c87b03e5Sespie 		}
5028c87b03e5Sespie 	      else
5029c87b03e5Sespie 		{
5030c87b03e5Sespie 		  parm_reg_stack_loc[regnor] = 0;
5031c87b03e5Sespie 		  parm_reg_stack_loc[regnoi] = 0;
5032c87b03e5Sespie 		}
5033c87b03e5Sespie 	    }
5034c87b03e5Sespie 	  else
5035c87b03e5Sespie 	    parm_reg_stack_loc[REGNO (parmreg)] = stack_parm;
5036c87b03e5Sespie 
5037c87b03e5Sespie 	  /* Mark the register as eliminable if we did no conversion
5038c87b03e5Sespie 	     and it was copied from memory at a fixed offset,
5039c87b03e5Sespie 	     and the arg pointer was not copied to a pseudo-reg.
5040c87b03e5Sespie 	     If the arg pointer is a pseudo reg or the offset formed
5041c87b03e5Sespie 	     an invalid address, such memory-equivalences
5042c87b03e5Sespie 	     as we make here would screw up life analysis for it.  */
5043c87b03e5Sespie 	  if (nominal_mode == passed_mode
5044c87b03e5Sespie 	      && ! did_conversion
5045c87b03e5Sespie 	      && stack_parm != 0
5046c87b03e5Sespie 	      && GET_CODE (stack_parm) == MEM
5047c87b03e5Sespie 	      && stack_offset.var == 0
5048c87b03e5Sespie 	      && reg_mentioned_p (virtual_incoming_args_rtx,
5049c87b03e5Sespie 				  XEXP (stack_parm, 0)))
5050c87b03e5Sespie 	    {
5051c87b03e5Sespie 	      rtx linsn = get_last_insn ();
5052c87b03e5Sespie 	      rtx sinsn, set;
5053c87b03e5Sespie 
5054c87b03e5Sespie 	      /* Mark complex types separately.  */
5055c87b03e5Sespie 	      if (GET_CODE (parmreg) == CONCAT)
5056c87b03e5Sespie 		/* Scan backwards for the set of the real and
5057c87b03e5Sespie 		   imaginary parts.  */
5058c87b03e5Sespie 		for (sinsn = linsn; sinsn != 0;
5059c87b03e5Sespie 		     sinsn = prev_nonnote_insn (sinsn))
5060c87b03e5Sespie 		  {
5061c87b03e5Sespie 		    set = single_set (sinsn);
5062c87b03e5Sespie 		    if (set != 0
5063c87b03e5Sespie 			&& SET_DEST (set) == regno_reg_rtx [regnoi])
5064c87b03e5Sespie 		      REG_NOTES (sinsn)
5065c87b03e5Sespie 			= gen_rtx_EXPR_LIST (REG_EQUIV,
5066c87b03e5Sespie 					     parm_reg_stack_loc[regnoi],
5067c87b03e5Sespie 					     REG_NOTES (sinsn));
5068c87b03e5Sespie 		    else if (set != 0
5069c87b03e5Sespie 			     && SET_DEST (set) == regno_reg_rtx [regnor])
5070c87b03e5Sespie 		      REG_NOTES (sinsn)
5071c87b03e5Sespie 			= gen_rtx_EXPR_LIST (REG_EQUIV,
5072c87b03e5Sespie 					     parm_reg_stack_loc[regnor],
5073c87b03e5Sespie 					     REG_NOTES (sinsn));
5074c87b03e5Sespie 		  }
5075c87b03e5Sespie 	      else if ((set = single_set (linsn)) != 0
5076c87b03e5Sespie 		       && SET_DEST (set) == parmreg)
5077c87b03e5Sespie 		REG_NOTES (linsn)
5078c87b03e5Sespie 		  = gen_rtx_EXPR_LIST (REG_EQUIV,
5079c87b03e5Sespie 				       stack_parm, REG_NOTES (linsn));
5080c87b03e5Sespie 	    }
5081c87b03e5Sespie 
5082c87b03e5Sespie 	  /* For pointer data type, suggest pointer register.  */
5083c87b03e5Sespie 	  if (POINTER_TYPE_P (TREE_TYPE (parm)))
5084c87b03e5Sespie 	    mark_reg_pointer (parmreg,
5085c87b03e5Sespie 			      TYPE_ALIGN (TREE_TYPE (TREE_TYPE (parm))));
5086c87b03e5Sespie 
5087c87b03e5Sespie 	  /* If something wants our address, try to use ADDRESSOF.  */
5088c87b03e5Sespie 	  if (TREE_ADDRESSABLE (parm))
5089c87b03e5Sespie 	    {
5090c87b03e5Sespie 	      /* If we end up putting something into the stack,
5091c87b03e5Sespie 		 fixup_var_refs_insns will need to make a pass over
5092c87b03e5Sespie 		 all the instructions.  It looks through the pending
5093c87b03e5Sespie 		 sequences -- but it can't see the ones in the
5094c87b03e5Sespie 		 CONVERSION_INSNS, if they're not on the sequence
5095c87b03e5Sespie 		 stack.  So, we go back to that sequence, just so that
5096c87b03e5Sespie 		 the fixups will happen.  */
5097c87b03e5Sespie 	      push_to_sequence (conversion_insns);
5098c87b03e5Sespie 	      put_var_into_stack (parm, /*rescan=*/true);
5099c87b03e5Sespie 	      conversion_insns = get_insns ();
5100c87b03e5Sespie 	      end_sequence ();
5101c87b03e5Sespie 	    }
5102c87b03e5Sespie 	}
5103c87b03e5Sespie       else
5104c87b03e5Sespie 	{
5105c87b03e5Sespie 	  /* Value must be stored in the stack slot STACK_PARM
5106c87b03e5Sespie 	     during function execution.  */
5107c87b03e5Sespie 
5108c87b03e5Sespie 	  if (promoted_mode != nominal_mode)
5109c87b03e5Sespie 	    {
5110c87b03e5Sespie 	      /* Conversion is required.  */
5111c87b03e5Sespie 	      rtx tempreg = gen_reg_rtx (GET_MODE (entry_parm));
5112c87b03e5Sespie 
5113c87b03e5Sespie 	      emit_move_insn (tempreg, validize_mem (entry_parm));
5114c87b03e5Sespie 
5115c87b03e5Sespie 	      push_to_sequence (conversion_insns);
5116c87b03e5Sespie 	      entry_parm = convert_to_mode (nominal_mode, tempreg,
5117c87b03e5Sespie 					    TREE_UNSIGNED (TREE_TYPE (parm)));
5118c87b03e5Sespie 	      if (stack_parm)
5119c87b03e5Sespie 		/* ??? This may need a big-endian conversion on sparc64.  */
5120c87b03e5Sespie 		stack_parm = adjust_address (stack_parm, nominal_mode, 0);
5121c87b03e5Sespie 
5122c87b03e5Sespie 	      conversion_insns = get_insns ();
5123c87b03e5Sespie 	      did_conversion = 1;
5124c87b03e5Sespie 	      end_sequence ();
5125c87b03e5Sespie 	    }
5126c87b03e5Sespie 
5127c87b03e5Sespie 	  if (entry_parm != stack_parm)
5128c87b03e5Sespie 	    {
5129c87b03e5Sespie 	      if (stack_parm == 0)
5130c87b03e5Sespie 		{
5131c87b03e5Sespie 		  stack_parm
5132c87b03e5Sespie 		    = assign_stack_local (GET_MODE (entry_parm),
5133c87b03e5Sespie 					  GET_MODE_SIZE (GET_MODE (entry_parm)), 0);
5134c87b03e5Sespie 		  set_mem_attributes (stack_parm, parm, 1);
5135c87b03e5Sespie 		}
5136c87b03e5Sespie 
5137c87b03e5Sespie 	      if (promoted_mode != nominal_mode)
5138c87b03e5Sespie 		{
5139c87b03e5Sespie 		  push_to_sequence (conversion_insns);
5140c87b03e5Sespie 		  emit_move_insn (validize_mem (stack_parm),
5141c87b03e5Sespie 				  validize_mem (entry_parm));
5142c87b03e5Sespie 		  conversion_insns = get_insns ();
5143c87b03e5Sespie 		  end_sequence ();
5144c87b03e5Sespie 		}
5145c87b03e5Sespie 	      else
5146c87b03e5Sespie 		emit_move_insn (validize_mem (stack_parm),
5147c87b03e5Sespie 				validize_mem (entry_parm));
5148c87b03e5Sespie 	    }
5149c87b03e5Sespie 
5150c87b03e5Sespie 	  SET_DECL_RTL (parm, stack_parm);
5151c87b03e5Sespie 	}
5152c87b03e5Sespie 
5153c87b03e5Sespie       /* If this "parameter" was the place where we are receiving the
5154c87b03e5Sespie 	 function's incoming structure pointer, set up the result.  */
5155c87b03e5Sespie       if (parm == function_result_decl)
5156c87b03e5Sespie 	{
5157c87b03e5Sespie 	  tree result = DECL_RESULT (fndecl);
5158c87b03e5Sespie 	  rtx addr = DECL_RTL (parm);
5159c87b03e5Sespie 	  rtx x;
5160c87b03e5Sespie 
5161c87b03e5Sespie #ifdef POINTERS_EXTEND_UNSIGNED
5162c87b03e5Sespie 	  if (GET_MODE (addr) != Pmode)
5163c87b03e5Sespie 	    addr = convert_memory_address (Pmode, addr);
5164c87b03e5Sespie #endif
5165c87b03e5Sespie 
5166c87b03e5Sespie 	  x = gen_rtx_MEM (DECL_MODE (result), addr);
5167c87b03e5Sespie 	  set_mem_attributes (x, result, 1);
5168c87b03e5Sespie 	  SET_DECL_RTL (result, x);
5169c87b03e5Sespie 	}
5170c87b03e5Sespie 
5171c87b03e5Sespie       if (GET_CODE (DECL_RTL (parm)) == REG)
5172c87b03e5Sespie 	REGNO_DECL (REGNO (DECL_RTL (parm))) = parm;
5173c87b03e5Sespie       else if (GET_CODE (DECL_RTL (parm)) == CONCAT)
5174c87b03e5Sespie 	{
5175c87b03e5Sespie 	  REGNO_DECL (REGNO (XEXP (DECL_RTL (parm), 0))) = parm;
5176c87b03e5Sespie 	  REGNO_DECL (REGNO (XEXP (DECL_RTL (parm), 1))) = parm;
5177c87b03e5Sespie 	}
5178c87b03e5Sespie 
5179c87b03e5Sespie     }
5180c87b03e5Sespie 
5181c87b03e5Sespie   /* Output all parameter conversion instructions (possibly including calls)
5182c87b03e5Sespie      now that all parameters have been copied out of hard registers.  */
5183c87b03e5Sespie   emit_insn (conversion_insns);
5184c87b03e5Sespie 
5185c87b03e5Sespie   last_parm_insn = get_last_insn ();
5186c87b03e5Sespie 
5187c87b03e5Sespie   current_function_args_size = stack_args_size.constant;
5188c87b03e5Sespie 
5189c87b03e5Sespie   /* Adjust function incoming argument size for alignment and
5190c87b03e5Sespie      minimum length.  */
5191c87b03e5Sespie 
5192c87b03e5Sespie #ifdef REG_PARM_STACK_SPACE
5193c87b03e5Sespie #ifndef MAYBE_REG_PARM_STACK_SPACE
5194c87b03e5Sespie   current_function_args_size = MAX (current_function_args_size,
5195c87b03e5Sespie 				    REG_PARM_STACK_SPACE (fndecl));
5196c87b03e5Sespie #endif
5197c87b03e5Sespie #endif
5198c87b03e5Sespie 
5199c87b03e5Sespie #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
5200c87b03e5Sespie 
5201c87b03e5Sespie   current_function_args_size
5202c87b03e5Sespie     = ((current_function_args_size + STACK_BYTES - 1)
5203c87b03e5Sespie        / STACK_BYTES) * STACK_BYTES;
5204c87b03e5Sespie 
5205c87b03e5Sespie #ifdef ARGS_GROW_DOWNWARD
5206c87b03e5Sespie   current_function_arg_offset_rtx
5207c87b03e5Sespie     = (stack_args_size.var == 0 ? GEN_INT (-stack_args_size.constant)
5208c87b03e5Sespie        : expand_expr (size_diffop (stack_args_size.var,
5209c87b03e5Sespie 				   size_int (-stack_args_size.constant)),
5210c87b03e5Sespie 		      NULL_RTX, VOIDmode, 0));
5211c87b03e5Sespie #else
5212c87b03e5Sespie   current_function_arg_offset_rtx = ARGS_SIZE_RTX (stack_args_size);
5213c87b03e5Sespie #endif
5214c87b03e5Sespie 
5215c87b03e5Sespie   /* See how many bytes, if any, of its args a function should try to pop
5216c87b03e5Sespie      on return.  */
5217c87b03e5Sespie 
5218c87b03e5Sespie   current_function_pops_args = RETURN_POPS_ARGS (fndecl, TREE_TYPE (fndecl),
5219c87b03e5Sespie 						 current_function_args_size);
5220c87b03e5Sespie 
5221c87b03e5Sespie   /* For stdarg.h function, save info about
5222c87b03e5Sespie      regs and stack space used by the named args.  */
5223c87b03e5Sespie 
5224c87b03e5Sespie   current_function_args_info = args_so_far;
5225c87b03e5Sespie 
5226c87b03e5Sespie   /* Set the rtx used for the function return value.  Put this in its
5227c87b03e5Sespie      own variable so any optimizers that need this information don't have
5228c87b03e5Sespie      to include tree.h.  Do this here so it gets done when an inlined
5229c87b03e5Sespie      function gets output.  */
5230c87b03e5Sespie 
5231c87b03e5Sespie   current_function_return_rtx
5232c87b03e5Sespie     = (DECL_RTL_SET_P (DECL_RESULT (fndecl))
5233c87b03e5Sespie        ? DECL_RTL (DECL_RESULT (fndecl)) : NULL_RTX);
5234c87b03e5Sespie 
5235c87b03e5Sespie   /* If scalar return value was computed in a pseudo-reg, or was a named
5236c87b03e5Sespie      return value that got dumped to the stack, copy that to the hard
5237c87b03e5Sespie      return register.  */
5238c87b03e5Sespie   if (DECL_RTL_SET_P (DECL_RESULT (fndecl)))
5239c87b03e5Sespie     {
5240c87b03e5Sespie       tree decl_result = DECL_RESULT (fndecl);
5241c87b03e5Sespie       rtx decl_rtl = DECL_RTL (decl_result);
5242c87b03e5Sespie 
5243c87b03e5Sespie       if (REG_P (decl_rtl)
5244c87b03e5Sespie 	  ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
5245c87b03e5Sespie 	  : DECL_REGISTER (decl_result))
5246c87b03e5Sespie 	{
5247c87b03e5Sespie 	  rtx real_decl_rtl;
5248c87b03e5Sespie 
5249c87b03e5Sespie #ifdef FUNCTION_OUTGOING_VALUE
5250c87b03e5Sespie 	  real_decl_rtl = FUNCTION_OUTGOING_VALUE (TREE_TYPE (decl_result),
5251c87b03e5Sespie 						   fndecl);
5252c87b03e5Sespie #else
5253c87b03e5Sespie 	  real_decl_rtl = FUNCTION_VALUE (TREE_TYPE (decl_result),
5254c87b03e5Sespie 					  fndecl);
5255c87b03e5Sespie #endif
5256c87b03e5Sespie 	  REG_FUNCTION_VALUE_P (real_decl_rtl) = 1;
5257c87b03e5Sespie 	  /* The delay slot scheduler assumes that current_function_return_rtx
5258c87b03e5Sespie 	     holds the hard register containing the return value, not a
5259c87b03e5Sespie 	     temporary pseudo.  */
5260c87b03e5Sespie 	  current_function_return_rtx = real_decl_rtl;
5261c87b03e5Sespie 	}
5262c87b03e5Sespie     }
5263c87b03e5Sespie }
5264c87b03e5Sespie 
5265c87b03e5Sespie /* Indicate whether REGNO is an incoming argument to the current function
5266c87b03e5Sespie    that was promoted to a wider mode.  If so, return the RTX for the
5267c87b03e5Sespie    register (to get its mode).  PMODE and PUNSIGNEDP are set to the mode
5268c87b03e5Sespie    that REGNO is promoted from and whether the promotion was signed or
5269c87b03e5Sespie    unsigned.  */
5270c87b03e5Sespie 
5271c87b03e5Sespie #ifdef PROMOTE_FUNCTION_ARGS
5272c87b03e5Sespie 
5273c87b03e5Sespie rtx
promoted_input_arg(regno,pmode,punsignedp)5274c87b03e5Sespie promoted_input_arg (regno, pmode, punsignedp)
5275c87b03e5Sespie      unsigned int regno;
5276c87b03e5Sespie      enum machine_mode *pmode;
5277c87b03e5Sespie      int *punsignedp;
5278c87b03e5Sespie {
5279c87b03e5Sespie   tree arg;
5280c87b03e5Sespie 
5281c87b03e5Sespie   for (arg = DECL_ARGUMENTS (current_function_decl); arg;
5282c87b03e5Sespie        arg = TREE_CHAIN (arg))
5283c87b03e5Sespie     if (GET_CODE (DECL_INCOMING_RTL (arg)) == REG
5284c87b03e5Sespie 	&& REGNO (DECL_INCOMING_RTL (arg)) == regno
5285c87b03e5Sespie 	&& TYPE_MODE (DECL_ARG_TYPE (arg)) == TYPE_MODE (TREE_TYPE (arg)))
5286c87b03e5Sespie       {
5287c87b03e5Sespie 	enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg));
5288c87b03e5Sespie 	int unsignedp = TREE_UNSIGNED (TREE_TYPE (arg));
5289c87b03e5Sespie 
5290c87b03e5Sespie 	mode = promote_mode (TREE_TYPE (arg), mode, &unsignedp, 1);
5291c87b03e5Sespie 	if (mode == GET_MODE (DECL_INCOMING_RTL (arg))
5292c87b03e5Sespie 	    && mode != DECL_MODE (arg))
5293c87b03e5Sespie 	  {
5294c87b03e5Sespie 	    *pmode = DECL_MODE (arg);
5295c87b03e5Sespie 	    *punsignedp = unsignedp;
5296c87b03e5Sespie 	    return DECL_INCOMING_RTL (arg);
5297c87b03e5Sespie 	  }
5298c87b03e5Sespie       }
5299c87b03e5Sespie 
5300c87b03e5Sespie   return 0;
5301c87b03e5Sespie }
5302c87b03e5Sespie 
5303c87b03e5Sespie #endif
5304c87b03e5Sespie 
5305c87b03e5Sespie /* Compute the size and offset from the start of the stacked arguments for a
5306c87b03e5Sespie    parm passed in mode PASSED_MODE and with type TYPE.
5307c87b03e5Sespie 
5308c87b03e5Sespie    INITIAL_OFFSET_PTR points to the current offset into the stacked
5309c87b03e5Sespie    arguments.
5310c87b03e5Sespie 
5311c87b03e5Sespie    The starting offset and size for this parm are returned in *OFFSET_PTR
5312c87b03e5Sespie    and *ARG_SIZE_PTR, respectively.
5313c87b03e5Sespie 
5314c87b03e5Sespie    IN_REGS is nonzero if the argument will be passed in registers.  It will
5315c87b03e5Sespie    never be set if REG_PARM_STACK_SPACE is not defined.
5316c87b03e5Sespie 
5317c87b03e5Sespie    FNDECL is the function in which the argument was defined.
5318c87b03e5Sespie 
5319c87b03e5Sespie    There are two types of rounding that are done.  The first, controlled by
5320c87b03e5Sespie    FUNCTION_ARG_BOUNDARY, forces the offset from the start of the argument
5321c87b03e5Sespie    list to be aligned to the specific boundary (in bits).  This rounding
5322c87b03e5Sespie    affects the initial and starting offsets, but not the argument size.
5323c87b03e5Sespie 
5324c87b03e5Sespie    The second, controlled by FUNCTION_ARG_PADDING and PARM_BOUNDARY,
5325c87b03e5Sespie    optionally rounds the size of the parm to PARM_BOUNDARY.  The
5326c87b03e5Sespie    initial offset is not affected by this rounding, while the size always
5327c87b03e5Sespie    is and the starting offset may be.  */
5328c87b03e5Sespie 
5329c87b03e5Sespie /*  offset_ptr will be negative for ARGS_GROW_DOWNWARD case;
5330c87b03e5Sespie     initial_offset_ptr is positive because locate_and_pad_parm's
5331c87b03e5Sespie     callers pass in the total size of args so far as
5332c87b03e5Sespie     initial_offset_ptr. arg_size_ptr is always positive.  */
5333c87b03e5Sespie 
void
locate_and_pad_parm (passed_mode, type, in_regs, fndecl,
		     initial_offset_ptr, offset_ptr, arg_size_ptr,
		     alignment_pad)
     enum machine_mode passed_mode;
     tree type;
     int in_regs ATTRIBUTE_UNUSED;
     tree fndecl ATTRIBUTE_UNUSED;
     struct args_size *initial_offset_ptr;
     struct args_size *offset_ptr;
     struct args_size *arg_size_ptr;
     struct args_size *alignment_pad;

{
  /* Size of the parm in bytes: taken from TYPE when there is one,
     otherwise from the mode alone.  */
  tree sizetree
    = type ? size_in_bytes (type) : size_int (GET_MODE_SIZE (passed_mode));
  enum direction where_pad = FUNCTION_ARG_PADDING (passed_mode, type);
  int boundary = FUNCTION_ARG_BOUNDARY (passed_mode, type);
#ifdef ARGS_GROW_DOWNWARD
  /* Working copy of the size; may be rounded up to PARM_BOUNDARY below
     while SIZETREE keeps the pre-rounded value for pad_below.  */
  tree s2 = sizetree;
#endif

#ifdef REG_PARM_STACK_SPACE
  /* If we have found a stack parm before we reach the end of the
     area reserved for registers, skip that area.  */
  if (! in_regs)
    {
      int reg_parm_stack_space = 0;

#ifdef MAYBE_REG_PARM_STACK_SPACE
      reg_parm_stack_space = MAYBE_REG_PARM_STACK_SPACE;
#else
      reg_parm_stack_space = REG_PARM_STACK_SPACE (fndecl);
#endif
      if (reg_parm_stack_space > 0)
	{
	  if (initial_offset_ptr->var)
	    {
	      /* Variable offset: clamp it to at least the register
		 save area size with a MAX_EXPR tree.  */
	      initial_offset_ptr->var
		= size_binop (MAX_EXPR, ARGS_SIZE_TREE (*initial_offset_ptr),
			      ssize_int (reg_parm_stack_space));
	      initial_offset_ptr->constant = 0;
	    }
	  else if (initial_offset_ptr->constant < reg_parm_stack_space)
	    initial_offset_ptr->constant = reg_parm_stack_space;
	}
    }
#endif /* REG_PARM_STACK_SPACE */

  /* Start with an empty size and no alignment padding; both are
     accumulated below.  */
  arg_size_ptr->var = 0;
  arg_size_ptr->constant = 0;
  alignment_pad->var = 0;
  alignment_pad->constant = 0;

#ifdef ARGS_GROW_DOWNWARD
  /* Offsets are negative in this case (see the comment above this
     function): start at minus the args-so-far size.  */
  if (initial_offset_ptr->var)
    {
      offset_ptr->constant = 0;
      offset_ptr->var = size_binop (MINUS_EXPR, ssize_int (0),
				    initial_offset_ptr->var);
    }
  else
    {
      offset_ptr->constant = -initial_offset_ptr->constant;
      offset_ptr->var = 0;
    }

  /* Round the size up to a multiple of PARM_BOUNDARY when padding is
     wanted and the size is not already a multiple, then move the
     offset down past the (rounded) parm.  */
  if (where_pad != none
      && (!host_integerp (sizetree, 1)
	  || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
    s2 = round_up (s2, PARM_BOUNDARY / BITS_PER_UNIT);
  SUB_PARM_SIZE (*offset_ptr, s2);

  if (!in_regs
#ifdef REG_PARM_STACK_SPACE
      || REG_PARM_STACK_SPACE (fndecl) > 0
#endif
     )
    pad_to_arg_alignment (offset_ptr, boundary, alignment_pad);

  /* The argument size is the distance from the (aligned) starting
     offset back up to minus the initial offset.  */
  if (initial_offset_ptr->var)
    arg_size_ptr->var = size_binop (MINUS_EXPR,
				    size_binop (MINUS_EXPR,
						ssize_int (0),
						initial_offset_ptr->var),
				    offset_ptr->var);

  else
    arg_size_ptr->constant = (-initial_offset_ptr->constant
			      - offset_ptr->constant);

  /* Pad_below needs the pre-rounded size to know how much to pad below.
     We only pad parameters which are not in registers as they have their
     padding done elsewhere.  */
  if (where_pad == downward
      && !in_regs)
    pad_below (offset_ptr, passed_mode, sizetree);

#else /* !ARGS_GROW_DOWNWARD */
  if (!in_regs
#ifdef REG_PARM_STACK_SPACE
      || REG_PARM_STACK_SPACE (fndecl) > 0
#endif
      )
    pad_to_arg_alignment (initial_offset_ptr, boundary, alignment_pad);
  *offset_ptr = *initial_offset_ptr;

#ifdef PUSH_ROUNDING
  /* Account for the rounding the target applies when pushing a value
     of this mode.  */
  if (passed_mode != BLKmode)
    sizetree = size_int (PUSH_ROUNDING (TREE_INT_CST_LOW (sizetree)));
#endif

  /* Pad_below needs the pre-rounded size to know how much to pad below
     so this must be done before rounding up.  */
  if (where_pad == downward
    /* However, BLKmode args passed in regs have their padding done elsewhere.
       The stack slot must be able to hold the entire register.  */
      && !(in_regs && passed_mode == BLKmode))
    pad_below (offset_ptr, passed_mode, sizetree);

  /* Round the size up to a multiple of PARM_BOUNDARY when padding is
     wanted; this affects the size recorded in *ARG_SIZE_PTR but not
     the starting offset.  */
  if (where_pad != none
      && (!host_integerp (sizetree, 1)
	  || (tree_low_cst (sizetree, 1) * BITS_PER_UNIT) % PARM_BOUNDARY))
    sizetree = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);

  ADD_PARM_SIZE (*arg_size_ptr, sizetree);
#endif /* ARGS_GROW_DOWNWARD */
}
5462c87b03e5Sespie 
5463c87b03e5Sespie /* Round the stack offset in *OFFSET_PTR up to a multiple of BOUNDARY.
5464c87b03e5Sespie    BOUNDARY is measured in bits, but must be a multiple of a storage unit.  */
5465c87b03e5Sespie 
static void
pad_to_arg_alignment (offset_ptr, boundary, alignment_pad)
     struct args_size *offset_ptr;
     int boundary;
     struct args_size *alignment_pad;
{
  tree save_var = NULL_TREE;
  HOST_WIDE_INT save_constant = 0;

  int boundary_in_bytes = boundary / BITS_PER_UNIT;

  /* Remember the starting offset so the amount of padding introduced
     can be reported through *ALIGNMENT_PAD.  This is only needed when
     BOUNDARY exceeds both PARM_BOUNDARY and STACK_BOUNDARY, the only
     case in which *ALIGNMENT_PAD is set below.  */
  if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
    {
      save_var = offset_ptr->var;
      save_constant = offset_ptr->constant;
    }

  alignment_pad->var = NULL_TREE;
  alignment_pad->constant = 0;

  if (boundary > BITS_PER_UNIT)
    {
      if (offset_ptr->var)
	{
	  /* Variable offset: build a tree expression for the rounded
	     value.  When args grow downward the offset is negative, so
	     round toward minus infinity rather than up.  */
	  offset_ptr->var =
#ifdef ARGS_GROW_DOWNWARD
	    round_down
#else
	    round_up
#endif
	      (ARGS_SIZE_TREE (*offset_ptr),
	       boundary / BITS_PER_UNIT);
	  offset_ptr->constant = 0; /*?*/
	  if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
	    alignment_pad->var = size_binop (MINUS_EXPR, offset_ptr->var,
					     save_var);
	}
      else
	{
	  /* Constant offset: round it arithmetically, in the matching
	     direction.  */
	  offset_ptr->constant =
#ifdef ARGS_GROW_DOWNWARD
	    FLOOR_ROUND (offset_ptr->constant, boundary_in_bytes);
#else
	    CEIL_ROUND (offset_ptr->constant, boundary_in_bytes);
#endif
	    if (boundary > PARM_BOUNDARY && boundary > STACK_BOUNDARY)
	      alignment_pad->constant = offset_ptr->constant - save_constant;
	}
    }
}
5516c87b03e5Sespie 
5517c87b03e5Sespie static void
pad_below(offset_ptr,passed_mode,sizetree)5518c87b03e5Sespie pad_below (offset_ptr, passed_mode, sizetree)
5519c87b03e5Sespie      struct args_size *offset_ptr;
5520c87b03e5Sespie      enum machine_mode passed_mode;
5521c87b03e5Sespie      tree sizetree;
5522c87b03e5Sespie {
5523c87b03e5Sespie   if (passed_mode != BLKmode)
5524c87b03e5Sespie     {
5525c87b03e5Sespie       if (GET_MODE_BITSIZE (passed_mode) % PARM_BOUNDARY)
5526c87b03e5Sespie 	offset_ptr->constant
5527c87b03e5Sespie 	  += (((GET_MODE_BITSIZE (passed_mode) + PARM_BOUNDARY - 1)
5528c87b03e5Sespie 	       / PARM_BOUNDARY * PARM_BOUNDARY / BITS_PER_UNIT)
5529c87b03e5Sespie 	      - GET_MODE_SIZE (passed_mode));
5530c87b03e5Sespie     }
5531c87b03e5Sespie   else
5532c87b03e5Sespie     {
5533c87b03e5Sespie       if (TREE_CODE (sizetree) != INTEGER_CST
5534c87b03e5Sespie 	  || (TREE_INT_CST_LOW (sizetree) * BITS_PER_UNIT) % PARM_BOUNDARY)
5535c87b03e5Sespie 	{
5536c87b03e5Sespie 	  /* Round the size up to multiple of PARM_BOUNDARY bits.  */
5537c87b03e5Sespie 	  tree s2 = round_up (sizetree, PARM_BOUNDARY / BITS_PER_UNIT);
5538c87b03e5Sespie 	  /* Add it in.  */
5539c87b03e5Sespie 	  ADD_PARM_SIZE (*offset_ptr, s2);
5540c87b03e5Sespie 	  SUB_PARM_SIZE (*offset_ptr, sizetree);
5541c87b03e5Sespie 	}
5542c87b03e5Sespie     }
5543c87b03e5Sespie }
5544c87b03e5Sespie 
5545c87b03e5Sespie /* Walk the tree of blocks describing the binding levels within a function
5546c87b03e5Sespie    and warn about uninitialized variables.
5547c87b03e5Sespie    This is done after calling flow_analysis and before global_alloc
5548c87b03e5Sespie    clobbers the pseudo-regs to hard regs.  */
5549c87b03e5Sespie 
void
uninitialized_vars_warning (block)
     tree block;
{
  tree decl, sub;
  for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
    {
      /* Warn about scalar register variables that flow analysis found
	 to be read before being set.  */
      if (warn_uninitialized
	  && TREE_CODE (decl) == VAR_DECL
	  /* These warnings are unreliable for aggregates
	     because assigning the fields one by one can fail to convince
	     flow.c that the entire aggregate was initialized.
	     Unions are troublesome because members may be shorter.  */
	  && ! AGGREGATE_TYPE_P (TREE_TYPE (decl))
	  && DECL_RTL (decl) != 0
	  && GET_CODE (DECL_RTL (decl)) == REG
	  /* Global optimizations can make it difficult to determine if a
	     particular variable has been initialized.  However, a VAR_DECL
	     with a nonzero DECL_INITIAL had an initializer, so do not
	     claim it is potentially uninitialized.

	     We do not care about the actual value in DECL_INITIAL, so we do
	     not worry that it may be a dangling pointer.  */
	  && DECL_INITIAL (decl) == NULL_TREE
	  && regno_uninitialized (REGNO (DECL_RTL (decl))))
	warning_with_decl (decl,
			   "`%s' might be used uninitialized in this function");
      /* With -W, also warn about register variables whose register is
	 not preserved across a setjmp or vfork in this function.  */
      if (extra_warnings
	  && TREE_CODE (decl) == VAR_DECL
	  && DECL_RTL (decl) != 0
	  && GET_CODE (DECL_RTL (decl)) == REG
	  && regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
	warning_with_decl (decl,
			   "variable `%s' might be clobbered by `longjmp' or `vfork'");
    }
  /* Recurse into all nested binding contours.  */
  for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
    uninitialized_vars_warning (sub);
}
5588c87b03e5Sespie 
5589c87b03e5Sespie /* Do the appropriate part of uninitialized_vars_warning
5590c87b03e5Sespie    but for arguments instead of local variables.  */
5591c87b03e5Sespie 
5592c87b03e5Sespie void
setjmp_args_warning()5593c87b03e5Sespie setjmp_args_warning ()
5594c87b03e5Sespie {
5595c87b03e5Sespie   tree decl;
5596c87b03e5Sespie   for (decl = DECL_ARGUMENTS (current_function_decl);
5597c87b03e5Sespie        decl; decl = TREE_CHAIN (decl))
5598c87b03e5Sespie     if (DECL_RTL (decl) != 0
5599c87b03e5Sespie 	&& GET_CODE (DECL_RTL (decl)) == REG
5600c87b03e5Sespie 	&& regno_clobbered_at_setjmp (REGNO (DECL_RTL (decl))))
5601c87b03e5Sespie       warning_with_decl (decl,
5602c87b03e5Sespie 			 "argument `%s' might be clobbered by `longjmp' or `vfork'");
5603c87b03e5Sespie }
5604c87b03e5Sespie 
5605c87b03e5Sespie /* If this function call setjmp, put all vars into the stack
5606c87b03e5Sespie    unless they were declared `register'.  */
5607c87b03e5Sespie 
5608c87b03e5Sespie void
setjmp_protect(block)5609c87b03e5Sespie setjmp_protect (block)
5610c87b03e5Sespie      tree block;
5611c87b03e5Sespie {
5612c87b03e5Sespie   tree decl, sub;
5613c87b03e5Sespie   for (decl = BLOCK_VARS (block); decl; decl = TREE_CHAIN (decl))
5614c87b03e5Sespie     if ((TREE_CODE (decl) == VAR_DECL
5615c87b03e5Sespie 	 || TREE_CODE (decl) == PARM_DECL)
5616c87b03e5Sespie 	&& DECL_RTL (decl) != 0
5617c87b03e5Sespie 	&& (GET_CODE (DECL_RTL (decl)) == REG
5618c87b03e5Sespie 	    || (GET_CODE (DECL_RTL (decl)) == MEM
5619c87b03e5Sespie 		&& GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5620c87b03e5Sespie 	/* If this variable came from an inline function, it must be
5621c87b03e5Sespie 	   that its life doesn't overlap the setjmp.  If there was a
5622c87b03e5Sespie 	   setjmp in the function, it would already be in memory.  We
5623c87b03e5Sespie 	   must exclude such variable because their DECL_RTL might be
5624c87b03e5Sespie 	   set to strange things such as virtual_stack_vars_rtx.  */
5625c87b03e5Sespie 	&& ! DECL_FROM_INLINE (decl)
5626c87b03e5Sespie 	&& (
5627c87b03e5Sespie #ifdef NON_SAVING_SETJMP
5628c87b03e5Sespie 	    /* If longjmp doesn't restore the registers,
5629c87b03e5Sespie 	       don't put anything in them.  */
5630c87b03e5Sespie 	    NON_SAVING_SETJMP
5631c87b03e5Sespie 	    ||
5632c87b03e5Sespie #endif
5633c87b03e5Sespie 	    ! DECL_REGISTER (decl)))
5634c87b03e5Sespie       put_var_into_stack (decl, /*rescan=*/true);
5635c87b03e5Sespie   for (sub = BLOCK_SUBBLOCKS (block); sub; sub = TREE_CHAIN (sub))
5636c87b03e5Sespie     setjmp_protect (sub);
5637c87b03e5Sespie }
5638c87b03e5Sespie 
5639c87b03e5Sespie /* Like the previous function, but for args instead of local variables.  */
5640c87b03e5Sespie 
5641c87b03e5Sespie void
setjmp_protect_args()5642c87b03e5Sespie setjmp_protect_args ()
5643c87b03e5Sespie {
5644c87b03e5Sespie   tree decl;
5645c87b03e5Sespie   for (decl = DECL_ARGUMENTS (current_function_decl);
5646c87b03e5Sespie        decl; decl = TREE_CHAIN (decl))
5647c87b03e5Sespie     if ((TREE_CODE (decl) == VAR_DECL
5648c87b03e5Sespie 	 || TREE_CODE (decl) == PARM_DECL)
5649c87b03e5Sespie 	&& DECL_RTL (decl) != 0
5650c87b03e5Sespie 	&& (GET_CODE (DECL_RTL (decl)) == REG
5651c87b03e5Sespie 	    || (GET_CODE (DECL_RTL (decl)) == MEM
5652c87b03e5Sespie 		&& GET_CODE (XEXP (DECL_RTL (decl), 0)) == ADDRESSOF))
5653c87b03e5Sespie 	&& (
5654c87b03e5Sespie 	    /* If longjmp doesn't restore the registers,
5655c87b03e5Sespie 	       don't put anything in them.  */
5656c87b03e5Sespie #ifdef NON_SAVING_SETJMP
5657c87b03e5Sespie 	    NON_SAVING_SETJMP
5658c87b03e5Sespie 	    ||
5659c87b03e5Sespie #endif
5660c87b03e5Sespie 	    ! DECL_REGISTER (decl)))
5661c87b03e5Sespie       put_var_into_stack (decl, /*rescan=*/true);
5662c87b03e5Sespie }
5663c87b03e5Sespie 
5664c87b03e5Sespie /* Return the context-pointer register corresponding to DECL,
5665c87b03e5Sespie    or 0 if it does not need one.  */
5666c87b03e5Sespie 
5667c87b03e5Sespie rtx
lookup_static_chain(decl)5668c87b03e5Sespie lookup_static_chain (decl)
5669c87b03e5Sespie      tree decl;
5670c87b03e5Sespie {
5671c87b03e5Sespie   tree context = decl_function_context (decl);
5672c87b03e5Sespie   tree link;
5673c87b03e5Sespie 
5674c87b03e5Sespie   if (context == 0
5675c87b03e5Sespie       || (TREE_CODE (decl) == FUNCTION_DECL && DECL_NO_STATIC_CHAIN (decl)))
5676c87b03e5Sespie     return 0;
5677c87b03e5Sespie 
5678c87b03e5Sespie   /* We treat inline_function_decl as an alias for the current function
5679c87b03e5Sespie      because that is the inline function whose vars, types, etc.
5680c87b03e5Sespie      are being merged into the current function.
5681c87b03e5Sespie      See expand_inline_function.  */
5682c87b03e5Sespie   if (context == current_function_decl || context == inline_function_decl)
5683c87b03e5Sespie     return virtual_stack_vars_rtx;
5684c87b03e5Sespie 
5685c87b03e5Sespie   for (link = context_display; link; link = TREE_CHAIN (link))
5686c87b03e5Sespie     if (TREE_PURPOSE (link) == context)
5687c87b03e5Sespie       return RTL_EXPR_RTL (TREE_VALUE (link));
5688c87b03e5Sespie 
5689c87b03e5Sespie   abort ();
5690c87b03e5Sespie }
5691c87b03e5Sespie 
/* Convert a stack slot address ADDR for variable VAR
   (from a containing function)
   into an address valid in this function (using a static chain).

   ADDR must be a REG or a (PLUS reg const_int); anything else aborts.
   The displacement is preserved; only the base register is rewritten
   in terms of this function's view of the containing frame.  */

rtx
fix_lexical_addr (addr, var)
     rtx addr;
     tree var;
{
  rtx basereg;
  HOST_WIDE_INT displacement;
  tree context = decl_function_context (var);
  struct function *fp;
  rtx base = 0;

  /* If this is the present function, we need not do anything.  */
  if (context == current_function_decl || context == inline_function_decl)
    return addr;

  fp = find_function_data (context);

  /* Look through an ADDRESSOF of a MEM to the stack address inside it.  */
  if (GET_CODE (addr) == ADDRESSOF && GET_CODE (XEXP (addr, 0)) == MEM)
    addr = XEXP (XEXP (addr, 0), 0);

  /* Decode given address as base reg plus displacement.  */
  if (GET_CODE (addr) == REG)
    basereg = addr, displacement = 0;
  else if (GET_CODE (addr) == PLUS && GET_CODE (XEXP (addr, 1)) == CONST_INT)
    basereg = XEXP (addr, 0), displacement = INTVAL (XEXP (addr, 1));
  else
    abort ();

  /* We accept vars reached via the containing function's
     incoming arg pointer and via its stack variables pointer.  */
  if (basereg == fp->internal_arg_pointer)
    {
      /* If reached via arg pointer, get the arg pointer value
	 out of that function's stack frame.

	 There are two cases:  If a separate ap is needed, allocate a
	 slot in the outer function for it and dereference it that way.
	 This is correct even if the real ap is actually a pseudo.
	 Otherwise, just adjust the offset from the frame pointer to
	 compensate.  */

#ifdef NEED_SEPARATE_AP
      rtx addr;

      addr = get_arg_pointer_save_area (fp);
      /* The save slot itself lives in the outer frame, so its address
	 in turn needs lexical fixing before we can dereference it.  */
      addr = fix_lexical_addr (XEXP (addr, 0), var);
      addr = memory_address (Pmode, addr);

      base = gen_rtx_MEM (Pmode, addr);
      set_mem_alias_set (base, get_frame_alias_set ());
      base = copy_to_reg (base);
#else
      displacement += (FIRST_PARM_OFFSET (context) - STARTING_FRAME_OFFSET);
      base = lookup_static_chain (var);
#endif
    }

  else if (basereg == virtual_stack_vars_rtx)
    {
      /* This is the same code as lookup_static_chain, duplicated here to
	 avoid an extra call to decl_function_context.  */
      tree link;

      for (link = context_display; link; link = TREE_CHAIN (link))
	if (TREE_PURPOSE (link) == context)
	  {
	    base = RTL_EXPR_RTL (TREE_VALUE (link));
	    break;
	  }
    }

  if (base == 0)
    abort ();

  /* Use same offset, relative to appropriate static chain or argument
     pointer.  */
  return plus_constant (base, displacement);
}
5774c87b03e5Sespie 
/* Return the address of the trampoline for entering nested fn FUNCTION.
   If necessary, allocate a trampoline (in the stack frame)
   and emit rtl to initialize its contents (at entry to this function).  */

rtx
trampoline_address (function)
     tree function;
{
  tree link;
  tree rtlexp;
  rtx tramp;
  struct function *fp;
  tree fn_context;

  /* Find an existing trampoline and return it.  */
  for (link = trampoline_list; link; link = TREE_CHAIN (link))
    if (TREE_PURPOSE (link) == function)
      return
	adjust_trampoline_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0));

  /* Also search the trampoline lists of the enclosing functions; a
     trampoline found there lives in an outer frame, so its address
     must be fixed up through the static chain.  */
  for (fp = outer_function_chain; fp; fp = fp->outer)
    for (link = fp->x_trampoline_list; link; link = TREE_CHAIN (link))
      if (TREE_PURPOSE (link) == function)
	{
	  tramp = fix_lexical_addr (XEXP (RTL_EXPR_RTL (TREE_VALUE (link)), 0),
				    function);
	  return adjust_trampoline_addr (tramp);
	}

  /* None exists; we must make one.  */

  /* Find the `struct function' for the function containing FUNCTION.  */
  fp = 0;
  fn_context = decl_function_context (function);
  if (fn_context != current_function_decl
      && fn_context != inline_function_decl)
    fp = find_function_data (fn_context);

  /* Allocate run-time space for this trampoline
     (usually in the defining function's stack frame).  */
#ifdef ALLOCATE_TRAMPOLINE
  tramp = ALLOCATE_TRAMPOLINE (fp);
#else
  /* If rounding needed, allocate extra space
     to ensure we have TRAMPOLINE_SIZE bytes left after rounding up.  */
#define TRAMPOLINE_REAL_SIZE \
  (TRAMPOLINE_SIZE + (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT) - 1)
  tramp = assign_stack_local_1 (BLKmode, TRAMPOLINE_REAL_SIZE, 0,
				fp ? fp : cfun);
#endif

  /* Record the trampoline for reuse and note it for later initialization
     by expand_function_end.  The list used depends on which function's
     frame owns the new trampoline.  */
  if (fp != 0)
    {
      rtlexp = make_node (RTL_EXPR);
      RTL_EXPR_RTL (rtlexp) = tramp;
      fp->x_trampoline_list = tree_cons (function, rtlexp,
					 fp->x_trampoline_list);
    }
  else
    {
      /* Make the RTL_EXPR node temporary, not momentary, so that the
	 trampoline_list doesn't become garbage.  */
      rtlexp = make_node (RTL_EXPR);

      RTL_EXPR_RTL (rtlexp) = tramp;
      trampoline_list = tree_cons (function, rtlexp, trampoline_list);
    }

  /* The new trampoline may live in an outer frame; fix its address up
     and apply the target's rounding/adjustment before returning it.  */
  tramp = fix_lexical_addr (XEXP (tramp, 0), function);
  return adjust_trampoline_addr (tramp);
}
5848c87b03e5Sespie 
5849c87b03e5Sespie /* Given a trampoline address,
5850c87b03e5Sespie    round it to multiple of TRAMPOLINE_ALIGNMENT.  */
5851c87b03e5Sespie 
5852c87b03e5Sespie static rtx
round_trampoline_addr(tramp)5853c87b03e5Sespie round_trampoline_addr (tramp)
5854c87b03e5Sespie      rtx tramp;
5855c87b03e5Sespie {
5856c87b03e5Sespie   /* Round address up to desired boundary.  */
5857c87b03e5Sespie   rtx temp = gen_reg_rtx (Pmode);
5858c87b03e5Sespie   rtx addend = GEN_INT (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1);
5859c87b03e5Sespie   rtx mask = GEN_INT (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT);
5860c87b03e5Sespie 
5861c87b03e5Sespie   temp  = expand_simple_binop (Pmode, PLUS, tramp, addend,
5862c87b03e5Sespie 			       temp, 0, OPTAB_LIB_WIDEN);
5863c87b03e5Sespie   tramp = expand_simple_binop (Pmode, AND, temp, mask,
5864c87b03e5Sespie 			       temp, 0, OPTAB_LIB_WIDEN);
5865c87b03e5Sespie 
5866c87b03e5Sespie   return tramp;
5867c87b03e5Sespie }
5868c87b03e5Sespie 
/* Given a trampoline address, round it up to TRAMPOLINE_ALIGNMENT and
   then apply any platform-specific adjustments so that the result can
   be used for a function call.  */

static rtx
adjust_trampoline_addr (tramp)
     rtx tramp;
{
  tramp = round_trampoline_addr (tramp);
#ifdef TRAMPOLINE_ADJUST_ADDRESS
  /* The target macro may rewrite TRAMP in place (it is passed as an
     lvalue).  */
  TRAMPOLINE_ADJUST_ADDRESS (tramp);
#endif
  return tramp;
}
5883c87b03e5Sespie 
/* Associate each NOTE_INSN_BLOCK_{BEG,END} note in the current
   function's insn chain with its BLOCK tree node: list all BLOCKs
   reachable from DECL_INITIAL (current_function_decl) in depth-first
   preorder, then walk the insns in parallel, setting NOTE_BLOCK on
   each block note.  Does nothing if the function has no block tree.  */

void
identify_blocks ()
{
  int n_blocks;
  tree *block_vector, *last_block_vector;
  tree *block_stack;
  tree block = DECL_INITIAL (current_function_decl);

  if (block == 0)
    return;

  /* Fill the BLOCK_VECTOR with all of the BLOCKs in this function, in
     depth-first order.  */
  block_vector = get_block_vector (block, &n_blocks);
  block_stack = (tree *) xmalloc (n_blocks * sizeof (tree));

  /* Skip slot 0: the outermost BLOCK is implicit and gets no note.  */
  last_block_vector = identify_blocks_1 (get_insns (),
					 block_vector + 1,
					 block_vector + n_blocks,
					 block_stack);

  /* If we didn't use all of the subblocks, we've misplaced block notes.  */
  /* ??? This appears to happen all the time.  Latent bugs elsewhere?
     The check is disabled (if (0 && ...)) for that reason.  */
  if (0 && last_block_vector != block_vector + n_blocks)
    abort ();

  free (block_vector);
  free (block_stack);
}
5920c87b03e5Sespie 
/* Subroutine of identify_blocks.  Do the block substitution on the
   insn chain beginning with INSNS.  Recurse for CALL_PLACEHOLDER chains.

   BLOCK_STACK is pushed and popped for each BLOCK_BEGIN/BLOCK_END pair.
   BLOCK_VECTOR is incremented for each block seen; END_BLOCK_VECTOR is
   one past the last usable slot.  Returns the updated BLOCK_VECTOR so
   the caller can continue consuming blocks where this call stopped.
   Aborts if begin/end notes are unbalanced or the vector overflows.  */

static tree *
identify_blocks_1 (insns, block_vector, end_block_vector, orig_block_stack)
     rtx insns;
     tree *block_vector;
     tree *end_block_vector;
     tree *orig_block_stack;
{
  rtx insn;
  tree *block_stack = orig_block_stack;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == NOTE)
	{
	  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
	    {
	      tree b;

	      /* If there are more block notes than BLOCKs, something
		 is badly wrong.  */
	      if (block_vector == end_block_vector)
		abort ();

	      /* Consume the next block in preorder; remember it both in
		 the note and on the stack for the matching END note.  */
	      b = *block_vector++;
	      NOTE_BLOCK (insn) = b;
	      *block_stack++ = b;
	    }
	  else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
	    {
	      /* If there are more NOTE_INSN_BLOCK_ENDs than
		 NOTE_INSN_BLOCK_BEGs, something is badly wrong.  */
	      if (block_stack == orig_block_stack)
		abort ();

	      NOTE_BLOCK (insn) = *--block_stack;
	    }
	}
      else if (GET_CODE (insn) == CALL_INSN
	       && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	{
	  /* A CALL_PLACEHOLDER carries up to three alternative insn
	     sequences; walk each one that is present.  */
	  rtx cp = PATTERN (insn);

	  block_vector = identify_blocks_1 (XEXP (cp, 0), block_vector,
					    end_block_vector, block_stack);
	  if (XEXP (cp, 1))
	    block_vector = identify_blocks_1 (XEXP (cp, 1), block_vector,
					      end_block_vector, block_stack);
	  if (XEXP (cp, 2))
	    block_vector = identify_blocks_1 (XEXP (cp, 2), block_vector,
					      end_block_vector, block_stack);
	}
    }

  /* If there are more NOTE_INSN_BLOCK_BEGINs than NOTE_INSN_BLOCK_ENDs,
     something is badly wrong.  */
  if (block_stack != orig_block_stack)
    abort ();

  return block_vector;
}
5987c87b03e5Sespie 
/* Identify BLOCKs referenced by more than one NOTE_INSN_BLOCK_{BEG,END},
   and create duplicate blocks.  The block tree under the function's
   outermost BLOCK is discarded and rebuilt from the nesting of block
   notes in the insn chain.  */
/* ??? Need an option to either create block fragments or to create
   abstract origin duplicates of a source block.  It really depends
   on what optimization has been performed.  */

void
reorder_blocks ()
{
  tree block = DECL_INITIAL (current_function_decl);
  varray_type block_stack;

  if (block == NULL_TREE)
    return;

  VARRAY_TREE_INIT (block_stack, 10, "block_stack");

  /* Reset the TREE_ASM_WRITTEN bit for all blocks.  */
  reorder_blocks_0 (block);

  /* Prune the old trees away, so that they don't get in the way.  */
  BLOCK_SUBBLOCKS (block) = NULL_TREE;
  BLOCK_CHAIN (block) = NULL_TREE;

  /* Recreate the block tree from the note nesting.  Subblocks are
     accumulated in reverse, so put them back in source order.  */
  reorder_blocks_1 (get_insns (), block, &block_stack);
  BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));

  /* Remove deleted blocks from the block fragment chains.  */
  reorder_fix_fragments (block);
}
6019c87b03e5Sespie 
6020c87b03e5Sespie /* Helper function for reorder_blocks.  Reset TREE_ASM_WRITTEN.  */
6021c87b03e5Sespie 
6022c87b03e5Sespie static void
reorder_blocks_0(block)6023c87b03e5Sespie reorder_blocks_0 (block)
6024c87b03e5Sespie      tree block;
6025c87b03e5Sespie {
6026c87b03e5Sespie   while (block)
6027c87b03e5Sespie     {
6028c87b03e5Sespie       TREE_ASM_WRITTEN (block) = 0;
6029c87b03e5Sespie       reorder_blocks_0 (BLOCK_SUBBLOCKS (block));
6030c87b03e5Sespie       block = BLOCK_CHAIN (block);
6031c87b03e5Sespie     }
6032c87b03e5Sespie }
6033c87b03e5Sespie 
/* Subroutine of reorder_blocks.  Rebuild the block tree beneath
   CURRENT_BLOCK from the nesting of block notes in the insn chain
   INSNS, recursing into CALL_PLACEHOLDER alternative sequences.
   P_BLOCK_STACK tracks the currently open blocks.  A block whose note
   is seen a second time (TREE_ASM_WRITTEN already set) is split: a
   copy becomes a new fragment chained through BLOCK_FRAGMENT_CHAIN.  */

static void
reorder_blocks_1 (insns, current_block, p_block_stack)
     rtx insns;
     tree current_block;
     varray_type *p_block_stack;
{
  rtx insn;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == NOTE)
	{
	  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG)
	    {
	      tree block = NOTE_BLOCK (insn);

	      /* If we have seen this block before, that means it now
		 spans multiple address regions.  Create a new fragment.  */
	      if (TREE_ASM_WRITTEN (block))
		{
		  tree new_block = copy_node (block);
		  tree origin;

		  /* All fragments point at the one true origin block.  */
		  origin = (BLOCK_FRAGMENT_ORIGIN (block)
			    ? BLOCK_FRAGMENT_ORIGIN (block)
			    : block);
		  BLOCK_FRAGMENT_ORIGIN (new_block) = origin;
		  BLOCK_FRAGMENT_CHAIN (new_block)
		    = BLOCK_FRAGMENT_CHAIN (origin);
		  BLOCK_FRAGMENT_CHAIN (origin) = new_block;

		  NOTE_BLOCK (insn) = new_block;
		  block = new_block;
		}

	      /* Splice the block in under CURRENT_BLOCK and descend.
		 Subblocks accumulate in reverse; the END note and
		 reorder_blocks undo that with blocks_nreverse.  */
	      BLOCK_SUBBLOCKS (block) = 0;
	      TREE_ASM_WRITTEN (block) = 1;
	      BLOCK_SUPERCONTEXT (block) = current_block;
	      BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (current_block);
	      BLOCK_SUBBLOCKS (current_block) = block;
	      current_block = block;
	      VARRAY_PUSH_TREE (*p_block_stack, block);
	    }
	  else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END)
	    {
	      NOTE_BLOCK (insn) = VARRAY_TOP_TREE (*p_block_stack);
	      VARRAY_POP (*p_block_stack);
	      BLOCK_SUBBLOCKS (current_block)
		= blocks_nreverse (BLOCK_SUBBLOCKS (current_block));
	      current_block = BLOCK_SUPERCONTEXT (current_block);
	    }
	}
      else if (GET_CODE (insn) == CALL_INSN
	       && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	{
	  /* Walk each alternative sequence of the placeholder.  */
	  rtx cp = PATTERN (insn);
	  reorder_blocks_1 (XEXP (cp, 0), current_block, p_block_stack);
	  if (XEXP (cp, 1))
	    reorder_blocks_1 (XEXP (cp, 1), current_block, p_block_stack);
	  if (XEXP (cp, 2))
	    reorder_blocks_1 (XEXP (cp, 2), current_block, p_block_stack);
	}
    }
}
6098c87b03e5Sespie 
6099c87b03e5Sespie /* Rationalize BLOCK_FRAGMENT_ORIGIN.  If an origin block no longer
6100c87b03e5Sespie    appears in the block tree, select one of the fragments to become
6101c87b03e5Sespie    the new origin block.  */
6102c87b03e5Sespie 
6103c87b03e5Sespie static void
reorder_fix_fragments(block)6104c87b03e5Sespie reorder_fix_fragments (block)
6105c87b03e5Sespie      tree block;
6106c87b03e5Sespie {
6107c87b03e5Sespie   while (block)
6108c87b03e5Sespie     {
6109c87b03e5Sespie       tree dup_origin = BLOCK_FRAGMENT_ORIGIN (block);
6110c87b03e5Sespie       tree new_origin = NULL_TREE;
6111c87b03e5Sespie 
6112c87b03e5Sespie       if (dup_origin)
6113c87b03e5Sespie 	{
6114c87b03e5Sespie 	  if (! TREE_ASM_WRITTEN (dup_origin))
6115c87b03e5Sespie 	    {
6116c87b03e5Sespie 	      new_origin = BLOCK_FRAGMENT_CHAIN (dup_origin);
6117c87b03e5Sespie 
6118c87b03e5Sespie 	      /* Find the first of the remaining fragments.  There must
6119c87b03e5Sespie 		 be at least one -- the current block.  */
6120c87b03e5Sespie 	      while (! TREE_ASM_WRITTEN (new_origin))
6121c87b03e5Sespie 		new_origin = BLOCK_FRAGMENT_CHAIN (new_origin);
6122c87b03e5Sespie 	      BLOCK_FRAGMENT_ORIGIN (new_origin) = NULL_TREE;
6123c87b03e5Sespie 	    }
6124c87b03e5Sespie 	}
6125c87b03e5Sespie       else if (! dup_origin)
6126c87b03e5Sespie 	new_origin = block;
6127c87b03e5Sespie 
6128c87b03e5Sespie       /* Re-root the rest of the fragments to the new origin.  In the
6129c87b03e5Sespie 	 case that DUP_ORIGIN was null, that means BLOCK was the origin
6130c87b03e5Sespie 	 of a chain of fragments and we want to remove those fragments
6131c87b03e5Sespie 	 that didn't make it to the output.  */
6132c87b03e5Sespie       if (new_origin)
6133c87b03e5Sespie 	{
6134c87b03e5Sespie 	  tree *pp = &BLOCK_FRAGMENT_CHAIN (new_origin);
6135c87b03e5Sespie 	  tree chain = *pp;
6136c87b03e5Sespie 
6137c87b03e5Sespie 	  while (chain)
6138c87b03e5Sespie 	    {
6139c87b03e5Sespie 	      if (TREE_ASM_WRITTEN (chain))
6140c87b03e5Sespie 		{
6141c87b03e5Sespie 		  BLOCK_FRAGMENT_ORIGIN (chain) = new_origin;
6142c87b03e5Sespie 		  *pp = chain;
6143c87b03e5Sespie 		  pp = &BLOCK_FRAGMENT_CHAIN (chain);
6144c87b03e5Sespie 		}
6145c87b03e5Sespie 	      chain = BLOCK_FRAGMENT_CHAIN (chain);
6146c87b03e5Sespie 	    }
6147c87b03e5Sespie 	  *pp = NULL_TREE;
6148c87b03e5Sespie 	}
6149c87b03e5Sespie 
6150c87b03e5Sespie       reorder_fix_fragments (BLOCK_SUBBLOCKS (block));
6151c87b03e5Sespie       block = BLOCK_CHAIN (block);
6152c87b03e5Sespie     }
6153c87b03e5Sespie }
6154c87b03e5Sespie 
6155c87b03e5Sespie /* Reverse the order of elements in the chain T of blocks,
6156c87b03e5Sespie    and return the new head of the chain (old last element).  */
6157c87b03e5Sespie 
6158c87b03e5Sespie static tree
blocks_nreverse(t)6159c87b03e5Sespie blocks_nreverse (t)
6160c87b03e5Sespie      tree t;
6161c87b03e5Sespie {
6162c87b03e5Sespie   tree prev = 0, decl, next;
6163c87b03e5Sespie   for (decl = t; decl; decl = next)
6164c87b03e5Sespie     {
6165c87b03e5Sespie       next = BLOCK_CHAIN (decl);
6166c87b03e5Sespie       BLOCK_CHAIN (decl) = prev;
6167c87b03e5Sespie       prev = decl;
6168c87b03e5Sespie     }
6169c87b03e5Sespie   return prev;
6170c87b03e5Sespie }
6171c87b03e5Sespie 
6172c87b03e5Sespie /* Count the subblocks of the list starting with BLOCK.  If VECTOR is
6173c87b03e5Sespie    non-NULL, list them all into VECTOR, in a depth-first preorder
6174c87b03e5Sespie    traversal of the block tree.  Also clear TREE_ASM_WRITTEN in all
6175c87b03e5Sespie    blocks.  */
6176c87b03e5Sespie 
6177c87b03e5Sespie static int
all_blocks(block,vector)6178c87b03e5Sespie all_blocks (block, vector)
6179c87b03e5Sespie      tree block;
6180c87b03e5Sespie      tree *vector;
6181c87b03e5Sespie {
6182c87b03e5Sespie   int n_blocks = 0;
6183c87b03e5Sespie 
6184c87b03e5Sespie   while (block)
6185c87b03e5Sespie     {
6186c87b03e5Sespie       TREE_ASM_WRITTEN (block) = 0;
6187c87b03e5Sespie 
6188c87b03e5Sespie       /* Record this block.  */
6189c87b03e5Sespie       if (vector)
6190c87b03e5Sespie 	vector[n_blocks] = block;
6191c87b03e5Sespie 
6192c87b03e5Sespie       ++n_blocks;
6193c87b03e5Sespie 
6194c87b03e5Sespie       /* Record the subblocks, and their subblocks...  */
6195c87b03e5Sespie       n_blocks += all_blocks (BLOCK_SUBBLOCKS (block),
6196c87b03e5Sespie 			      vector ? vector + n_blocks : 0);
6197c87b03e5Sespie       block = BLOCK_CHAIN (block);
6198c87b03e5Sespie     }
6199c87b03e5Sespie 
6200c87b03e5Sespie   return n_blocks;
6201c87b03e5Sespie }
6202c87b03e5Sespie 
6203c87b03e5Sespie /* Return a vector containing all the blocks rooted at BLOCK.  The
6204c87b03e5Sespie    number of elements in the vector is stored in N_BLOCKS_P.  The
6205c87b03e5Sespie    vector is dynamically allocated; it is the caller's responsibility
6206c87b03e5Sespie    to call `free' on the pointer returned.  */
6207c87b03e5Sespie 
6208c87b03e5Sespie static tree *
get_block_vector(block,n_blocks_p)6209c87b03e5Sespie get_block_vector (block, n_blocks_p)
6210c87b03e5Sespie      tree block;
6211c87b03e5Sespie      int *n_blocks_p;
6212c87b03e5Sespie {
6213c87b03e5Sespie   tree *block_vector;
6214c87b03e5Sespie 
6215c87b03e5Sespie   *n_blocks_p = all_blocks (block, NULL);
6216c87b03e5Sespie   block_vector = (tree *) xmalloc (*n_blocks_p * sizeof (tree));
6217c87b03e5Sespie   all_blocks (block, block_vector);
6218c87b03e5Sespie 
6219c87b03e5Sespie   return block_vector;
6220c87b03e5Sespie }
6221c87b03e5Sespie 
/* Running index handed out by number_blocks.  Starts at 2 because the
   top-level BLOCK of a function is not numbered and numbering begins
   at two (reset to 1 per function for SDB/XCOFF debug output).  */
static int next_block_index = 2;
6223c87b03e5Sespie 
6224c87b03e5Sespie /* Set BLOCK_NUMBER for all the blocks in FN.  */
6225c87b03e5Sespie 
6226c87b03e5Sespie void
number_blocks(fn)6227c87b03e5Sespie number_blocks (fn)
6228c87b03e5Sespie      tree fn;
6229c87b03e5Sespie {
6230c87b03e5Sespie   int i;
6231c87b03e5Sespie   int n_blocks;
6232c87b03e5Sespie   tree *block_vector;
6233c87b03e5Sespie 
6234c87b03e5Sespie   /* For SDB and XCOFF debugging output, we start numbering the blocks
6235c87b03e5Sespie      from 1 within each function, rather than keeping a running
6236c87b03e5Sespie      count.  */
6237c87b03e5Sespie #if defined (SDB_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
6238c87b03e5Sespie   if (write_symbols == SDB_DEBUG || write_symbols == XCOFF_DEBUG)
6239c87b03e5Sespie     next_block_index = 1;
6240c87b03e5Sespie #endif
6241c87b03e5Sespie 
6242c87b03e5Sespie   block_vector = get_block_vector (DECL_INITIAL (fn), &n_blocks);
6243c87b03e5Sespie 
6244c87b03e5Sespie   /* The top-level BLOCK isn't numbered at all.  */
6245c87b03e5Sespie   for (i = 1; i < n_blocks; ++i)
6246c87b03e5Sespie     /* We number the blocks from two.  */
6247c87b03e5Sespie     BLOCK_NUMBER (block_vector[i]) = next_block_index++;
6248c87b03e5Sespie 
6249c87b03e5Sespie   free (block_vector);
6250c87b03e5Sespie 
6251c87b03e5Sespie   return;
6252c87b03e5Sespie }
6253c87b03e5Sespie 
6254c87b03e5Sespie /* If VAR is present in a subblock of BLOCK, return the subblock.  */
6255c87b03e5Sespie 
6256c87b03e5Sespie tree
debug_find_var_in_block_tree(var,block)6257c87b03e5Sespie debug_find_var_in_block_tree (var, block)
6258c87b03e5Sespie      tree var;
6259c87b03e5Sespie      tree block;
6260c87b03e5Sespie {
6261c87b03e5Sespie   tree t;
6262c87b03e5Sespie 
6263c87b03e5Sespie   for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
6264c87b03e5Sespie     if (t == var)
6265c87b03e5Sespie       return block;
6266c87b03e5Sespie 
6267c87b03e5Sespie   for (t = BLOCK_SUBBLOCKS (block); t; t = TREE_CHAIN (t))
6268c87b03e5Sespie     {
6269c87b03e5Sespie       tree ret = debug_find_var_in_block_tree (var, t);
6270c87b03e5Sespie       if (ret)
6271c87b03e5Sespie 	return ret;
6272c87b03e5Sespie     }
6273c87b03e5Sespie 
6274c87b03e5Sespie   return NULL_TREE;
6275c87b03e5Sespie }
6276c87b03e5Sespie 
6277c87b03e5Sespie /* Allocate a function structure and reset its contents to the defaults.  */
6278c87b03e5Sespie 
6279c87b03e5Sespie static void
prepare_function_start()6280c87b03e5Sespie prepare_function_start ()
6281c87b03e5Sespie {
6282c87b03e5Sespie   cfun = (struct function *) ggc_alloc_cleared (sizeof (struct function));
6283c87b03e5Sespie 
6284c87b03e5Sespie   init_stmt_for_function ();
6285c87b03e5Sespie   init_eh_for_function ();
6286c87b03e5Sespie 
6287c87b03e5Sespie   cse_not_expected = ! optimize;
6288c87b03e5Sespie 
6289c87b03e5Sespie   /* Caller save not needed yet.  */
6290c87b03e5Sespie   caller_save_needed = 0;
6291c87b03e5Sespie 
6292c87b03e5Sespie   /* No stack slots have been made yet.  */
6293c87b03e5Sespie   stack_slot_list = 0;
6294c87b03e5Sespie 
6295c87b03e5Sespie   current_function_has_nonlocal_label = 0;
6296c87b03e5Sespie   current_function_has_nonlocal_goto = 0;
6297c87b03e5Sespie 
6298c87b03e5Sespie   /* There is no stack slot for handling nonlocal gotos.  */
6299c87b03e5Sespie   nonlocal_goto_handler_slots = 0;
6300c87b03e5Sespie   nonlocal_goto_stack_level = 0;
6301c87b03e5Sespie 
6302c87b03e5Sespie   /* No labels have been declared for nonlocal use.  */
6303c87b03e5Sespie   nonlocal_labels = 0;
6304c87b03e5Sespie   nonlocal_goto_handler_labels = 0;
6305c87b03e5Sespie 
6306c87b03e5Sespie   /* No function calls so far in this function.  */
6307c87b03e5Sespie   function_call_count = 0;
6308c87b03e5Sespie 
6309c87b03e5Sespie   /* No parm regs have been allocated.
6310c87b03e5Sespie      (This is important for output_inline_function.)  */
6311c87b03e5Sespie   max_parm_reg = LAST_VIRTUAL_REGISTER + 1;
6312c87b03e5Sespie 
6313c87b03e5Sespie   /* Initialize the RTL mechanism.  */
6314c87b03e5Sespie   init_emit ();
6315c87b03e5Sespie 
6316c87b03e5Sespie   /* Initialize the queue of pending postincrement and postdecrements,
6317c87b03e5Sespie      and some other info in expr.c.  */
6318c87b03e5Sespie   init_expr ();
6319c87b03e5Sespie 
6320c87b03e5Sespie   /* We haven't done register allocation yet.  */
6321c87b03e5Sespie   reg_renumber = 0;
6322c87b03e5Sespie 
6323c87b03e5Sespie   init_varasm_status (cfun);
6324c87b03e5Sespie 
6325c87b03e5Sespie   /* Clear out data used for inlining.  */
6326c87b03e5Sespie   cfun->inlinable = 0;
6327c87b03e5Sespie   cfun->original_decl_initial = 0;
6328c87b03e5Sespie   cfun->original_arg_vector = 0;
6329c87b03e5Sespie 
6330c87b03e5Sespie   cfun->stack_alignment_needed = STACK_BOUNDARY;
6331c87b03e5Sespie   cfun->preferred_stack_boundary = STACK_BOUNDARY;
6332c87b03e5Sespie 
6333c87b03e5Sespie   /* Set if a call to setjmp is seen.  */
6334c87b03e5Sespie   current_function_calls_setjmp = 0;
6335c87b03e5Sespie 
6336c87b03e5Sespie   /* Set if a call to longjmp is seen.  */
6337c87b03e5Sespie   current_function_calls_longjmp = 0;
6338c87b03e5Sespie 
6339c87b03e5Sespie   current_function_calls_alloca = 0;
6340c87b03e5Sespie   current_function_contains_functions = 0;
6341c87b03e5Sespie   current_function_is_leaf = 0;
6342c87b03e5Sespie   current_function_nothrow = 0;
6343c87b03e5Sespie   current_function_sp_is_unchanging = 0;
6344c87b03e5Sespie   current_function_uses_only_leaf_regs = 0;
6345c87b03e5Sespie   current_function_has_computed_jump = 0;
6346c87b03e5Sespie   current_function_is_thunk = 0;
6347c87b03e5Sespie 
6348c87b03e5Sespie   current_function_returns_pcc_struct = 0;
6349c87b03e5Sespie   current_function_returns_struct = 0;
6350c87b03e5Sespie   current_function_epilogue_delay_list = 0;
6351c87b03e5Sespie   current_function_uses_const_pool = 0;
6352c87b03e5Sespie   current_function_uses_pic_offset_table = 0;
6353c87b03e5Sespie   current_function_cannot_inline = 0;
6354c87b03e5Sespie 
6355c87b03e5Sespie   /* We have not yet needed to make a label to jump to for tail-recursion.  */
6356c87b03e5Sespie   tail_recursion_label = 0;
6357c87b03e5Sespie 
6358c87b03e5Sespie   /* We haven't had a need to make a save area for ap yet.  */
6359c87b03e5Sespie   arg_pointer_save_area = 0;
6360c87b03e5Sespie 
6361c87b03e5Sespie   /* No stack slots allocated yet.  */
6362c87b03e5Sespie   frame_offset = 0;
6363c87b03e5Sespie 
6364c87b03e5Sespie   /* No SAVE_EXPRs in this function yet.  */
6365c87b03e5Sespie   save_expr_regs = 0;
6366c87b03e5Sespie 
6367c87b03e5Sespie   /* No RTL_EXPRs in this function yet.  */
6368c87b03e5Sespie   rtl_expr_chain = 0;
6369c87b03e5Sespie 
6370c87b03e5Sespie   /* Set up to allocate temporaries.  */
6371c87b03e5Sespie   init_temp_slots ();
6372c87b03e5Sespie 
6373c87b03e5Sespie   /* Indicate that we need to distinguish between the return value of the
6374c87b03e5Sespie      present function and the return value of a function being called.  */
6375c87b03e5Sespie   rtx_equal_function_value_matters = 1;
6376c87b03e5Sespie 
6377c87b03e5Sespie   /* Indicate that we have not instantiated virtual registers yet.  */
6378c87b03e5Sespie   virtuals_instantiated = 0;
6379c87b03e5Sespie 
6380c87b03e5Sespie   /* Indicate that we want CONCATs now.  */
6381c87b03e5Sespie   generating_concat_p = 1;
6382c87b03e5Sespie 
6383c87b03e5Sespie   /* Indicate we have no need of a frame pointer yet.  */
6384c87b03e5Sespie   frame_pointer_needed = 0;
6385c87b03e5Sespie 
6386c87b03e5Sespie   /* By default assume not stdarg.  */
6387c87b03e5Sespie   current_function_stdarg = 0;
6388c87b03e5Sespie 
6389c87b03e5Sespie   /* We haven't made any trampolines for this function yet.  */
6390c87b03e5Sespie   trampoline_list = 0;
6391c87b03e5Sespie 
6392c87b03e5Sespie   init_pending_stack_adjust ();
6393c87b03e5Sespie   inhibit_defer_pop = 0;
6394c87b03e5Sespie 
6395c87b03e5Sespie   current_function_outgoing_args_size = 0;
6396c87b03e5Sespie 
6397c87b03e5Sespie   current_function_funcdef_no = funcdef_no++;
6398c87b03e5Sespie 
6399c87b03e5Sespie   cfun->arc_profile = profile_arc_flag || flag_test_coverage;
6400c87b03e5Sespie 
6401c87b03e5Sespie   cfun->arc_profile = profile_arc_flag || flag_test_coverage;
6402c87b03e5Sespie 
6403c87b03e5Sespie   cfun->function_frequency = FUNCTION_FREQUENCY_NORMAL;
6404c87b03e5Sespie 
6405c87b03e5Sespie   cfun->max_jumptable_ents = 0;
6406c87b03e5Sespie 
6407c87b03e5Sespie   (*lang_hooks.function.init) (cfun);
6408c87b03e5Sespie   if (init_machine_status)
6409c87b03e5Sespie     cfun->machine = (*init_machine_status) ();
6410c87b03e5Sespie }
6411c87b03e5Sespie 
6412c87b03e5Sespie /* Initialize the rtl expansion mechanism so that we can do simple things
6413c87b03e5Sespie    like generate sequences.  This is used to provide a context during global
6414c87b03e5Sespie    initialization of some passes.  */
void
init_dummy_function_start ()
{
  /* All the state required for emitting simple sequences is set up by
     prepare_function_start; nothing more is needed for a dummy
     function context.  */
  prepare_function_start ();
}
6420c87b03e5Sespie 
6421c87b03e5Sespie /* Generate RTL for the start of the function SUBR (a FUNCTION_DECL tree node)
6422c87b03e5Sespie    and initialize static variables for generating RTL for the statements
6423c87b03e5Sespie    of the function.  */
6424c87b03e5Sespie 
void
init_function_start (subr, filename, line)
     tree subr;
     const char *filename;
     int line;
{
  /* Allocate and initialize the per-function data (cfun and the
     various current_function_* globals).  */
  prepare_function_start ();

  /* Record a printable name for diagnostics and remember the decl
     being compiled.  */
  current_function_name = (*lang_hooks.decl_printable_name) (subr, 2);
  cfun->decl = subr;

  /* Nonzero if this is a nested function that uses a static chain.  */

  current_function_needs_context
    = (decl_function_context (current_function_decl) != 0
       && ! DECL_NO_STATIC_CHAIN (current_function_decl));

  /* Within function body, compute a type's size as soon it is laid out.  */
  immediate_size_expand++;

  /* Prevent ever trying to delete the first instruction of a function.
     Also tell final how to output a linenum before the function prologue.
     Note linenums could be missing, e.g. when compiling a Java .class file.  */
  if (line > 0)
    emit_line_note (filename, line);

  /* Make sure first insn is a note even if we don't want linenums.
     This makes sure the first insn will never be deleted.
     Also, final expects a note to appear there.  */
  emit_note (NULL, NOTE_INSN_DELETED);

  /* Set flags used by final.c.  */
  if (aggregate_value_p (DECL_RESULT (subr)))
    {
#ifdef PCC_STATIC_STRUCT_RETURN
      current_function_returns_pcc_struct = 1;
#endif
      current_function_returns_struct = 1;
    }

  /* Warn if this value is an aggregate type,
     regardless of which calling convention we are using for it.  */
  if (warn_aggregate_return
      && AGGREGATE_TYPE_P (TREE_TYPE (DECL_RESULT (subr))))
    warning ("function returns an aggregate");

  /* Record whether the declared result is of pointer type.  */
  current_function_returns_pointer
    = POINTER_TYPE_P (TREE_TYPE (DECL_RESULT (subr)));
}
6474c87b03e5Sespie 
6475c87b03e5Sespie /* Make sure all values used by the optimization passes have sane
6476c87b03e5Sespie    defaults.  */
6477c87b03e5Sespie void
init_function_for_compilation()6478c87b03e5Sespie init_function_for_compilation ()
6479c87b03e5Sespie {
6480c87b03e5Sespie   reg_renumber = 0;
6481c87b03e5Sespie 
6482c87b03e5Sespie   /* No prologue/epilogue insns yet.  */
6483c87b03e5Sespie   VARRAY_GROW (prologue, 0);
6484c87b03e5Sespie   VARRAY_GROW (epilogue, 0);
6485c87b03e5Sespie   VARRAY_GROW (sibcall_epilogue, 0);
6486c87b03e5Sespie }
6487c87b03e5Sespie 
6488c87b03e5Sespie /* Expand a call to __main at the beginning of a possible main function.  */
6489c87b03e5Sespie 
/* When the target has an .init section and no explicit INVOKE__main
   hook, initialization is handled by the init section, so the library
   call to __main below is suppressed.  */
#if defined(INIT_SECTION_ASM_OP) && !defined(INVOKE__main)
#undef HAS_INIT_SECTION
#define HAS_INIT_SECTION
#endif

void
expand_main_function ()
{
#ifdef FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN
  if (FORCE_PREFERRED_STACK_BOUNDARY_IN_MAIN)
    {
      int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      rtx tmp, seq;

      start_sequence ();
      /* Forcibly align the stack.  */
#ifdef STACK_GROWS_DOWNWARD
      /* Stack grows downward: rounding the stack pointer down to a
	 multiple of ALIGN (masking with -align) is sufficient.  */
      tmp = expand_simple_binop (Pmode, AND, stack_pointer_rtx, GEN_INT(-align),
				 stack_pointer_rtx, 1, OPTAB_WIDEN);
#else
      /* Stack grows upward: round up by adding ALIGN-1, then mask off
	 the low bits.  */
      tmp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
				 GEN_INT (align - 1), NULL_RTX, 1, OPTAB_WIDEN);
      tmp = expand_simple_binop (Pmode, AND, tmp, GEN_INT (-align),
				 stack_pointer_rtx, 1, OPTAB_WIDEN);
#endif
      if (tmp != stack_pointer_rtx)
	emit_move_insn (stack_pointer_rtx, tmp);

      /* Enlist allocate_dynamic_stack_space to pick up the pieces.  */
      tmp = force_reg (Pmode, const0_rtx);
      allocate_dynamic_stack_space (tmp, NULL_RTX, BIGGEST_ALIGNMENT);
      seq = get_insns ();
      end_sequence ();

      /* Insert the alignment sequence just before the
	 NOTE_INSN_FUNCTION_BEG note (i.e. after parameter setup); if
	 no such note is found, simply append it.  */
      for (tmp = get_last_insn (); tmp; tmp = PREV_INSN (tmp))
	if (NOTE_P (tmp) && NOTE_LINE_NUMBER (tmp) == NOTE_INSN_FUNCTION_BEG)
	  break;
      if (tmp)
	emit_insn_before (seq, tmp);
      else
	emit_insn (seq);
    }
#endif

#ifndef HAS_INIT_SECTION
  /* Call __main so static constructors etc. get run before main's
     body (targets without an init section).  */
  emit_library_call (gen_rtx_SYMBOL_REF (Pmode, NAME__MAIN), LCT_NORMAL,
		     VOIDmode, 0);
#endif
}
6539c87b03e5Sespie 
6540c87b03e5Sespie /* The PENDING_SIZES represent the sizes of variable-sized types.
6541c87b03e5Sespie    Create RTL for the various sizes now (using temporary variables),
6542c87b03e5Sespie    so that we can refer to the sizes from the RTL we are generating
6543c87b03e5Sespie    for the current function.  The PENDING_SIZES are a TREE_LIST.  The
6544c87b03e5Sespie    TREE_VALUE of each node is a SAVE_EXPR.  */
6545c87b03e5Sespie 
6546c87b03e5Sespie void
expand_pending_sizes(pending_sizes)6547c87b03e5Sespie expand_pending_sizes (pending_sizes)
6548c87b03e5Sespie      tree pending_sizes;
6549c87b03e5Sespie {
6550c87b03e5Sespie   tree tem;
6551c87b03e5Sespie 
6552c87b03e5Sespie   /* Evaluate now the sizes of any types declared among the arguments.  */
6553c87b03e5Sespie   for (tem = pending_sizes; tem; tem = TREE_CHAIN (tem))
6554c87b03e5Sespie     {
6555c87b03e5Sespie       expand_expr (TREE_VALUE (tem), const0_rtx, VOIDmode, 0);
6556c87b03e5Sespie       /* Flush the queue in case this parameter declaration has
6557c87b03e5Sespie 	 side-effects.  */
6558c87b03e5Sespie       emit_queue ();
6559c87b03e5Sespie     }
6560c87b03e5Sespie }
6561c87b03e5Sespie 
6562c87b03e5Sespie /* Start the RTL for a new function, and set variables used for
6563c87b03e5Sespie    emitting RTL.
6564c87b03e5Sespie    SUBR is the FUNCTION_DECL node.
6565c87b03e5Sespie    PARMS_HAVE_CLEANUPS is nonzero if there are cleanups associated with
6566c87b03e5Sespie    the function's parameters, which must be run at any return statement.  */
6567c87b03e5Sespie 
void
expand_function_start (subr, parms_have_cleanups)
     tree subr;
     int parms_have_cleanups;
{
  tree tem;
  rtx last_ptr = NULL_RTX;

  /* Make sure volatile mem refs aren't considered
     valid operands of arithmetic insns.  */
  init_recog_no_volatile ();

  /* Entry/exit instrumentation and profiling both honor the decl's
     no-instrument attribute.  */
  current_function_instrument_entry_exit
    = (flag_instrument_function_entry_exit
       && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));

  current_function_profile
    = (profile_flag
       && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (subr));

  current_function_limit_stack
    = (stack_limit_rtx != NULL_RTX && ! DECL_NO_LIMIT_STACK (subr));

  /* If function gets a static chain arg, store it in the stack frame.
     Do this first, so it gets the first stack slot offset.  */
  if (current_function_needs_context)
    {
      last_ptr = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);

      /* Delay copying static chain if it is not a register to avoid
	 conflicts with regs used for parameters.  */
      if (! SMALL_REGISTER_CLASSES
	  || GET_CODE (static_chain_incoming_rtx) == REG)
	emit_move_insn (last_ptr, static_chain_incoming_rtx);
    }

  /* If the parameters of this function need cleaning up, get a label
     for the beginning of the code which executes those cleanups.  This must
     be done before doing anything with return_label.  */
  if (parms_have_cleanups)
    cleanup_label = gen_label_rtx ();
  else
    cleanup_label = 0;

  /* Make the label for return statements to jump to.  Do not special
     case machines with special return instructions -- they will be
     handled later during jump, ifcvt, or epilogue creation.  */
  return_label = gen_label_rtx ();

  /* Initialize rtx used to return the value.  */
  /* Do this before assign_parms so that we copy the struct value address
     before any library calls that assign parms might generate.  */

  /* Decide whether to return the value in memory or in a register.  */
  if (aggregate_value_p (DECL_RESULT (subr)))
    {
      /* Returning something that won't go in a register.  */
      rtx value_address = 0;

#ifdef PCC_STATIC_STRUCT_RETURN
      /* PCC-style: the value lives in static storage.  */
      if (current_function_returns_pcc_struct)
	{
	  int size = int_size_in_bytes (TREE_TYPE (DECL_RESULT (subr)));
	  value_address = assemble_static_space (size);
	}
      else
#endif
	{
	  /* Expect to be passed the address of a place to store the value.
	     If it is passed as an argument, assign_parms will take care of
	     it.  */
	  if (struct_value_incoming_rtx)
	    {
	      value_address = gen_reg_rtx (Pmode);
	      emit_move_insn (value_address, struct_value_incoming_rtx);
	    }
	}
      if (value_address)
	{
	  rtx x = gen_rtx_MEM (DECL_MODE (DECL_RESULT (subr)), value_address);
	  set_mem_attributes (x, DECL_RESULT (subr), 1);
	  SET_DECL_RTL (DECL_RESULT (subr), x);
	}
    }
  else if (DECL_MODE (DECL_RESULT (subr)) == VOIDmode)
    /* If return mode is void, this decl rtl should not be used.  */
    SET_DECL_RTL (DECL_RESULT (subr), NULL_RTX);
  else
    {
      /* Compute the return values into a pseudo reg, which we will copy
	 into the true return register after the cleanups are done.  */

      /* In order to figure out what mode to use for the pseudo, we
	 figure out what the mode of the eventual return register will
	 actually be, and use that.  */
      rtx hard_reg
	= hard_function_value (TREE_TYPE (DECL_RESULT (subr)),
			       subr, 1);

      /* Structures that are returned in registers are not aggregate_value_p,
	 so we may see a PARALLEL or a REG.  */
      if (REG_P (hard_reg))
	SET_DECL_RTL (DECL_RESULT (subr), gen_reg_rtx (GET_MODE (hard_reg)));
      else if (GET_CODE (hard_reg) == PARALLEL)
	SET_DECL_RTL (DECL_RESULT (subr), gen_group_rtx (hard_reg));
      else
	abort ();

      /* Set DECL_REGISTER flag so that expand_function_end will copy the
	 result to the real return register(s).  */
      DECL_REGISTER (DECL_RESULT (subr)) = 1;
    }

  /* Initialize rtx for parameters and local variables.
     In some cases this requires emitting insns.  */

  assign_parms (subr);

  /* Copy the static chain now if it wasn't a register.  The delay is to
     avoid conflicts with the parameter passing registers.  */

  if (SMALL_REGISTER_CLASSES && current_function_needs_context)
    if (GET_CODE (static_chain_incoming_rtx) != REG)
      emit_move_insn (last_ptr, static_chain_incoming_rtx);

  /* The following was moved from init_function_start.
     The move is supposed to make sdb output more accurate.  */
  /* Indicate the beginning of the function body,
     as opposed to parm setup.  */
  emit_note (NULL, NOTE_INSN_FUNCTION_BEG);

  if (GET_CODE (get_last_insn ()) != NOTE)
    emit_note (NULL, NOTE_INSN_DELETED);
  parm_birth_insn = get_last_insn ();

  /* Build the display: for each containing function, a (context,
     RTL_EXPR) pair giving access to its frame.  */
  context_display = 0;
  if (current_function_needs_context)
    {
      /* Fetch static chain values for containing functions.  */
      tem = decl_function_context (current_function_decl);
      /* Copy the static chain pointer into a pseudo.  If we have
	 small register classes, copy the value from memory if
	 static_chain_incoming_rtx is a REG.  */
      if (tem)
	{
	  /* If the static chain originally came in a register, put it back
	     there, then move it out in the next insn.  The reason for
	     this peculiar code is to satisfy function integration.  */
	  if (SMALL_REGISTER_CLASSES
	      && GET_CODE (static_chain_incoming_rtx) == REG)
	    emit_move_insn (static_chain_incoming_rtx, last_ptr);
	  last_ptr = copy_to_reg (static_chain_incoming_rtx);
	}

      while (tem)
	{
	  tree rtlexp = make_node (RTL_EXPR);

	  RTL_EXPR_RTL (rtlexp) = last_ptr;
	  context_display = tree_cons (tem, rtlexp, context_display);
	  tem = decl_function_context (tem);
	  if (tem == 0)
	    break;
	  /* Chain thru stack frames, assuming pointer to next lexical frame
	     is found at the place we always store it.  */
#ifdef FRAME_GROWS_DOWNWARD
	  last_ptr = plus_constant (last_ptr,
				    -(HOST_WIDE_INT) GET_MODE_SIZE (Pmode));
#endif
	  last_ptr = gen_rtx_MEM (Pmode, memory_address (Pmode, last_ptr));
	  set_mem_alias_set (last_ptr, get_frame_alias_set ());
	  last_ptr = copy_to_reg (last_ptr);

	  /* If we are not optimizing, ensure that we know that this
	     piece of context is live over the entire function.  */
	  if (! optimize)
	    save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, last_ptr,
						save_expr_regs);
	}
    }

  if (current_function_instrument_entry_exit)
    {
      rtx fun = DECL_RTL (current_function_decl);
      if (GET_CODE (fun) == MEM)
	fun = XEXP (fun, 0);
      else
	abort ();
      /* Call the entry-profiling libfunc with two arguments: this
	 function's address and the caller's return address.  */
      emit_library_call (profile_function_entry_libfunc, LCT_NORMAL, VOIDmode,
			 2, fun, Pmode,
			 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
						     0,
						     hard_frame_pointer_rtx),
			 Pmode);
    }

  if (current_function_profile)
    {
#ifdef PROFILE_HOOK
      PROFILE_HOOK (current_function_funcdef_no);
#endif
    }

  /* After the display initializations is where the tail-recursion label
     should go, if we end up needing one.   Ensure we have a NOTE here
     since some things (like trampolines) get placed before this.  */
  tail_recursion_reentry = emit_note (NULL, NOTE_INSN_DELETED);

  /* Evaluate now the sizes of any types declared among the arguments.  */
  expand_pending_sizes (nreverse (get_pending_sizes ()));

  /* Make sure there is a line number after the function entry setup code.  */
  force_next_line_note ();
}
6782c87b03e5Sespie 
6783c87b03e5Sespie /* Undo the effects of init_dummy_function_start.  */
6784c87b03e5Sespie void
expand_dummy_function_end()6785c87b03e5Sespie expand_dummy_function_end ()
6786c87b03e5Sespie {
6787c87b03e5Sespie   /* End any sequences that failed to be closed due to syntax errors.  */
6788c87b03e5Sespie   while (in_sequence_p ())
6789c87b03e5Sespie     end_sequence ();
6790c87b03e5Sespie 
6791c87b03e5Sespie   /* Outside function body, can't compute type's actual size
6792c87b03e5Sespie      until next function's body starts.  */
6793c87b03e5Sespie 
6794c87b03e5Sespie   free_after_parsing (cfun);
6795c87b03e5Sespie   free_after_compilation (cfun);
6796c87b03e5Sespie   cfun = 0;
6797c87b03e5Sespie }
6798c87b03e5Sespie 
6799c87b03e5Sespie /* Call DOIT for each hard register used as a return value from
6800c87b03e5Sespie    the current function.  */
6801c87b03e5Sespie 
6802c87b03e5Sespie void
6803c87b03e5Sespie diddle_return_value (doit, arg)
6804c87b03e5Sespie      void (*doit) PARAMS ((rtx, void *));
6805c87b03e5Sespie      void *arg;
6806c87b03e5Sespie {
6807c87b03e5Sespie   rtx outgoing = current_function_return_rtx;
6808c87b03e5Sespie 
6809c87b03e5Sespie   if (! outgoing)
6810c87b03e5Sespie     return;
6811c87b03e5Sespie 
6812c87b03e5Sespie   if (GET_CODE (outgoing) == REG)
6813c87b03e5Sespie     (*doit) (outgoing, arg);
6814c87b03e5Sespie   else if (GET_CODE (outgoing) == PARALLEL)
6815c87b03e5Sespie     {
6816c87b03e5Sespie       int i;
6817c87b03e5Sespie 
6818c87b03e5Sespie       for (i = 0; i < XVECLEN (outgoing, 0); i++)
6819c87b03e5Sespie 	{
6820c87b03e5Sespie 	  rtx x = XEXP (XVECEXP (outgoing, 0, i), 0);
6821c87b03e5Sespie 
6822c87b03e5Sespie 	  if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6823c87b03e5Sespie 	    (*doit) (x, arg);
6824c87b03e5Sespie 	}
6825c87b03e5Sespie     }
6826c87b03e5Sespie }
6827c87b03e5Sespie 
6828c87b03e5Sespie static void
do_clobber_return_reg(reg,arg)6829c87b03e5Sespie do_clobber_return_reg (reg, arg)
6830c87b03e5Sespie      rtx reg;
6831c87b03e5Sespie      void *arg ATTRIBUTE_UNUSED;
6832c87b03e5Sespie {
6833c87b03e5Sespie   emit_insn (gen_rtx_CLOBBER (VOIDmode, reg));
6834c87b03e5Sespie }
6835c87b03e5Sespie 
6836c87b03e5Sespie void
clobber_return_register()6837c87b03e5Sespie clobber_return_register ()
6838c87b03e5Sespie {
6839c87b03e5Sespie   diddle_return_value (do_clobber_return_reg, NULL);
6840c87b03e5Sespie 
6841c87b03e5Sespie   /* In case we do use pseudo to return value, clobber it too.  */
6842c87b03e5Sespie   if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
6843c87b03e5Sespie     {
6844c87b03e5Sespie       tree decl_result = DECL_RESULT (current_function_decl);
6845c87b03e5Sespie       rtx decl_rtl = DECL_RTL (decl_result);
6846c87b03e5Sespie       if (REG_P (decl_rtl) && REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER)
6847c87b03e5Sespie 	{
6848c87b03e5Sespie 	  do_clobber_return_reg (decl_rtl, NULL);
6849c87b03e5Sespie 	}
6850c87b03e5Sespie     }
6851c87b03e5Sespie }
6852c87b03e5Sespie 
6853c87b03e5Sespie static void
do_use_return_reg(reg,arg)6854c87b03e5Sespie do_use_return_reg (reg, arg)
6855c87b03e5Sespie      rtx reg;
6856c87b03e5Sespie      void *arg ATTRIBUTE_UNUSED;
6857c87b03e5Sespie {
6858c87b03e5Sespie   emit_insn (gen_rtx_USE (VOIDmode, reg));
6859c87b03e5Sespie }
6860c87b03e5Sespie 
6861c87b03e5Sespie void
use_return_register()6862c87b03e5Sespie use_return_register ()
6863c87b03e5Sespie {
6864c87b03e5Sespie   diddle_return_value (do_use_return_reg, NULL);
6865c87b03e5Sespie }
6866c87b03e5Sespie 
6867c87b03e5Sespie static GTY(()) rtx initial_trampoline;
6868c87b03e5Sespie 
6869c87b03e5Sespie /* Generate RTL for the end of the current function.
6870c87b03e5Sespie    FILENAME and LINE are the current position in the source file.
6871c87b03e5Sespie 
6872c87b03e5Sespie    It is up to language-specific callers to do cleanups for parameters--
6873c87b03e5Sespie    or else, supply 1 for END_BINDINGS and we will call expand_end_bindings.  */
6874c87b03e5Sespie 
6875c87b03e5Sespie void
expand_function_end(filename,line,end_bindings)6876c87b03e5Sespie expand_function_end (filename, line, end_bindings)
6877c87b03e5Sespie      const char *filename;
6878c87b03e5Sespie      int line;
6879c87b03e5Sespie      int end_bindings;
6880c87b03e5Sespie {
6881c87b03e5Sespie   tree link;
6882c87b03e5Sespie   rtx clobber_after;
6883c87b03e5Sespie 
6884c87b03e5Sespie   finish_expr_for_function ();
6885c87b03e5Sespie 
6886c87b03e5Sespie   /* If arg_pointer_save_area was referenced only from a nested
6887c87b03e5Sespie      function, we will not have initialized it yet.  Do that now.  */
6888c87b03e5Sespie   if (arg_pointer_save_area && ! cfun->arg_pointer_save_area_init)
6889c87b03e5Sespie     get_arg_pointer_save_area (cfun);
6890c87b03e5Sespie 
6891c87b03e5Sespie #ifdef NON_SAVING_SETJMP
6892c87b03e5Sespie   /* Don't put any variables in registers if we call setjmp
6893c87b03e5Sespie      on a machine that fails to restore the registers.  */
6894c87b03e5Sespie   if (NON_SAVING_SETJMP && current_function_calls_setjmp)
6895c87b03e5Sespie     {
6896c87b03e5Sespie       if (DECL_INITIAL (current_function_decl) != error_mark_node)
6897c87b03e5Sespie 	setjmp_protect (DECL_INITIAL (current_function_decl));
6898c87b03e5Sespie 
6899c87b03e5Sespie       setjmp_protect_args ();
6900c87b03e5Sespie     }
6901c87b03e5Sespie #endif
6902c87b03e5Sespie 
6903c87b03e5Sespie   /* Initialize any trampolines required by this function.  */
6904c87b03e5Sespie   for (link = trampoline_list; link; link = TREE_CHAIN (link))
6905c87b03e5Sespie     {
6906c87b03e5Sespie       tree function = TREE_PURPOSE (link);
6907c87b03e5Sespie       rtx context ATTRIBUTE_UNUSED = lookup_static_chain (function);
6908c87b03e5Sespie       rtx tramp = RTL_EXPR_RTL (TREE_VALUE (link));
6909c87b03e5Sespie #ifdef TRAMPOLINE_TEMPLATE
6910c87b03e5Sespie       rtx blktramp;
6911c87b03e5Sespie #endif
6912c87b03e5Sespie       rtx seq;
6913c87b03e5Sespie 
6914c87b03e5Sespie #ifdef TRAMPOLINE_TEMPLATE
6915c87b03e5Sespie       /* First make sure this compilation has a template for
6916c87b03e5Sespie 	 initializing trampolines.  */
6917c87b03e5Sespie       if (initial_trampoline == 0)
6918c87b03e5Sespie 	{
6919c87b03e5Sespie 	  initial_trampoline
6920c87b03e5Sespie 	    = gen_rtx_MEM (BLKmode, assemble_trampoline_template ());
6921c87b03e5Sespie 	  set_mem_align (initial_trampoline, TRAMPOLINE_ALIGNMENT);
6922c87b03e5Sespie 	}
6923c87b03e5Sespie #endif
6924c87b03e5Sespie 
6925c87b03e5Sespie       /* Generate insns to initialize the trampoline.  */
6926c87b03e5Sespie       start_sequence ();
6927c87b03e5Sespie       tramp = round_trampoline_addr (XEXP (tramp, 0));
6928c87b03e5Sespie #ifdef TRAMPOLINE_TEMPLATE
6929c87b03e5Sespie       blktramp = replace_equiv_address (initial_trampoline, tramp);
6930c87b03e5Sespie       emit_block_move (blktramp, initial_trampoline,
6931c87b03e5Sespie 		       GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
6932c87b03e5Sespie #endif
693306dc6460Sespie       trampolines_created = 1;
6934c87b03e5Sespie       INITIALIZE_TRAMPOLINE (tramp, XEXP (DECL_RTL (function), 0), context);
6935c87b03e5Sespie       seq = get_insns ();
6936c87b03e5Sespie       end_sequence ();
6937c87b03e5Sespie 
6938c87b03e5Sespie       /* Put those insns at entry to the containing function (this one).  */
6939c87b03e5Sespie       emit_insn_before (seq, tail_recursion_reentry);
6940c87b03e5Sespie     }
6941c87b03e5Sespie 
6942c87b03e5Sespie   /* If we are doing stack checking and this function makes calls,
6943c87b03e5Sespie      do a stack probe at the start of the function to ensure we have enough
6944c87b03e5Sespie      space for another stack frame.  */
6945c87b03e5Sespie   if (flag_stack_check && ! STACK_CHECK_BUILTIN)
6946c87b03e5Sespie     {
6947c87b03e5Sespie       rtx insn, seq;
6948c87b03e5Sespie 
6949c87b03e5Sespie       for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
6950c87b03e5Sespie 	if (GET_CODE (insn) == CALL_INSN)
6951c87b03e5Sespie 	  {
6952c87b03e5Sespie 	    start_sequence ();
6953c87b03e5Sespie 	    probe_stack_range (STACK_CHECK_PROTECT,
6954c87b03e5Sespie 			       GEN_INT (STACK_CHECK_MAX_FRAME_SIZE));
6955c87b03e5Sespie 	    seq = get_insns ();
6956c87b03e5Sespie 	    end_sequence ();
6957c87b03e5Sespie 	    emit_insn_before (seq, tail_recursion_reentry);
6958c87b03e5Sespie 	    break;
6959c87b03e5Sespie 	  }
6960c87b03e5Sespie     }
6961c87b03e5Sespie 
6962c87b03e5Sespie   /* Warn about unused parms if extra warnings were specified.  */
6963c87b03e5Sespie   /* Either ``-W -Wunused'' or ``-Wunused-parameter'' enables this
6964c87b03e5Sespie      warning.  WARN_UNUSED_PARAMETER is negative when set by
6965c87b03e5Sespie      -Wunused.  */
6966c87b03e5Sespie   if (warn_unused_parameter > 0
6967c87b03e5Sespie       || (warn_unused_parameter < 0 && extra_warnings))
6968c87b03e5Sespie     {
6969c87b03e5Sespie       tree decl;
6970c87b03e5Sespie 
6971c87b03e5Sespie       for (decl = DECL_ARGUMENTS (current_function_decl);
6972c87b03e5Sespie 	   decl; decl = TREE_CHAIN (decl))
6973c87b03e5Sespie 	if (! TREE_USED (decl) && TREE_CODE (decl) == PARM_DECL
6974c87b03e5Sespie 	    && DECL_NAME (decl) && ! DECL_ARTIFICIAL (decl))
6975c87b03e5Sespie 	  warning_with_decl (decl, "unused parameter `%s'");
6976c87b03e5Sespie     }
6977c87b03e5Sespie 
6978c87b03e5Sespie   /* Delete handlers for nonlocal gotos if nothing uses them.  */
6979c87b03e5Sespie   if (nonlocal_goto_handler_slots != 0
6980c87b03e5Sespie       && ! current_function_has_nonlocal_label)
6981c87b03e5Sespie     delete_handlers ();
6982c87b03e5Sespie 
6983c87b03e5Sespie   /* End any sequences that failed to be closed due to syntax errors.  */
6984c87b03e5Sespie   while (in_sequence_p ())
6985c87b03e5Sespie     end_sequence ();
6986c87b03e5Sespie 
6987c87b03e5Sespie   /* Outside function body, can't compute type's actual size
6988c87b03e5Sespie      until next function's body starts.  */
6989c87b03e5Sespie   immediate_size_expand--;
6990c87b03e5Sespie 
6991c87b03e5Sespie   clear_pending_stack_adjust ();
6992c87b03e5Sespie   do_pending_stack_adjust ();
6993c87b03e5Sespie 
699406dc6460Sespie   /* ???  This is a kludge.  We want to ensure that instructions that
699506dc6460Sespie      may trap are not moved into the epilogue by scheduling, because
699606dc6460Sespie      we don't always emit unwind information for the epilogue.
699706dc6460Sespie      However, not all machine descriptions define a blockage insn, so
699806dc6460Sespie      emit an ASM_INPUT to act as one.  */
699906dc6460Sespie   if (flag_non_call_exceptions)
700006dc6460Sespie     emit_insn (gen_rtx_ASM_INPUT (VOIDmode, ""));
700106dc6460Sespie 
7002c87b03e5Sespie   /* Mark the end of the function body.
7003c87b03e5Sespie      If control reaches this insn, the function can drop through
7004c87b03e5Sespie      without returning a value.  */
7005c87b03e5Sespie   emit_note (NULL, NOTE_INSN_FUNCTION_END);
7006c87b03e5Sespie 
7007c87b03e5Sespie   /* Must mark the last line number note in the function, so that the test
7008c87b03e5Sespie      coverage code can avoid counting the last line twice.  This just tells
7009c87b03e5Sespie      the code to ignore the immediately following line note, since there
7010c87b03e5Sespie      already exists a copy of this note somewhere above.  This line number
7011c87b03e5Sespie      note is still needed for debugging though, so we can't delete it.  */
7012c87b03e5Sespie   if (flag_test_coverage)
7013c87b03e5Sespie     emit_note (NULL, NOTE_INSN_REPEATED_LINE_NUMBER);
7014c87b03e5Sespie 
7015c87b03e5Sespie   /* Output a linenumber for the end of the function.
7016c87b03e5Sespie      SDB depends on this.  */
7017c87b03e5Sespie   emit_line_note_force (filename, line);
7018c87b03e5Sespie 
7019c87b03e5Sespie   /* Before the return label (if any), clobber the return
7020c87b03e5Sespie      registers so that they are not propagated live to the rest of
7021c87b03e5Sespie      the function.  This can only happen with functions that drop
7022c87b03e5Sespie      through; if there had been a return statement, there would
7023c87b03e5Sespie      have either been a return rtx, or a jump to the return label.
7024c87b03e5Sespie 
7025c87b03e5Sespie      We delay actual code generation after the current_function_value_rtx
7026c87b03e5Sespie      is computed.  */
7027c87b03e5Sespie   clobber_after = get_last_insn ();
7028c87b03e5Sespie 
7029c87b03e5Sespie   /* Output the label for the actual return from the function,
7030c87b03e5Sespie      if one is expected.  This happens either because a function epilogue
7031c87b03e5Sespie      is used instead of a return instruction, or because a return was done
7032c87b03e5Sespie      with a goto in order to run local cleanups, or because of pcc-style
7033c87b03e5Sespie      structure returning.  */
7034c87b03e5Sespie   if (return_label)
7035c87b03e5Sespie     emit_label (return_label);
7036c87b03e5Sespie 
7037c87b03e5Sespie   /* C++ uses this.  */
7038c87b03e5Sespie   if (end_bindings)
7039c87b03e5Sespie     expand_end_bindings (0, 0, 0);
7040c87b03e5Sespie 
7041c87b03e5Sespie   if (current_function_instrument_entry_exit)
7042c87b03e5Sespie     {
7043c87b03e5Sespie       rtx fun = DECL_RTL (current_function_decl);
7044c87b03e5Sespie       if (GET_CODE (fun) == MEM)
7045c87b03e5Sespie 	fun = XEXP (fun, 0);
7046c87b03e5Sespie       else
7047c87b03e5Sespie 	abort ();
7048c87b03e5Sespie       emit_library_call (profile_function_exit_libfunc, LCT_NORMAL, VOIDmode,
7049c87b03e5Sespie 			 2, fun, Pmode,
7050c87b03e5Sespie 			 expand_builtin_return_addr (BUILT_IN_RETURN_ADDRESS,
7051c87b03e5Sespie 						     0,
7052c87b03e5Sespie 						     hard_frame_pointer_rtx),
7053c87b03e5Sespie 			 Pmode);
7054c87b03e5Sespie     }
7055c87b03e5Sespie 
7056c87b03e5Sespie   /* Let except.c know where it should emit the call to unregister
7057c87b03e5Sespie      the function context for sjlj exceptions.  */
7058c87b03e5Sespie   if (flag_exceptions && USING_SJLJ_EXCEPTIONS)
7059c87b03e5Sespie     sjlj_emit_function_exit_after (get_last_insn ());
7060c87b03e5Sespie 
7061c87b03e5Sespie   /* If we had calls to alloca, and this machine needs
7062c87b03e5Sespie      an accurate stack pointer to exit the function,
7063c87b03e5Sespie      insert some code to save and restore the stack pointer.  */
7064c87b03e5Sespie #ifdef EXIT_IGNORE_STACK
7065c87b03e5Sespie   if (! EXIT_IGNORE_STACK)
7066c87b03e5Sespie #endif
7067c87b03e5Sespie     if (current_function_calls_alloca)
7068c87b03e5Sespie       {
7069c87b03e5Sespie 	rtx tem = 0;
7070c87b03e5Sespie 
7071c87b03e5Sespie 	emit_stack_save (SAVE_FUNCTION, &tem, parm_birth_insn);
7072c87b03e5Sespie 	emit_stack_restore (SAVE_FUNCTION, tem, NULL_RTX);
7073c87b03e5Sespie       }
7074c87b03e5Sespie 
7075c87b03e5Sespie   /* If scalar return value was computed in a pseudo-reg, or was a named
7076c87b03e5Sespie      return value that got dumped to the stack, copy that to the hard
7077c87b03e5Sespie      return register.  */
7078c87b03e5Sespie   if (DECL_RTL_SET_P (DECL_RESULT (current_function_decl)))
7079c87b03e5Sespie     {
7080c87b03e5Sespie       tree decl_result = DECL_RESULT (current_function_decl);
7081c87b03e5Sespie       rtx decl_rtl = DECL_RTL (decl_result);
7082c87b03e5Sespie 
7083c87b03e5Sespie       if (REG_P (decl_rtl)
7084c87b03e5Sespie 	  ? REGNO (decl_rtl) >= FIRST_PSEUDO_REGISTER
7085c87b03e5Sespie 	  : DECL_REGISTER (decl_result))
7086c87b03e5Sespie 	{
7087c87b03e5Sespie 	  rtx real_decl_rtl = current_function_return_rtx;
7088c87b03e5Sespie 
7089c87b03e5Sespie 	  /* This should be set in assign_parms.  */
7090c87b03e5Sespie 	  if (! REG_FUNCTION_VALUE_P (real_decl_rtl))
7091c87b03e5Sespie 	    abort ();
7092c87b03e5Sespie 
7093c87b03e5Sespie 	  /* If this is a BLKmode structure being returned in registers,
7094c87b03e5Sespie 	     then use the mode computed in expand_return.  Note that if
7095c87b03e5Sespie 	     decl_rtl is memory, then its mode may have been changed,
7096c87b03e5Sespie 	     but that current_function_return_rtx has not.  */
7097c87b03e5Sespie 	  if (GET_MODE (real_decl_rtl) == BLKmode)
7098c87b03e5Sespie 	    PUT_MODE (real_decl_rtl, GET_MODE (decl_rtl));
7099c87b03e5Sespie 
7100c87b03e5Sespie 	  /* If a named return value dumped decl_return to memory, then
7101c87b03e5Sespie 	     we may need to re-do the PROMOTE_MODE signed/unsigned
7102c87b03e5Sespie 	     extension.  */
7103c87b03e5Sespie 	  if (GET_MODE (real_decl_rtl) != GET_MODE (decl_rtl))
7104c87b03e5Sespie 	    {
7105c87b03e5Sespie 	      int unsignedp = TREE_UNSIGNED (TREE_TYPE (decl_result));
7106c87b03e5Sespie 
7107c87b03e5Sespie #ifdef PROMOTE_FUNCTION_RETURN
7108c87b03e5Sespie 	      promote_mode (TREE_TYPE (decl_result), GET_MODE (decl_rtl),
7109c87b03e5Sespie 			    &unsignedp, 1);
7110c87b03e5Sespie #endif
7111c87b03e5Sespie 
7112c87b03e5Sespie 	      convert_move (real_decl_rtl, decl_rtl, unsignedp);
7113c87b03e5Sespie 	    }
7114c87b03e5Sespie 	  else if (GET_CODE (real_decl_rtl) == PARALLEL)
7115c87b03e5Sespie 	    {
7116c87b03e5Sespie 	      /* If expand_function_start has created a PARALLEL for decl_rtl,
7117c87b03e5Sespie 		 move the result to the real return registers.  Otherwise, do
7118c87b03e5Sespie 		 a group load from decl_rtl for a named return.  */
7119c87b03e5Sespie 	      if (GET_CODE (decl_rtl) == PARALLEL)
7120c87b03e5Sespie 		emit_group_move (real_decl_rtl, decl_rtl);
7121c87b03e5Sespie 	      else
7122c87b03e5Sespie 		emit_group_load (real_decl_rtl, decl_rtl,
7123c87b03e5Sespie 				 int_size_in_bytes (TREE_TYPE (decl_result)));
7124c87b03e5Sespie 	    }
7125c87b03e5Sespie 	  else
7126c87b03e5Sespie 	    emit_move_insn (real_decl_rtl, decl_rtl);
7127c87b03e5Sespie 	}
7128c87b03e5Sespie     }
7129c87b03e5Sespie 
7130c87b03e5Sespie   /* If returning a structure, arrange to return the address of the value
7131c87b03e5Sespie      in a place where debuggers expect to find it.
7132c87b03e5Sespie 
7133c87b03e5Sespie      If returning a structure PCC style,
7134c87b03e5Sespie      the caller also depends on this value.
7135c87b03e5Sespie      And current_function_returns_pcc_struct is not necessarily set.  */
7136c87b03e5Sespie   if (current_function_returns_struct
7137c87b03e5Sespie       || current_function_returns_pcc_struct)
7138c87b03e5Sespie     {
7139c87b03e5Sespie       rtx value_address
7140c87b03e5Sespie 	= XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
7141c87b03e5Sespie       tree type = TREE_TYPE (DECL_RESULT (current_function_decl));
7142c87b03e5Sespie #ifdef FUNCTION_OUTGOING_VALUE
7143c87b03e5Sespie       rtx outgoing
7144c87b03e5Sespie 	= FUNCTION_OUTGOING_VALUE (build_pointer_type (type),
7145c87b03e5Sespie 				   current_function_decl);
7146c87b03e5Sespie #else
7147c87b03e5Sespie       rtx outgoing
7148c87b03e5Sespie 	= FUNCTION_VALUE (build_pointer_type (type), current_function_decl);
7149c87b03e5Sespie #endif
7150c87b03e5Sespie 
7151c87b03e5Sespie       /* Mark this as a function return value so integrate will delete the
7152c87b03e5Sespie 	 assignment and USE below when inlining this function.  */
7153c87b03e5Sespie       REG_FUNCTION_VALUE_P (outgoing) = 1;
7154c87b03e5Sespie 
7155c87b03e5Sespie #ifdef POINTERS_EXTEND_UNSIGNED
7156c87b03e5Sespie       /* The address may be ptr_mode and OUTGOING may be Pmode.  */
7157c87b03e5Sespie       if (GET_MODE (outgoing) != GET_MODE (value_address))
7158c87b03e5Sespie 	value_address = convert_memory_address (GET_MODE (outgoing),
7159c87b03e5Sespie 						value_address);
7160c87b03e5Sespie #endif
7161c87b03e5Sespie 
7162c87b03e5Sespie       emit_move_insn (outgoing, value_address);
7163c87b03e5Sespie 
7164c87b03e5Sespie       /* Show return register used to hold result (in this case the address
7165c87b03e5Sespie 	 of the result).  */
7166c87b03e5Sespie       current_function_return_rtx = outgoing;
7167c87b03e5Sespie     }
7168c87b03e5Sespie 
7169c87b03e5Sespie   /* If this is an implementation of throw, do what's necessary to
7170c87b03e5Sespie      communicate between __builtin_eh_return and the epilogue.  */
7171c87b03e5Sespie   expand_eh_return ();
7172c87b03e5Sespie 
7173c87b03e5Sespie   /* Emit the actual code to clobber return register.  */
7174c87b03e5Sespie   {
7175c87b03e5Sespie     rtx seq, after;
7176c87b03e5Sespie 
7177c87b03e5Sespie     start_sequence ();
7178c87b03e5Sespie     clobber_return_register ();
7179c87b03e5Sespie     seq = get_insns ();
7180c87b03e5Sespie     end_sequence ();
7181c87b03e5Sespie 
7182c87b03e5Sespie     after = emit_insn_after (seq, clobber_after);
7183c87b03e5Sespie 
7184c87b03e5Sespie     if (clobber_after != after)
7185c87b03e5Sespie       cfun->x_clobber_return_insn = after;
7186c87b03e5Sespie   }
7187c87b03e5Sespie 
7188c87b03e5Sespie   /* ??? This should no longer be necessary since stupid is no longer with
7189c87b03e5Sespie      us, but there are some parts of the compiler (eg reload_combine, and
7190c87b03e5Sespie      sh mach_dep_reorg) that still try and compute their own lifetime info
7191c87b03e5Sespie      instead of using the general framework.  */
7192c87b03e5Sespie   use_return_register ();
7193c87b03e5Sespie 
7194c87b03e5Sespie   /* Fix up any gotos that jumped out to the outermost
7195c87b03e5Sespie      binding level of the function.
7196c87b03e5Sespie      Must follow emitting RETURN_LABEL.  */
7197c87b03e5Sespie 
7198c87b03e5Sespie   /* If you have any cleanups to do at this point,
7199c87b03e5Sespie      and they need to create temporary variables,
7200c87b03e5Sespie      then you will lose.  */
7201c87b03e5Sespie   expand_fixups (get_insns ());
7202c87b03e5Sespie }
7203c87b03e5Sespie 
7204c87b03e5Sespie rtx
get_arg_pointer_save_area(f)7205c87b03e5Sespie get_arg_pointer_save_area (f)
7206c87b03e5Sespie      struct function *f;
7207c87b03e5Sespie {
7208c87b03e5Sespie   rtx ret = f->x_arg_pointer_save_area;
7209c87b03e5Sespie 
7210c87b03e5Sespie   if (! ret)
7211c87b03e5Sespie     {
7212c87b03e5Sespie       ret = assign_stack_local_1 (Pmode, GET_MODE_SIZE (Pmode), 0, f);
7213c87b03e5Sespie       f->x_arg_pointer_save_area = ret;
7214c87b03e5Sespie     }
7215c87b03e5Sespie 
7216c87b03e5Sespie   if (f == cfun && ! f->arg_pointer_save_area_init)
7217c87b03e5Sespie     {
7218c87b03e5Sespie       rtx seq;
7219c87b03e5Sespie 
7220c87b03e5Sespie       /* Save the arg pointer at the beginning of the function.  The
7221c87b03e5Sespie 	 generated stack slot may not be a valid memory address, so we
7222c87b03e5Sespie 	 have to check it and fix it if necessary.  */
7223c87b03e5Sespie       start_sequence ();
7224c87b03e5Sespie       emit_move_insn (validize_mem (ret), virtual_incoming_args_rtx);
7225c87b03e5Sespie       seq = get_insns ();
7226c87b03e5Sespie       end_sequence ();
7227c87b03e5Sespie 
7228c87b03e5Sespie       push_topmost_sequence ();
7229c87b03e5Sespie       emit_insn_after (seq, get_insns ());
7230c87b03e5Sespie       pop_topmost_sequence ();
7231c87b03e5Sespie     }
7232c87b03e5Sespie 
7233c87b03e5Sespie   return ret;
7234c87b03e5Sespie }
7235c87b03e5Sespie 
7236c87b03e5Sespie /* Extend a vector that records the INSN_UIDs of INSNS
7237c87b03e5Sespie    (a list of one or more insns).  */
7238c87b03e5Sespie 
7239c87b03e5Sespie static void
record_insns(insns,vecp)7240c87b03e5Sespie record_insns (insns, vecp)
7241c87b03e5Sespie      rtx insns;
7242c87b03e5Sespie      varray_type *vecp;
7243c87b03e5Sespie {
7244c87b03e5Sespie   int i, len;
7245c87b03e5Sespie   rtx tmp;
7246c87b03e5Sespie 
7247c87b03e5Sespie   tmp = insns;
7248c87b03e5Sespie   len = 0;
7249c87b03e5Sespie   while (tmp != NULL_RTX)
7250c87b03e5Sespie     {
7251c87b03e5Sespie       len++;
7252c87b03e5Sespie       tmp = NEXT_INSN (tmp);
7253c87b03e5Sespie     }
7254c87b03e5Sespie 
7255c87b03e5Sespie   i = VARRAY_SIZE (*vecp);
7256c87b03e5Sespie   VARRAY_GROW (*vecp, i + len);
7257c87b03e5Sespie   tmp = insns;
7258c87b03e5Sespie   while (tmp != NULL_RTX)
7259c87b03e5Sespie     {
7260c87b03e5Sespie       VARRAY_INT (*vecp, i) = INSN_UID (tmp);
7261c87b03e5Sespie       i++;
7262c87b03e5Sespie       tmp = NEXT_INSN (tmp);
7263c87b03e5Sespie     }
7264c87b03e5Sespie }
7265c87b03e5Sespie 
7266c87b03e5Sespie /* Determine how many INSN_UIDs in VEC are part of INSN.  Because we can
7267c87b03e5Sespie    be running after reorg, SEQUENCE rtl is possible.  */
7268c87b03e5Sespie 
7269c87b03e5Sespie static int
contains(insn,vec)7270c87b03e5Sespie contains (insn, vec)
7271c87b03e5Sespie      rtx insn;
7272c87b03e5Sespie      varray_type vec;
7273c87b03e5Sespie {
7274c87b03e5Sespie   int i, j;
7275c87b03e5Sespie 
7276c87b03e5Sespie   if (GET_CODE (insn) == INSN
7277c87b03e5Sespie       && GET_CODE (PATTERN (insn)) == SEQUENCE)
7278c87b03e5Sespie     {
7279c87b03e5Sespie       int count = 0;
7280c87b03e5Sespie       for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
7281c87b03e5Sespie 	for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
7282c87b03e5Sespie 	  if (INSN_UID (XVECEXP (PATTERN (insn), 0, i)) == VARRAY_INT (vec, j))
7283c87b03e5Sespie 	    count++;
7284c87b03e5Sespie       return count;
7285c87b03e5Sespie     }
7286c87b03e5Sespie   else
7287c87b03e5Sespie     {
7288c87b03e5Sespie       for (j = VARRAY_SIZE (vec) - 1; j >= 0; --j)
7289c87b03e5Sespie 	if (INSN_UID (insn) == VARRAY_INT (vec, j))
7290c87b03e5Sespie 	  return 1;
7291c87b03e5Sespie     }
7292c87b03e5Sespie   return 0;
7293c87b03e5Sespie }
7294c87b03e5Sespie 
7295c87b03e5Sespie int
prologue_epilogue_contains(insn)7296c87b03e5Sespie prologue_epilogue_contains (insn)
7297c87b03e5Sespie      rtx insn;
7298c87b03e5Sespie {
7299c87b03e5Sespie   if (contains (insn, prologue))
7300c87b03e5Sespie     return 1;
7301c87b03e5Sespie   if (contains (insn, epilogue))
7302c87b03e5Sespie     return 1;
7303c87b03e5Sespie   return 0;
7304c87b03e5Sespie }
7305c87b03e5Sespie 
7306c87b03e5Sespie int
sibcall_epilogue_contains(insn)7307c87b03e5Sespie sibcall_epilogue_contains (insn)
7308c87b03e5Sespie      rtx insn;
7309c87b03e5Sespie {
7310c87b03e5Sespie   if (sibcall_epilogue)
7311c87b03e5Sespie     return contains (insn, sibcall_epilogue);
7312c87b03e5Sespie   return 0;
7313c87b03e5Sespie }
7314c87b03e5Sespie 
7315c87b03e5Sespie #ifdef HAVE_return
7316c87b03e5Sespie /* Insert gen_return at the end of block BB.  This also means updating
7317c87b03e5Sespie    block_for_insn appropriately.  */
7318c87b03e5Sespie 
7319c87b03e5Sespie static void
emit_return_into_block(bb,line_note)7320c87b03e5Sespie emit_return_into_block (bb, line_note)
7321c87b03e5Sespie      basic_block bb;
7322c87b03e5Sespie      rtx line_note;
7323c87b03e5Sespie {
7324c87b03e5Sespie   rtx p, end;
7325c87b03e5Sespie 
7326c87b03e5Sespie   p = NEXT_INSN (bb->end);
7327c87b03e5Sespie   end = emit_jump_insn_after (gen_return (), bb->end);
7328c87b03e5Sespie   if (line_note)
7329c87b03e5Sespie     emit_line_note_after (NOTE_SOURCE_FILE (line_note),
7330c87b03e5Sespie 			  NOTE_LINE_NUMBER (line_note), PREV_INSN (bb->end));
7331c87b03e5Sespie }
7332c87b03e5Sespie #endif /* HAVE_return */
7333c87b03e5Sespie 
7334c87b03e5Sespie #if defined(HAVE_epilogue) && defined(INCOMING_RETURN_ADDR_RTX)
7335c87b03e5Sespie 
7336c87b03e5Sespie /* These functions convert the epilogue into a variant that does not modify the
7337c87b03e5Sespie    stack pointer.  This is used in cases where a function returns an object
7338c87b03e5Sespie    whose size is not known until it is computed.  The called function leaves the
7339c87b03e5Sespie    object on the stack, leaves the stack depressed, and returns a pointer to
7340c87b03e5Sespie    the object.
7341c87b03e5Sespie 
7342c87b03e5Sespie    What we need to do is track all modifications and references to the stack
7343c87b03e5Sespie    pointer, deleting the modifications and changing the references to point to
7344c87b03e5Sespie    the location the stack pointer would have pointed to had the modifications
7345c87b03e5Sespie    taken place.
7346c87b03e5Sespie 
7347c87b03e5Sespie    These functions need to be portable so we need to make as few assumptions
7348c87b03e5Sespie    about the epilogue as we can.  However, the epilogue basically contains
7349c87b03e5Sespie    three things: instructions to reset the stack pointer, instructions to
7350c87b03e5Sespie    reload registers, possibly including the frame pointer, and an
7351c87b03e5Sespie    instruction to return to the caller.
7352c87b03e5Sespie 
7353c87b03e5Sespie    If we can't be sure of what a relevant epilogue insn is doing, we abort.
7354c87b03e5Sespie    We also make no attempt to validate the insns we make since if they are
7355c87b03e5Sespie    invalid, we probably can't do anything valid.  The intent is that these
7356c87b03e5Sespie    routines get "smarter" as more and more machines start to use them and
7357c87b03e5Sespie    they try operating on different epilogues.
7358c87b03e5Sespie 
7359c87b03e5Sespie    We use the following structure to track what the part of the epilogue that
7360c87b03e5Sespie    we've already processed has done.  We keep two copies of the SP equivalence,
7361c87b03e5Sespie    one for use during the insn we are processing and one for use in the next
7362c87b03e5Sespie    insn.  The difference is because one part of a PARALLEL may adjust SP
7363c87b03e5Sespie    and the other may use it.  */
7364c87b03e5Sespie 
/* State tracked while rewriting an epilogue in keep_stack_depressed.
   Two copies of the SP equivalence are kept because one part of a
   PARALLEL may adjust SP while another part uses the old value.  */
struct epi_info
{
  rtx sp_equiv_reg;		/* REG that SP is set from, perhaps SP.  */
  HOST_WIDE_INT sp_offset;	/* Offset from SP_EQUIV_REG of present SP.  */
  rtx new_sp_equiv_reg;		/* REG to be used at end of insn.  */
  HOST_WIDE_INT new_sp_offset;	/* Offset to be used at end of insn.  */
  rtx equiv_reg_src;		/* If nonzero, the value that SP_EQUIV_REG
				   should be set to once we no longer need
				   its value (a pending, not-yet-emitted
				   load).  */
};
7375c87b03e5Sespie 
7376c87b03e5Sespie static void handle_epilogue_set PARAMS ((rtx, struct epi_info *));
7377c87b03e5Sespie static void emit_equiv_load PARAMS ((struct epi_info *));
7378c87b03e5Sespie 
7379c87b03e5Sespie /* Modify INSNS, a list of one or more insns that is part of the epilogue, to
7380c87b03e5Sespie    make no modifications to the stack pointer.  Return the new list of insns.  */
7381c87b03e5Sespie 
static rtx
keep_stack_depressed (insns)
     rtx insns;
{
  int j;
  struct epi_info info;
  rtx insn, next;

  /* If the epilogue is just a single instruction, it must be OK as is.  */

  if (NEXT_INSN (insns) == NULL_RTX)
    return insns;

  /* Otherwise, start a sequence, initialize the information we have, and
     process all the insns we were given.  */
  start_sequence ();

  info.sp_equiv_reg = stack_pointer_rtx;
  info.sp_offset = 0;
  info.equiv_reg_src = 0;

  insn = insns;
  next = NULL_RTX;
  while (insn != NULL_RTX)
    {
      next = NEXT_INSN (insn);

      /* Notes, barriers and the like pass through unchanged.  */
      if (!INSN_P (insn))
	{
	  add_insn (insn);
	  insn = next;
	  continue;
	}

      /* If this insn references the register that SP is equivalent to and
	 we have a pending load to that register, we must force out the load
	 first and then indicate we no longer know what SP's equivalent is.  */
      if (info.equiv_reg_src != 0
	  && reg_referenced_p (info.sp_equiv_reg, PATTERN (insn)))
	{
	  emit_equiv_load (&info);
	  info.sp_equiv_reg = 0;
	}

      /* Start this insn with the equivalence left by the previous one;
	 handle_epilogue_set updates the "new" copies.  */
      info.new_sp_equiv_reg = info.sp_equiv_reg;
      info.new_sp_offset = info.sp_offset;

      /* If this is a (RETURN) and the return address is on the stack,
	 update the address and change to an indirect jump.  */
      if (GET_CODE (PATTERN (insn)) == RETURN
	  || (GET_CODE (PATTERN (insn)) == PARALLEL
	      && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == RETURN))
	{
	  rtx retaddr = INCOMING_RETURN_ADDR_RTX;
	  rtx base = 0;
	  HOST_WIDE_INT offset = 0;
	  rtx jump_insn, jump_set;

	  /* If the return address is in a register, we can emit the insn
	     unchanged.  Otherwise, it must be a MEM and we see what the
	     base register and offset are.  In any case, we have to emit any
	     pending load to the equivalent reg of SP, if any.  */
	  if (GET_CODE (retaddr) == REG)
	    {
	      emit_equiv_load (&info);
	      add_insn (insn);
	      insn = next;
	      continue;
	    }
	  else if (GET_CODE (retaddr) == MEM
		   && GET_CODE (XEXP (retaddr, 0)) == REG)
	    base = gen_rtx_REG (Pmode, REGNO (XEXP (retaddr, 0))), offset = 0;
	  else if (GET_CODE (retaddr) == MEM
		   && GET_CODE (XEXP (retaddr, 0)) == PLUS
		   && GET_CODE (XEXP (XEXP (retaddr, 0), 0)) == REG
		   && GET_CODE (XEXP (XEXP (retaddr, 0), 1)) == CONST_INT)
	    {
	      base = gen_rtx_REG (Pmode, REGNO (XEXP (XEXP (retaddr, 0), 0)));
	      offset = INTVAL (XEXP (XEXP (retaddr, 0), 1));
	    }
	  else
	    abort ();

	  /* If the base of the location containing the return pointer
	     is SP, we must update it with the replacement address.  Otherwise,
	     just build the necessary MEM.  */
	  retaddr = plus_constant (base, offset);
	  if (base == stack_pointer_rtx)
	    retaddr = simplify_replace_rtx (retaddr, stack_pointer_rtx,
					    plus_constant (info.sp_equiv_reg,
							   info.sp_offset));

	  retaddr = gen_rtx_MEM (Pmode, retaddr);

	  /* If there is a pending load to the equivalent register for SP
	     and we reference that register, we must load our address into
	     a scratch register and then do that load.  */
	  if (info.equiv_reg_src
	      && reg_overlap_mentioned_p (info.equiv_reg_src, retaddr))
	    {
	      unsigned int regno;
	      rtx reg;

	      /* Look for a call-clobbered, non-fixed hard register that is
		 not live on exit and is not used by the pending value.  */
	      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
		if (HARD_REGNO_MODE_OK (regno, Pmode)
		    && !fixed_regs[regno]
		    && TEST_HARD_REG_BIT (regs_invalidated_by_call, regno)
		    && !REGNO_REG_SET_P (EXIT_BLOCK_PTR->global_live_at_start,
					 regno)
		    && !refers_to_regno_p (regno,
					   regno + HARD_REGNO_NREGS (regno,
								     Pmode),
					   info.equiv_reg_src, NULL))
		  break;

	      if (regno == FIRST_PSEUDO_REGISTER)
		abort ();

	      reg = gen_rtx_REG (Pmode, regno);
	      emit_move_insn (reg, retaddr);
	      retaddr = reg;
	    }

	  emit_equiv_load (&info);
	  jump_insn = emit_jump_insn (gen_indirect_jump (retaddr));

	  /* Show the SET in the above insn is a RETURN.  */
	  jump_set = single_set (jump_insn);
	  if (jump_set == 0)
	    abort ();
	  else
	    SET_IS_RETURN_P (jump_set) = 1;
	}

      /* If SP is not mentioned in the pattern and its equivalent register, if
	 any, is not modified, just emit it.  Otherwise, if neither is set,
	 replace the reference to SP and emit the insn.  If none of those are
	 true, handle each SET individually.  */
      else if (!reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))
	       && (info.sp_equiv_reg == stack_pointer_rtx
		   || !reg_set_p (info.sp_equiv_reg, insn)))
	add_insn (insn);
      else if (! reg_set_p (stack_pointer_rtx, insn)
	       && (info.sp_equiv_reg == stack_pointer_rtx
		   || !reg_set_p (info.sp_equiv_reg, insn)))
	{
	  if (! validate_replace_rtx (stack_pointer_rtx,
				      plus_constant (info.sp_equiv_reg,
						     info.sp_offset),
				      insn))
	    abort ();

	  add_insn (insn);
	}
      else if (GET_CODE (PATTERN (insn)) == SET)
	handle_epilogue_set (PATTERN (insn), &info);
      else if (GET_CODE (PATTERN (insn)) == PARALLEL)
	{
	  for (j = 0; j < XVECLEN (PATTERN (insn), 0); j++)
	    if (GET_CODE (XVECEXP (PATTERN (insn), 0, j)) == SET)
	      handle_epilogue_set (XVECEXP (PATTERN (insn), 0, j), &info);
	}
      else
	add_insn (insn);

      /* Commit the equivalence computed while processing this insn.  */
      info.sp_equiv_reg = info.new_sp_equiv_reg;
      info.sp_offset = info.new_sp_offset;

      insn = next;
    }

  insns = get_insns ();
  end_sequence ();
  return insns;
}
7557c87b03e5Sespie 
7558c87b03e5Sespie /* SET is a SET from an insn in the epilogue.  P is a pointer to the epi_info
7559c87b03e5Sespie    structure that contains information about what we've seen so far.  We
7560c87b03e5Sespie    process this SET by either updating that data or by emitting one or
7561c87b03e5Sespie    more insns.  */
7562c87b03e5Sespie 
/* SET is a SET from an insn in the epilogue.  P is a pointer to the epi_info
   structure that contains information about what we've seen so far.  We
   process this SET by either updating that data or by emitting one or
   more insns.  */

static void
handle_epilogue_set (set, p)
     rtx set;
     struct epi_info *p;
{
  /* First handle the case where we are setting SP.  Record what it is being
     set from.  If unknown, abort.  */
  if (reg_set_p (stack_pointer_rtx, set))
    {
      if (SET_DEST (set) != stack_pointer_rtx)
	abort ();

      /* (plus REG const) gives an equivalent register plus offset; any
	 other source becomes the new equivalence with a zero offset.  */
      if (GET_CODE (SET_SRC (set)) == PLUS
	  && GET_CODE (XEXP (SET_SRC (set), 1)) == CONST_INT)
	{
	  p->new_sp_equiv_reg = XEXP (SET_SRC (set), 0);
	  p->new_sp_offset = INTVAL (XEXP (SET_SRC (set), 1));
	}
      else
	p->new_sp_equiv_reg = SET_SRC (set), p->new_sp_offset = 0;

      /* If we are adjusting SP, we adjust from the old data.  */
      if (p->new_sp_equiv_reg == stack_pointer_rtx)
	{
	  p->new_sp_equiv_reg = p->sp_equiv_reg;
	  p->new_sp_offset += p->sp_offset;
	}

      /* SP must remain equivalent to some hard register.  */
      if (p->new_sp_equiv_reg == 0 || GET_CODE (p->new_sp_equiv_reg) != REG)
	abort ();

      return;
    }

  /* Next handle the case where we are setting SP's equivalent register.
     If we already have a value to set it to, abort.  We could update, but
     there seems little point in handling that case.  Note that we have
     to allow for the case where we are setting the register set in
     the previous part of a PARALLEL inside a single insn.  But use the
     old offset for any updates within this insn.  */
  else if (p->new_sp_equiv_reg != 0 && reg_set_p (p->new_sp_equiv_reg, set))
    {
      if (!rtx_equal_p (p->new_sp_equiv_reg, SET_DEST (set))
	  || p->equiv_reg_src != 0)
	abort ();
      else
	/* Record the load as pending; it is emitted later by
	   emit_equiv_load, with SP references rewritten.  */
	p->equiv_reg_src
	  = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
				  plus_constant (p->sp_equiv_reg,
						 p->sp_offset));
    }

  /* Otherwise, replace any references to SP in the insn to its new value
     and emit the insn.  */
  else
    {
      SET_SRC (set) = simplify_replace_rtx (SET_SRC (set), stack_pointer_rtx,
					    plus_constant (p->sp_equiv_reg,
							   p->sp_offset));
      SET_DEST (set) = simplify_replace_rtx (SET_DEST (set), stack_pointer_rtx,
					     plus_constant (p->sp_equiv_reg,
							    p->sp_offset));
      emit_insn (set);
    }
}
7628c87b03e5Sespie 
7629c87b03e5Sespie /* Emit an insn to do the load shown in p->equiv_reg_src, if needed.  */
7630c87b03e5Sespie 
7631c87b03e5Sespie static void
emit_equiv_load(p)7632c87b03e5Sespie emit_equiv_load (p)
7633c87b03e5Sespie      struct epi_info *p;
7634c87b03e5Sespie {
7635c87b03e5Sespie   if (p->equiv_reg_src != 0)
7636c87b03e5Sespie     emit_move_insn (p->sp_equiv_reg, p->equiv_reg_src);
7637c87b03e5Sespie 
7638c87b03e5Sespie   p->equiv_reg_src = 0;
7639c87b03e5Sespie }
7640c87b03e5Sespie #endif
7641c87b03e5Sespie 
7642c87b03e5Sespie /* Generate the prologue and epilogue RTL if the machine supports it.  Thread
7643c87b03e5Sespie    this into place with notes indicating where the prologue ends and where
7644c87b03e5Sespie    the epilogue begins.  Update the basic block information when possible.  */
7645c87b03e5Sespie 
7646c87b03e5Sespie void
thread_prologue_and_epilogue_insns(f)7647c87b03e5Sespie thread_prologue_and_epilogue_insns (f)
7648c87b03e5Sespie      rtx f ATTRIBUTE_UNUSED;
7649c87b03e5Sespie {
7650c87b03e5Sespie   int inserted = 0;
7651c87b03e5Sespie   edge e;
7652c87b03e5Sespie #if defined (HAVE_sibcall_epilogue) || defined (HAVE_epilogue) || defined (HAVE_return) || defined (HAVE_prologue)
7653c87b03e5Sespie   rtx seq;
7654c87b03e5Sespie #endif
7655c87b03e5Sespie #ifdef HAVE_prologue
7656c87b03e5Sespie   rtx prologue_end = NULL_RTX;
7657c87b03e5Sespie #endif
7658c87b03e5Sespie #if defined (HAVE_epilogue) || defined(HAVE_return)
7659c87b03e5Sespie   rtx epilogue_end = NULL_RTX;
7660c87b03e5Sespie #endif
7661c87b03e5Sespie 
7662c87b03e5Sespie #ifdef HAVE_prologue
7663c87b03e5Sespie   if (HAVE_prologue)
7664c87b03e5Sespie     {
7665c87b03e5Sespie       start_sequence ();
7666c87b03e5Sespie       seq = gen_prologue ();
7667c87b03e5Sespie       emit_insn (seq);
7668c87b03e5Sespie 
7669c87b03e5Sespie       /* Retain a map of the prologue insns.  */
7670c87b03e5Sespie       record_insns (seq, &prologue);
7671c87b03e5Sespie       prologue_end = emit_note (NULL, NOTE_INSN_PROLOGUE_END);
7672c87b03e5Sespie 
7673c87b03e5Sespie       seq = get_insns ();
7674c87b03e5Sespie       end_sequence ();
7675c87b03e5Sespie 
7676c87b03e5Sespie       /* Can't deal with multiple successors of the entry block
7677c87b03e5Sespie          at the moment.  Function should always have at least one
7678c87b03e5Sespie          entry point.  */
7679c87b03e5Sespie       if (!ENTRY_BLOCK_PTR->succ || ENTRY_BLOCK_PTR->succ->succ_next)
7680c87b03e5Sespie 	abort ();
7681c87b03e5Sespie 
7682c87b03e5Sespie       insert_insn_on_edge (seq, ENTRY_BLOCK_PTR->succ);
7683c87b03e5Sespie       inserted = 1;
7684c87b03e5Sespie     }
7685c87b03e5Sespie #endif
7686c87b03e5Sespie 
7687c87b03e5Sespie   /* If the exit block has no non-fake predecessors, we don't need
7688c87b03e5Sespie      an epilogue.  */
7689c87b03e5Sespie   for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7690c87b03e5Sespie     if ((e->flags & EDGE_FAKE) == 0)
7691c87b03e5Sespie       break;
7692c87b03e5Sespie   if (e == NULL)
7693c87b03e5Sespie     goto epilogue_done;
7694c87b03e5Sespie 
7695c87b03e5Sespie #ifdef HAVE_return
7696c87b03e5Sespie   if (optimize && HAVE_return)
7697c87b03e5Sespie     {
7698c87b03e5Sespie       /* If we're allowed to generate a simple return instruction,
7699c87b03e5Sespie 	 then by definition we don't need a full epilogue.  Examine
7700c87b03e5Sespie 	 the block that falls through to EXIT.   If it does not
7701c87b03e5Sespie 	 contain any code, examine its predecessors and try to
7702c87b03e5Sespie 	 emit (conditional) return instructions.  */
7703c87b03e5Sespie 
7704c87b03e5Sespie       basic_block last;
7705c87b03e5Sespie       edge e_next;
7706c87b03e5Sespie       rtx label;
7707c87b03e5Sespie 
7708c87b03e5Sespie       for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7709c87b03e5Sespie 	if (e->flags & EDGE_FALLTHRU)
7710c87b03e5Sespie 	  break;
7711c87b03e5Sespie       if (e == NULL)
7712c87b03e5Sespie 	goto epilogue_done;
7713c87b03e5Sespie       last = e->src;
7714c87b03e5Sespie 
7715c87b03e5Sespie       /* Verify that there are no active instructions in the last block.  */
7716c87b03e5Sespie       label = last->end;
7717c87b03e5Sespie       while (label && GET_CODE (label) != CODE_LABEL)
7718c87b03e5Sespie 	{
7719c87b03e5Sespie 	  if (active_insn_p (label))
7720c87b03e5Sespie 	    break;
7721c87b03e5Sespie 	  label = PREV_INSN (label);
7722c87b03e5Sespie 	}
7723c87b03e5Sespie 
7724c87b03e5Sespie       if (last->head == label && GET_CODE (label) == CODE_LABEL)
7725c87b03e5Sespie 	{
7726c87b03e5Sespie 	  rtx epilogue_line_note = NULL_RTX;
7727c87b03e5Sespie 
7728c87b03e5Sespie 	  /* Locate the line number associated with the closing brace,
7729c87b03e5Sespie 	     if we can find one.  */
7730c87b03e5Sespie 	  for (seq = get_last_insn ();
7731c87b03e5Sespie 	       seq && ! active_insn_p (seq);
7732c87b03e5Sespie 	       seq = PREV_INSN (seq))
7733c87b03e5Sespie 	    if (GET_CODE (seq) == NOTE && NOTE_LINE_NUMBER (seq) > 0)
7734c87b03e5Sespie 	      {
7735c87b03e5Sespie 		epilogue_line_note = seq;
7736c87b03e5Sespie 		break;
7737c87b03e5Sespie 	      }
7738c87b03e5Sespie 
7739c87b03e5Sespie 	  for (e = last->pred; e; e = e_next)
7740c87b03e5Sespie 	    {
7741c87b03e5Sespie 	      basic_block bb = e->src;
7742c87b03e5Sespie 	      rtx jump;
7743c87b03e5Sespie 
7744c87b03e5Sespie 	      e_next = e->pred_next;
7745c87b03e5Sespie 	      if (bb == ENTRY_BLOCK_PTR)
7746c87b03e5Sespie 		continue;
7747c87b03e5Sespie 
7748c87b03e5Sespie 	      jump = bb->end;
7749c87b03e5Sespie 	      if ((GET_CODE (jump) != JUMP_INSN) || JUMP_LABEL (jump) != label)
7750c87b03e5Sespie 		continue;
7751c87b03e5Sespie 
7752c87b03e5Sespie 	      /* If we have an unconditional jump, we can replace that
7753c87b03e5Sespie 		 with a simple return instruction.  */
7754c87b03e5Sespie 	      if (simplejump_p (jump))
7755c87b03e5Sespie 		{
7756c87b03e5Sespie 		  emit_return_into_block (bb, epilogue_line_note);
7757c87b03e5Sespie 		  delete_insn (jump);
7758c87b03e5Sespie 		}
7759c87b03e5Sespie 
7760c87b03e5Sespie 	      /* If we have a conditional jump, we can try to replace
7761c87b03e5Sespie 		 that with a conditional return instruction.  */
7762c87b03e5Sespie 	      else if (condjump_p (jump))
7763c87b03e5Sespie 		{
7764c87b03e5Sespie 		  if (! redirect_jump (jump, 0, 0))
7765c87b03e5Sespie 		    continue;
7766c87b03e5Sespie 
7767c87b03e5Sespie 		  /* If this block has only one successor, it both jumps
7768c87b03e5Sespie 		     and falls through to the fallthru block, so we can't
7769c87b03e5Sespie 		     delete the edge.  */
7770c87b03e5Sespie 		  if (bb->succ->succ_next == NULL)
7771c87b03e5Sespie 		    continue;
7772c87b03e5Sespie 		}
7773c87b03e5Sespie 	      else
7774c87b03e5Sespie 		continue;
7775c87b03e5Sespie 
7776c87b03e5Sespie 	      /* Fix up the CFG for the successful change we just made.  */
7777c87b03e5Sespie 	      redirect_edge_succ (e, EXIT_BLOCK_PTR);
7778c87b03e5Sespie 	    }
7779c87b03e5Sespie 
7780c87b03e5Sespie 	  /* Emit a return insn for the exit fallthru block.  Whether
7781c87b03e5Sespie 	     this is still reachable will be determined later.  */
7782c87b03e5Sespie 
7783c87b03e5Sespie 	  emit_barrier_after (last->end);
7784c87b03e5Sespie 	  emit_return_into_block (last, epilogue_line_note);
7785c87b03e5Sespie 	  epilogue_end = last->end;
7786c87b03e5Sespie 	  last->succ->flags &= ~EDGE_FALLTHRU;
7787c87b03e5Sespie 	  goto epilogue_done;
7788c87b03e5Sespie 	}
7789c87b03e5Sespie     }
7790c87b03e5Sespie #endif
7791c87b03e5Sespie #ifdef HAVE_epilogue
7792c87b03e5Sespie   if (HAVE_epilogue)
7793c87b03e5Sespie     {
7794c87b03e5Sespie       /* Find the edge that falls through to EXIT.  Other edges may exist
7795c87b03e5Sespie 	 due to RETURN instructions, but those don't need epilogues.
7796c87b03e5Sespie 	 There really shouldn't be a mixture -- either all should have
7797c87b03e5Sespie 	 been converted or none, however...  */
7798c87b03e5Sespie 
7799c87b03e5Sespie       for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7800c87b03e5Sespie 	if (e->flags & EDGE_FALLTHRU)
7801c87b03e5Sespie 	  break;
7802c87b03e5Sespie       if (e == NULL)
7803c87b03e5Sespie 	goto epilogue_done;
7804c87b03e5Sespie 
7805c87b03e5Sespie       start_sequence ();
7806c87b03e5Sespie       epilogue_end = emit_note (NULL, NOTE_INSN_EPILOGUE_BEG);
7807c87b03e5Sespie 
7808c87b03e5Sespie       seq = gen_epilogue ();
7809c87b03e5Sespie 
7810c87b03e5Sespie #ifdef INCOMING_RETURN_ADDR_RTX
7811c87b03e5Sespie       /* If this function returns with the stack depressed and we can support
7812c87b03e5Sespie 	 it, massage the epilogue to actually do that.  */
7813c87b03e5Sespie       if (TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
7814c87b03e5Sespie 	  && TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl)))
7815c87b03e5Sespie 	seq = keep_stack_depressed (seq);
7816c87b03e5Sespie #endif
7817c87b03e5Sespie 
7818c87b03e5Sespie       emit_jump_insn (seq);
7819c87b03e5Sespie 
7820c87b03e5Sespie       /* Retain a map of the epilogue insns.  */
7821c87b03e5Sespie       record_insns (seq, &epilogue);
7822c87b03e5Sespie 
7823c87b03e5Sespie       seq = get_insns ();
7824c87b03e5Sespie       end_sequence ();
7825c87b03e5Sespie 
7826c87b03e5Sespie       insert_insn_on_edge (seq, e);
7827c87b03e5Sespie       inserted = 1;
7828c87b03e5Sespie     }
7829c87b03e5Sespie #endif
7830c87b03e5Sespie epilogue_done:
7831c87b03e5Sespie 
7832c87b03e5Sespie   if (inserted)
7833c87b03e5Sespie     commit_edge_insertions ();
7834c87b03e5Sespie 
7835c87b03e5Sespie #ifdef HAVE_sibcall_epilogue
7836c87b03e5Sespie   /* Emit sibling epilogues before any sibling call sites.  */
7837c87b03e5Sespie   for (e = EXIT_BLOCK_PTR->pred; e; e = e->pred_next)
7838c87b03e5Sespie     {
7839c87b03e5Sespie       basic_block bb = e->src;
7840c87b03e5Sespie       rtx insn = bb->end;
7841c87b03e5Sespie       rtx i;
7842c87b03e5Sespie       rtx newinsn;
7843c87b03e5Sespie 
7844c87b03e5Sespie       if (GET_CODE (insn) != CALL_INSN
7845c87b03e5Sespie 	  || ! SIBLING_CALL_P (insn))
7846c87b03e5Sespie 	continue;
7847c87b03e5Sespie 
7848c87b03e5Sespie       start_sequence ();
7849c87b03e5Sespie       emit_insn (gen_sibcall_epilogue ());
7850c87b03e5Sespie       seq = get_insns ();
7851c87b03e5Sespie       end_sequence ();
7852c87b03e5Sespie 
7853c87b03e5Sespie       /* Retain a map of the epilogue insns.  Used in life analysis to
7854c87b03e5Sespie 	 avoid getting rid of sibcall epilogue insns.  Do this before we
7855c87b03e5Sespie 	 actually emit the sequence.  */
7856c87b03e5Sespie       record_insns (seq, &sibcall_epilogue);
7857c87b03e5Sespie 
7858c87b03e5Sespie       i = PREV_INSN (insn);
7859c87b03e5Sespie       newinsn = emit_insn_before (seq, insn);
7860c87b03e5Sespie     }
7861c87b03e5Sespie #endif
7862c87b03e5Sespie 
7863c87b03e5Sespie #ifdef HAVE_prologue
7864c87b03e5Sespie   if (prologue_end)
7865c87b03e5Sespie     {
7866c87b03e5Sespie       rtx insn, prev;
7867c87b03e5Sespie 
7868c87b03e5Sespie       /* GDB handles `break f' by setting a breakpoint on the first
7869c87b03e5Sespie 	 line note after the prologue.  Which means (1) that if
7870c87b03e5Sespie 	 there are line number notes before where we inserted the
7871c87b03e5Sespie 	 prologue we should move them, and (2) we should generate a
7872c87b03e5Sespie 	 note before the end of the first basic block, if there isn't
7873c87b03e5Sespie 	 one already there.
7874c87b03e5Sespie 
7875c87b03e5Sespie 	 ??? This behavior is completely broken when dealing with
7876c87b03e5Sespie 	 multiple entry functions.  We simply place the note always
7877c87b03e5Sespie 	 into first basic block and let alternate entry points
7878c87b03e5Sespie 	 to be missed.
7879c87b03e5Sespie        */
7880c87b03e5Sespie 
7881c87b03e5Sespie       for (insn = prologue_end; insn; insn = prev)
7882c87b03e5Sespie 	{
7883c87b03e5Sespie 	  prev = PREV_INSN (insn);
7884c87b03e5Sespie 	  if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7885c87b03e5Sespie 	    {
7886c87b03e5Sespie 	      /* Note that we cannot reorder the first insn in the
7887c87b03e5Sespie 		 chain, since rest_of_compilation relies on that
7888c87b03e5Sespie 		 remaining constant.  */
7889c87b03e5Sespie 	      if (prev == NULL)
7890c87b03e5Sespie 		break;
7891c87b03e5Sespie 	      reorder_insns (insn, insn, prologue_end);
7892c87b03e5Sespie 	    }
7893c87b03e5Sespie 	}
7894c87b03e5Sespie 
7895c87b03e5Sespie       /* Find the last line number note in the first block.  */
7896c87b03e5Sespie       for (insn = ENTRY_BLOCK_PTR->next_bb->end;
7897c87b03e5Sespie 	   insn != prologue_end && insn;
7898c87b03e5Sespie 	   insn = PREV_INSN (insn))
7899c87b03e5Sespie 	if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7900c87b03e5Sespie 	  break;
7901c87b03e5Sespie 
7902c87b03e5Sespie       /* If we didn't find one, make a copy of the first line number
7903c87b03e5Sespie 	 we run across.  */
7904c87b03e5Sespie       if (! insn)
7905c87b03e5Sespie 	{
7906c87b03e5Sespie 	  for (insn = next_active_insn (prologue_end);
7907c87b03e5Sespie 	       insn;
7908c87b03e5Sespie 	       insn = PREV_INSN (insn))
7909c87b03e5Sespie 	    if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7910c87b03e5Sespie 	      {
7911c87b03e5Sespie 		emit_line_note_after (NOTE_SOURCE_FILE (insn),
7912c87b03e5Sespie 				      NOTE_LINE_NUMBER (insn),
7913c87b03e5Sespie 				      prologue_end);
7914c87b03e5Sespie 		break;
7915c87b03e5Sespie 	      }
7916c87b03e5Sespie 	}
7917c87b03e5Sespie     }
7918c87b03e5Sespie #endif
7919c87b03e5Sespie #ifdef HAVE_epilogue
7920c87b03e5Sespie   if (epilogue_end)
7921c87b03e5Sespie     {
7922c87b03e5Sespie       rtx insn, next;
7923c87b03e5Sespie 
7924c87b03e5Sespie       /* Similarly, move any line notes that appear after the epilogue.
7925c87b03e5Sespie          There is no need, however, to be quite so anal about the existence
7926c87b03e5Sespie 	 of such a note.  */
7927c87b03e5Sespie       for (insn = epilogue_end; insn; insn = next)
7928c87b03e5Sespie 	{
7929c87b03e5Sespie 	  next = NEXT_INSN (insn);
7930c87b03e5Sespie 	  if (GET_CODE (insn) == NOTE && NOTE_LINE_NUMBER (insn) > 0)
7931c87b03e5Sespie 	    reorder_insns (insn, insn, PREV_INSN (epilogue_end));
7932c87b03e5Sespie 	}
7933c87b03e5Sespie     }
7934c87b03e5Sespie #endif
7935c87b03e5Sespie }
7936c87b03e5Sespie 
7937c87b03e5Sespie /* Reposition the prologue-end and epilogue-begin notes after instruction
7938c87b03e5Sespie    scheduling and delayed branch scheduling.  */
7939c87b03e5Sespie 
7940c87b03e5Sespie void
reposition_prologue_and_epilogue_notes(f)7941c87b03e5Sespie reposition_prologue_and_epilogue_notes (f)
7942c87b03e5Sespie      rtx f ATTRIBUTE_UNUSED;
7943c87b03e5Sespie {
7944c87b03e5Sespie #if defined (HAVE_prologue) || defined (HAVE_epilogue)
7945c87b03e5Sespie   rtx insn, last, note;
7946c87b03e5Sespie   int len;
7947c87b03e5Sespie 
7948c87b03e5Sespie   if ((len = VARRAY_SIZE (prologue)) > 0)
7949c87b03e5Sespie     {
7950c87b03e5Sespie       last = 0, note = 0;
7951c87b03e5Sespie 
7952c87b03e5Sespie       /* Scan from the beginning until we reach the last prologue insn.
7953c87b03e5Sespie 	 We apparently can't depend on basic_block_{head,end} after
7954c87b03e5Sespie 	 reorg has run.  */
7955c87b03e5Sespie       for (insn = f; insn; insn = NEXT_INSN (insn))
7956c87b03e5Sespie 	{
7957c87b03e5Sespie 	  if (GET_CODE (insn) == NOTE)
7958c87b03e5Sespie 	    {
7959c87b03e5Sespie 	      if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_PROLOGUE_END)
7960c87b03e5Sespie 		note = insn;
7961c87b03e5Sespie 	    }
7962c87b03e5Sespie 	  else if (contains (insn, prologue))
7963c87b03e5Sespie 	    {
7964c87b03e5Sespie 	      last = insn;
7965c87b03e5Sespie 	      if (--len == 0)
7966c87b03e5Sespie 		break;
7967c87b03e5Sespie 	    }
7968c87b03e5Sespie 	}
7969c87b03e5Sespie 
7970c87b03e5Sespie       if (last)
7971c87b03e5Sespie 	{
7972c87b03e5Sespie 	  rtx next;
7973c87b03e5Sespie 
7974c87b03e5Sespie 	  /* Find the prologue-end note if we haven't already, and
7975c87b03e5Sespie 	     move it to just after the last prologue insn.  */
7976c87b03e5Sespie 	  if (note == 0)
7977c87b03e5Sespie 	    {
7978c87b03e5Sespie 	      for (note = last; (note = NEXT_INSN (note));)
7979c87b03e5Sespie 		if (GET_CODE (note) == NOTE
7980c87b03e5Sespie 		    && NOTE_LINE_NUMBER (note) == NOTE_INSN_PROLOGUE_END)
7981c87b03e5Sespie 		  break;
7982c87b03e5Sespie 	    }
7983c87b03e5Sespie 
7984c87b03e5Sespie 	  next = NEXT_INSN (note);
7985c87b03e5Sespie 
7986c87b03e5Sespie 	  /* Avoid placing note between CODE_LABEL and BASIC_BLOCK note.  */
7987c87b03e5Sespie 	  if (GET_CODE (last) == CODE_LABEL)
7988c87b03e5Sespie 	    last = NEXT_INSN (last);
7989c87b03e5Sespie 	  reorder_insns (note, note, last);
7990c87b03e5Sespie 	}
7991c87b03e5Sespie     }
7992c87b03e5Sespie 
7993c87b03e5Sespie   if ((len = VARRAY_SIZE (epilogue)) > 0)
7994c87b03e5Sespie     {
7995c87b03e5Sespie       last = 0, note = 0;
7996c87b03e5Sespie 
7997c87b03e5Sespie       /* Scan from the end until we reach the first epilogue insn.
7998c87b03e5Sespie 	 We apparently can't depend on basic_block_{head,end} after
7999c87b03e5Sespie 	 reorg has run.  */
8000c87b03e5Sespie       for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
8001c87b03e5Sespie 	{
8002c87b03e5Sespie 	  if (GET_CODE (insn) == NOTE)
8003c87b03e5Sespie 	    {
8004c87b03e5Sespie 	      if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_EPILOGUE_BEG)
8005c87b03e5Sespie 		note = insn;
8006c87b03e5Sespie 	    }
8007c87b03e5Sespie 	  else if (contains (insn, epilogue))
8008c87b03e5Sespie 	    {
8009c87b03e5Sespie 	      last = insn;
8010c87b03e5Sespie 	      if (--len == 0)
8011c87b03e5Sespie 		break;
8012c87b03e5Sespie 	    }
8013c87b03e5Sespie 	}
8014c87b03e5Sespie 
8015c87b03e5Sespie       if (last)
8016c87b03e5Sespie 	{
8017c87b03e5Sespie 	  /* Find the epilogue-begin note if we haven't already, and
8018c87b03e5Sespie 	     move it to just before the first epilogue insn.  */
8019c87b03e5Sespie 	  if (note == 0)
8020c87b03e5Sespie 	    {
8021c87b03e5Sespie 	      for (note = insn; (note = PREV_INSN (note));)
8022c87b03e5Sespie 		if (GET_CODE (note) == NOTE
8023c87b03e5Sespie 		    && NOTE_LINE_NUMBER (note) == NOTE_INSN_EPILOGUE_BEG)
8024c87b03e5Sespie 		  break;
8025c87b03e5Sespie 	    }
8026c87b03e5Sespie 
8027c87b03e5Sespie 	  if (PREV_INSN (last) != note)
8028c87b03e5Sespie 	    reorder_insns (note, note, PREV_INSN (last));
8029c87b03e5Sespie 	}
8030c87b03e5Sespie     }
8031c87b03e5Sespie #endif /* HAVE_prologue or HAVE_epilogue */
8032c87b03e5Sespie }
8033c87b03e5Sespie 
8034c87b03e5Sespie /* Called once, at initialization, to initialize function.c.  */
8035c87b03e5Sespie 
8036c87b03e5Sespie void
init_function_once()8037c87b03e5Sespie init_function_once ()
8038c87b03e5Sespie {
8039c87b03e5Sespie   VARRAY_INT_INIT (prologue, 0, "prologue");
8040c87b03e5Sespie   VARRAY_INT_INIT (epilogue, 0, "epilogue");
8041c87b03e5Sespie   VARRAY_INT_INIT (sibcall_epilogue, 0, "sibcall_epilogue");
8042c87b03e5Sespie }
8043c87b03e5Sespie 
8044c87b03e5Sespie #include "gt-function.h"
8045